telemeta.models.item module
# -*- coding: utf-8 -*-
# Copyright (C) 2010 Samalyse SARL
# Copyright (C) 2010-2014 Parisson SARL
#
# This file is part of Telemeta.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Authors: Olivier Guilyardi <olivier@samalyse.com>
#          David LIPSZYC <davidlipszyc@gmail.com>
#          Guillaume Pellerin <yomguy@parisson.com>

from __future__ import division
from django.utils.translation import ugettext_lazy as _
from telemeta.models.core import *
from telemeta.models.resource import *
from telemeta.models.query import *
from telemeta.models.identifier import *
from telemeta.models.resource import *
from telemeta.models.enum import *

item_published_code_regex = getattr(settings, 'ITEM_PUBLISHED_CODE_REGEX', '[A-Za-z0-9._-]*')
item_unpublished_code_regex = getattr(settings, 'ITEM_UNPUBLISHED_CODE_REGEX', '[A-Za-z0-9._-]*')
item_code_regex = '(?:%s|%s)' % (item_published_code_regex, item_unpublished_code_regex)

ITEM_PUBLIC_ACCESS_CHOICES = (('none', _('none')),
                              ('metadata', _('metadata')),
                              ('full', _('full')))

ITEM_TRANSODING_STATUS = ((0, _('broken')),
                          (1, _('pending')),
                          (2, _('processing')),
                          (3, _('done')),
                          (5, _('ready')))


class MediaItem(MediaResource):
    "Describe an item"

    element_type = 'item'

    # Main Informations
    title = CharField(_('title'))
    alt_title = CharField(_('original title / translation'))
    collector = CharField(_('collector'), help_text=_('First name, Last name ; First name, Last name'))
    collection = ForeignKey('MediaCollection', related_name="items", verbose_name=_('collection'))
    recorded_from_date = DateField(_('recording date (from)'), help_text=_('YYYY-MM-DD'))
    recorded_to_date = DateField(_('recording date (until)'), help_text=_('YYYY-MM-DD'))
    public_access = CharField(_('access type'), choices=ITEM_PUBLIC_ACCESS_CHOICES,
                              max_length=16, default="metadata")

    # Geographic and cultural informations
    location = WeakForeignKey('Location', verbose_name=_('location'))
    location_comment = CharField(_('location details'))
    cultural_area = CharField(_('cultural area'))
    language = CharField(_('language'))
    language_iso = ForeignKey('Language', related_name="items", verbose_name=_('Language (ISO norm)'),
                              blank=True, null=True, on_delete=models.SET_NULL)
    ethnic_group = WeakForeignKey('EthnicGroup', related_name="items",
                                  verbose_name=_('population / social group'))
    context_comment = TextField(_('Ethnographic context'))

    # Musical informations
    moda_execut = CharField(_('implementing rules'))
    vernacular_style = WeakForeignKey('VernacularStyle', related_name="items",
                                      verbose_name=_('vernacular style'))
    generic_style = WeakForeignKey('GenericStyle', related_name="items",
                                   verbose_name=_('generic style'))
    author = CharField(_('author / compositor'), help_text=_('First name, Last name ; First name, Last name'))

    # Legal mentions
    organization = WeakForeignKey('Organization', verbose_name=_('organization'))
    depositor = CharField(_('depositor'))
    rights = WeakForeignKey('Rights', verbose_name=_('rights'))

    # Archiving data
    code = CharField(_('code'), unique=True, blank=True, required=True,
                     help_text=_('CollectionCode_ItemCode'))
    old_code = CharField(_('original code'), unique=False, blank=True)
    track = CharField(_('item number'))
    collector_selection = CharField(_('collector selection'))
    collector_from_collection = BooleanField(_('collector as in collection'))
    creator_reference = CharField(_('creator reference'))
    external_references = TextField(_('published references'))
    auto_period_access = BooleanField(_('automatic access after a rolling period'), default=True)
    comment = TextField(_('remarks'))

    # Technical data
    media_type = WeakForeignKey('MediaType', related_name="items", verbose_name=_('media type'))
    approx_duration = DurationField(_('approximative duration'), blank=True, help_text=_('hh:mm:ss'))
    mimetype = CharField(_('mime type'), max_length=255, blank=True)
    file = FileField(_('file'), upload_to='items/%Y/%m/%d', db_column="filename", max_length=1024)
    url = URLField(_('URL'), max_length=512, blank=True)

    # LAM
    recordist = CharField(_('recordist'))
    digitalist = CharField(_('digitalist'))
    digitization_date = DateField(_('digitization date'))
    publishing_date = DateField(_('publishing date'))
    scientist = CharField(_('scientist'), help_text=_('First name, Last name ; First name, Last name'))
    topic = WeakForeignKey('Topic', verbose_name=_('topic'))
    summary = TextField(_('summary'))
    contributor = CharField(_('contributor'))

    # Manager
    objects = MediaItemManager()

    exclude = ['copied_from_item', 'mimetype', 'organization', 'depositor',
               'rights', 'recordist', 'digitalist', 'digitization_date',
               'publishing_date', 'scientist', 'topic', 'summary', 'contributor', ]

    restricted = ['copied_from_item', 'mimetype', 'organization', 'depositor',
                  'rights', 'recordist', 'digitalist', 'digitization_date',
                  'publishing_date', 'scientist', 'topic', 'summary', 'contributor',
                  'public_access']

    def keywords(self):
        return ContextKeyword.objects.filter(item_relations__item=self)
    keywords.verbose_name = _('keywords')

    @property
    def public_id(self):
        if self.code:
            return self.code
        return str(self.id)

    @property
    def mime_type(self):
        if not self.mimetype:
            if self.file:
                if os.path.exists(self.file.path):
                    self.mimetype = mimetypes.guess_type(self.file.path)[0]
                    self.save()
                    return self.mimetype
                else:
                    return 'none'
            else:
                return 'none'
        else:
            return _('none')

    class Meta(MetaCore):
        db_table = 'media_items'
        permissions = (("can_play_all_items", "Can play all media items"),
                       ("can_download_all_items", "Can download all media items"),
                       ("can_run_analysis", "Can run analysis"),)
        verbose_name = _('item')

    def is_valid_code(self, code):
        "Check if the item code is well formed"
        if not re.match('^' + self.collection.code, self.code):
            return False
        if self.collection.is_published:
            regex = '^' + item_published_code_regex + '$'
        else:
            regex = '^' + item_unpublished_code_regex + '$'
        if re.match(regex, code):
            return True
        return False

    def clean(self):
        if strict_code:
            if self.code and not self.is_valid_code(self.code):
                raise ValidationError("%s is not a valid item code for collection %s"
                                      % (self.code, self.collection.code))

    def save(self, force_insert=False, force_update=False, *args, **kwargs):
        super(MediaItem, self).save(force_insert, force_update, *args, **kwargs)

    def computed_duration(self):
        "Tell the length in seconds of this item media data"
        return self.approx_duration
    computed_duration.verbose_name = _('computed duration')

    def __unicode__(self):
        if self.title and not re.match('^ *N *$', self.title):
            title = self.title
        else:
            title = unicode(self.collection.title)
        if self.track:
            title += ' ' + self.track
        return title

    def get_source(self):
        source = None
        source_type = None
        if self.file and os.path.exists(self.file.path):
            source = self.file.path
            source_type = 'file'
        elif self.url:
            source = self.url
            source_type = 'url'
        return source, source_type

    @property
    def instruments(self):
        "Return the instruments of the item"
        instruments = []
        performances = MediaItemPerformance.objects.filter(media_item=self)
        for performance in performances:
            instrument = performance.instrument
            alias = performance.alias
            if not instrument in instruments:
                instruments.append(instrument)
            if not alias in instruments:
                instruments.append(alias)
        # no reference for __name_cmp anywhere
        instruments.sort(self.__name_cmp)
        return instruments
    instruments.verbose_name = _("instruments")

    def size(self):
        if self.file and os.path.exists(self.file.path):
            return self.file.size
        else:
            return 0
    size.verbose_name = _('item size')

    def get_url(self):
        return get_full_url(reverse('telemeta-item-detail', kwargs={'public_id': self.pk}))

    def to_dict_with_more(self):
        # metadata = model_to_dict(self, fields=[], exclude=self.exclude)
        metadata = self.to_dict()
        for key in self.exclude:
            if key in metadata.keys():
                del metadata[key]

        metadata['url'] = self.get_url()

        revision = self.get_revision()
        if revision:
            time = unicode(revision.time)
        else:
            time = ''
        metadata['last_modification_date'] = time
        metadata['collection'] = self.collection.get_url()

        keywords = []
        for keyword in self.keywords():
            keywords.append(keyword.value)
        metadata['keywords'] = ';'.join(keywords)

        related_media_urls = []
        for media in self.related.all():
            if media.url:
                related_media_urls.append(media.url)
            else:
                try:
                    url = get_full_url(reverse('telemeta-item-related',
                                               kwargs={'public_id': self.public_id,
                                                       'media_id': media.id}))
                except:
                    url = ''
                related_media_urls.append(url)
        metadata['related_media_urls'] = ';'.join(related_media_urls)

        instruments = []
        instrument_vernacular_names = []
        performers = []
        for performance in self.performances.all():
            if performance.instrument:
                instruments.append(performance.instrument.name)
            if performance.alias:
                instrument_vernacular_names.append(performance.alias.name)
            if performance.musicians:
                performers.append(performance.musicians.replace(' et ', ';'))
        metadata['instruments'] = ';'.join(instruments)
        metadata['instrument_vernacular_names'] = ';'.join(instrument_vernacular_names)
        metadata['performers'] = ';'.join(performers)

        analyzers = ['channels', 'samplerate', 'duration', 'resolution', 'mime_type']
        for analyzer_id in analyzers:
            analysis = MediaItemAnalysis.objects.filter(item=self, analyzer_id=analyzer_id)
            if analysis:
                if analyzer_id == 'duration':
                    value = ':'.join([str('%.2d' % int(float(t)))
                                      for t in analysis[0].value.split(':')])
                else:
                    value = analysis[0].value
                metadata[analyzer_id] = value
            elif analyzer_id == 'duration':
                metadata[analyzer_id] = self.approx_duration
            else:
                metadata[analyzer_id] = ''

        metadata['file_size'] = unicode(self.size())
        metadata['thumbnail'] = get_full_url(reverse('telemeta-item-visualize',
                                                     kwargs={'public_id': self.public_id,
                                                             'grapher_id': 'waveform_centroid',
                                                             'width': 346, 'height': 130}))

        # One ID only
        identifiers = self.identifiers.all()
        if identifiers:
            identifier = identifiers[0]
            metadata['identifier_id'] = identifier.identifier
            metadata['identifier_type'] = identifier.type
            metadata['identifier_date'] = unicode(identifier.date_last)
            metadata['identifier_note'] = identifier.notes
        else:
            metadata['identifier_id'] = ''
            metadata['identifier_type'] = ''
            metadata['identifier_date'] = ''
            metadata['identifier_note'] = ''

        # Collection
        metadata['recording_context'] = self.collection.recording_context
        metadata['description_collection'] = self.collection.description
        metadata['status'] = self.collection.status
        metadata['original_format'] = self.collection.original_format
        metadata['physical_format'] = self.collection.physical_format
        metadata['year_published'] = self.collection.year_published
        metadata['publisher'] = self.collection.publisher
        metadata['publisher_collection'] = self.collection.publisher_collection
        metadata['reference_collection'] = self.collection.reference

        return metadata

    def to_row(self, tags):
        row = []
        _dict = self.to_dict_with_more()
        for tag in tags:
            if tag in _dict.keys():
                row.append(_dict[tag])
            else:
                row.append('')
        return row


class MediaItemRelated(MediaRelated):
    "Item related media"

    item = ForeignKey('MediaItem', related_name="related", verbose_name=_('item'))

    def parse_markers(self, **kwargs):
        # Parse KDEnLive session
        if self.file:
            if self.is_kdenlive_session():
                session = KDEnLiveSession(self.file.path)
                markers = session.markers(**kwargs)
                for marker in markers:
                    m = MediaItemMarker(item=self.item)
                    m.public_id = get_random_hash()
                    m.time = float(marker['time'])
                    m.title = marker['comment']
                    m.save()
                return markers

    class Meta(MetaCore):
        db_table = 'media_item_related'
        verbose_name = _('item related media')
        verbose_name_plural = _('item related media')


class MediaItemKeyword(ModelCore):
    "Item keyword"

    item = ForeignKey('MediaItem', verbose_name=_('item'), related_name="keyword_relations")
    keyword = ForeignKey('ContextKeyword', verbose_name=_('keyword'), related_name="item_relations")

    class Meta(MetaCore):
        db_table = 'media_item_keywords'
        unique_together = (('item', 'keyword'),)


class MediaItemPerformance(ModelCore):
    "Item performance"

    media_item = ForeignKey('MediaItem', related_name="performances", verbose_name=_('item'))
    instrument = WeakForeignKey('Instrument', related_name="performances", verbose_name=_('composition'))
    alias = WeakForeignKey('InstrumentAlias', related_name="performances", verbose_name=_('vernacular name'))
    instruments_num = CharField(_('number'))
    musicians = CharField(_('interprets'))

    class Meta(MetaCore):
        db_table = 'media_item_performances'


class MediaItemAnalysis(ModelCore):
    "Item analysis result computed by TimeSide"

    element_type = 'analysis'
    item = ForeignKey('MediaItem', related_name="analysis", verbose_name=_('item'))
    analyzer_id = CharField(_('id'), required=True)
    name = CharField(_('name'))
    value = CharField(_('value'))
    unit = CharField(_('unit'))

    class Meta(MetaCore):
        db_table = 'media_analysis'
        ordering = ['name']

    def to_dict(self):
        if self.analyzer_id == 'duration':
            if '.' in self.value:
                value = self.value.split('.')
                self.value = '.'.join([value[0], value[1][:2]])
        return {'id': self.analyzer_id, 'name': self.name, 'value': self.value, 'unit': self.unit}


class MediaItemMarker(MediaResource):
    "2D marker object : text value vs. time (in seconds)"

    element_type = 'marker'

    item = ForeignKey('MediaItem', related_name="markers", verbose_name=_('item'))
    public_id = CharField(_('public_id'), required=True)
    time = FloatField(_('time (s)'))
    title = CharField(_('title'))
    date = DateTimeField(_('date'), auto_now=True)
    description = TextField(_('description'))
    author = ForeignKey(User, related_name="markers", verbose_name=_('author'), blank=True, null=True)

    class Meta(MetaCore):
        db_table = 'media_markers'
        ordering = ['time']

    def __unicode__(self):
        if self.title:
            return self.title
        else:
            return self.public_id


class MediaItemTranscoded(MediaResource):
    "Item file transcoded"

    element_type = 'transcoded item'

    item = models.ForeignKey('MediaItem', related_name="transcoded", verbose_name=_('item'))
    mimetype = models.CharField(_('mime_type'), max_length=255, blank=True)
    date_added = DateTimeField(_('date'), auto_now_add=True)
    status = models.IntegerField(_('status'), choices=ITEM_TRANSODING_STATUS, default=1)
    file = models.FileField(_('file'), upload_to='items/%Y/%m/%d', max_length=1024, blank=True)

    @property
    def mime_type(self):
        if not self.mimetype:
            if self.file:
                if os.path.exists(self.file.path):
                    self.mimetype = mimetypes.guess_type(self.file.path)[0]
                    self.save()
                    return self.mimetype
                else:
                    return 'none'
            else:
                return 'none'
        else:
            return self.mimetype

    def __unicode__(self):
        if self.item.title:
            return self.item.title + ' - ' + self.mime_type
        else:
            return self.item.public_id + ' - ' + self.mime_type

    class Meta(MetaCore):
        db_table = app_name + '_media_transcoded'


class MediaItemTranscodingFlag(ModelCore):
    "Item flag to know if the MediaItem has been transcoded to a given format"

    item = ForeignKey('MediaItem', related_name="transcoding", verbose_name=_('item'))
    mime_type = CharField(_('mime_type'), required=True)
    date = DateTimeField(_('date'), auto_now=True)
    value = BooleanField(_('transcoded'))

    class Meta(MetaCore):
        db_table = 'media_transcoding'


class MediaItemIdentifier(Identifier):
    """Item identifier"""

    item = ForeignKey(MediaItem, related_name="identifiers", verbose_name=_('item'))

    class Meta(MetaCore):
        db_table = 'media_item_identifier'
        verbose_name = _('item identifier')
        verbose_name_plural = _('item identifiers')
        unique_together = ('identifier', 'item')


class MediaPart(MediaResource):
    "Describe an item part"

    element_type = 'part'
    item = ForeignKey('MediaItem', related_name="parts", verbose_name=_('item'))
    title = CharField(_('title'), required=True)
    start = FloatField(_('start'), required=True)
    end = FloatField(_('end'), required=True)

    class Meta(MetaCore):
        db_table = 'media_parts'
        verbose_name = _('item part')

    def __unicode__(self):
        return self.title
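The published/unpublished code patterns above are plain regular-expression fragments read from the Django settings at import time, with a permissive default. A minimal sketch of how such a pattern can be checked against an item code, mirroring the anchoring done by is_valid_code() below (the settings names are the ones read above; the sample codes are made up):

import re
from django.conf import settings

published_pattern = getattr(settings, 'ITEM_PUBLISHED_CODE_REGEX', '[A-Za-z0-9._-]*')

def looks_like_published_code(code):
    # Anchor the fragment exactly as is_valid_code() does before matching.
    return re.match('^' + published_pattern + '$', code) is not None

looks_like_published_code('CNRSMH_I_2010_001_01')   # hypothetical code; matches the default pattern
looks_like_published_code('bad code with spaces')   # False: spaces are rejected by the default pattern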
Module variables
var ITEM_PUBLIC_ACCESS_CHOICES
var ITEM_TRANSODING_STATUS
var PUBLIC_ACCESS_CHOICES
var app_name
var code_linesep
var default_decoding
var default_encoding
var engine
var eol
var ext
var item_code_regex
var item_published_code_regex
var item_unpublished_code_regex
var mime_type
var private_extra_types
var public_extra_types
var resource_code_regex
var strict_code
Classes
class MediaItem
Describe an item
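As a rough usage sketch only (the collection code, field values and the MediaCollection import path are assumptions; full_clean() triggers the clean() hook listed below, which enforces the code format when strict_code is enabled):

from telemeta.models.item import MediaItem
from telemeta.models.collection import MediaCollection   # assumed import path

collection = MediaCollection.objects.get(code='CNRSMH_2010_001')   # hypothetical collection code
item = MediaItem(collection=collection,
                 title='Chant de mariage',                          # hypothetical metadata
                 code=collection.code + '_01')
item.full_clean()   # runs field validation plus clean(), which checks the code against the collection
item.save()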
Ancestors (in MRO)
- MediaItem
- telemeta.models.resource.MediaResource
- telemeta.models.core.ModelCore
- telemeta.models.core.EnhancedModel
- django.db.models.base.Model
- dirtyfields.dirtyfields.DirtyFieldsMixin
- __builtin__.object
Class variables
var DoesNotExist
var ENABLE_M2M_CHECK
var Meta
var MultipleObjectsReturned
var alt_title
var analysis
var approx_duration
var auto_period_access
var code
var collection
var collector
var collector_from_collection
var collector_selection
var comment
var compare_function
var context_comment
var contributor
var creator_reference
var cultural_area
var depositor
var digitalist
var digitization_date
var element_type
var ethnic_group
var exclude
var external_references
var file
var format
var generic_style
var identifiers
var keyword_relations
var language
var language_iso
var location
var location_comment
var markers
var media_type
var mimetype
var moda_execut
var objects
var old_code
var organization
var parts
var performances
var public_access
var publishing_date
var recorded_from_date
var recorded_to_date
var recordist
var restricted
var rights
var scientist
var summary
var title
var topic
var track
var transcoded
var transcoding
var url
var vernacular_style
Static methods
def get_dom_field_name(
field_name)
Convert the class name to a DOM element name
@staticmethod
def get_dom_field_name(field_name):
    "Convert the class name to a DOM element name"
    tokens = field_name.split('_')
    name = tokens[0]
    for t in tokens[1:]:
        name += t[0].upper() + t[1:]
    return name
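For instance, an underscored field name is turned into a camel-cased element name:

MediaItem.get_dom_field_name('recorded_from_date')   # -> 'recordedFromDate'
MediaItem.get_dom_field_name('title')                # -> 'title'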
Instance variables
var instruments
Return the instruments of the item
var mime_type
var pk
var public_id
Methods
def __init__(
self, *args, **kwargs)
def __init__(self, *args, **kwargs): signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs) # Set up the storage for instance state self._state = ModelState() # There is a rather weird disparity here; if kwargs, it's set, then args # overrides it. It should be one or the other; don't duplicate the work # The reason for the kwargs check is that standard iterator passes in by # args, and instantiation for iteration is 33% faster. args_len = len(args) if args_len > len(self._meta.concrete_fields): # Daft, but matches old exception sans the err msg. raise IndexError("Number of args exceeds number of fields") if not kwargs: fields_iter = iter(self._meta.concrete_fields) # The ordering of the zip calls matter - zip throws StopIteration # when an iter throws it. So if the first iter throws it, the second # is *not* consumed. We rely on this, so don't change the order # without changing the logic. for val, field in zip(args, fields_iter): setattr(self, field.attname, val) else: # Slower, kwargs-ready version. fields_iter = iter(self._meta.fields) for val, field in zip(args, fields_iter): setattr(self, field.attname, val) kwargs.pop(field.name, None) # Maintain compatibility with existing calls. if isinstance(field.rel, ManyToOneRel): kwargs.pop(field.attname, None) # Now we're left with the unprocessed fields that *must* come from # keywords, or default. for field in fields_iter: is_related_object = False # This slightly odd construct is so that we can access any # data-descriptor object (DeferredAttribute) without triggering its # __get__ method. if (field.attname not in kwargs and (isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute) or field.column is None)): # This field will be populated on request. continue if kwargs: if isinstance(field.rel, ForeignObjectRel): try: # Assume object instance was passed in. rel_obj = kwargs.pop(field.name) is_related_object = True except KeyError: try: # Object instance wasn't passed in -- must be an ID. val = kwargs.pop(field.attname) except KeyError: val = field.get_default() else: # Object instance was passed in. Special case: You can # pass in "None" for related objects if it's allowed. if rel_obj is None and field.null: val = None else: try: val = kwargs.pop(field.attname) except KeyError: # This is done with an exception rather than the # default argument on pop because we don't want # get_default() to be evaluated, and then not used. # Refs #12057. val = field.get_default() else: val = field.get_default() if is_related_object: # If we are passed a related instance, set it using the # field.name instead of field.attname (e.g. "user" instead of # "user_id") so that the object gets properly cached (and type # checked) by the RelatedObjectDescriptor. setattr(self, field.name, rel_obj) else: setattr(self, field.attname, val) if kwargs: for prop in list(kwargs): try: if isinstance(getattr(self.__class__, prop), property): setattr(self, prop, kwargs.pop(prop)) except AttributeError: pass if kwargs: raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0]) super(Model, self).__init__() signals.post_init.send(sender=self.__class__, instance=self)
def clean(
self)
def clean(self):
    if strict_code:
        if self.code and not self.is_valid_code(self.code):
            raise ValidationError("%s is not a valid item code for collection %s"
                                  % (self.code, self.collection.code))
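When strict_code is enabled in the deployment settings, an invalid code therefore surfaces as a regular Django ValidationError. A hedged sketch (the code value is made up):

from django.core.exceptions import ValidationError

item.code = 'WRONG-PREFIX_01'    # hypothetical code that does not start with the collection code
try:
    item.full_clean()            # full_clean() calls clean(), shown above
except ValidationError as e:
    print(e)                     # "... is not a valid item code for collection ..."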
def clean_fields(
self, exclude=None)
Cleans all fields and raises a ValidationError containing message_dict of all validation errors if any occur.
def clean_fields(self, exclude=None): """ Cleans all fields and raises a ValidationError containing message_dict of all validation errors if any occur. """ if exclude is None: exclude = [] errors = {} for f in self._meta.fields: if f.name in exclude: continue # Skip validation for empty fields with blank=True. The developer # is responsible for making sure they have a valid value. raw_value = getattr(self, f.attname) if f.blank and raw_value in f.empty_values: continue try: setattr(self, f.attname, f.clean(raw_value, self)) except ValidationError as e: errors[f.name] = e.error_list if errors: raise ValidationError(errors)
def computed_duration(
self)
Tell the length in seconds of this item media data
def computed_duration(self): "Tell the length in seconds of this item media data" return self.approx_duration
def date_error_message(
self, lookup_type, field, unique_for)
def date_error_message(self, lookup_type, field, unique_for):
    opts = self._meta
    return _("%(field_name)s must be unique for %(date_field)s %(lookup)s.") % {
        'field_name': six.text_type(capfirst(opts.get_field(field).verbose_name)),
        'date_field': six.text_type(capfirst(opts.get_field(unique_for).verbose_name)),
        'lookup': lookup_type,
    }
def delete(
self)
def delete(self):
    if not self.pk:
        raise Exception("Can't delete without a primary key")
    self.__class__.objects.filter(pk=self.pk).delete()
def field_label(
cls, field_name=None)
@classmethod
def field_label(cls, field_name=None):
    if field_name:
        try:
            return cls._meta.get_field(field_name).verbose_name
        except FieldDoesNotExist:
            try:
                return getattr(cls, field_name).verbose_name
            except AttributeError:
                return field_name
    else:
        return cls._meta.verbose_name
def full_clean(
self, exclude=None, validate_unique=True)
Calls clean_fields, clean, and validate_unique, on the model,
and raises a ValidationError
for any errors that occurred.
def full_clean(self, exclude=None, validate_unique=True): """ Calls clean_fields, clean, and validate_unique, on the model, and raises a ``ValidationError`` for any errors that occurred. """ errors = {} if exclude is None: exclude = [] try: self.clean_fields(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) # Form.clean() is run even if other validation fails, so do the # same with Model.clean() for consistency. try: self.clean() except ValidationError as e: errors = e.update_error_dict(errors) # Run unique checks, but only for fields that passed validation. if validate_unique: for name in errors.keys(): if name != NON_FIELD_ERRORS and name not in exclude: exclude.append(name) try: self.validate_unique(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) if errors: raise ValidationError(errors)
def get_dirty_fields(
self, check_relationship=False, check_m2m=None, verbose=False)
def get_dirty_fields(self, check_relationship=False, check_m2m=None, verbose=False):
    if self._state.adding:
        # If the object has not yet been saved in the database, all fields are
        # considered dirty for consistency (see
        # https://github.com/romgar/django-dirtyfields/issues/65 for more details)
        pk_specified = self.pk is not None
        initial_dict = self._as_dict(check_relationship, include_primary_key=pk_specified)
        return initial_dict
    if check_m2m is not None and not self.ENABLE_M2M_CHECK:
        raise ValueError("You can't check m2m fields if ENABLE_M2M_CHECK is set to False")
    modified_fields = compare_states(self._as_dict(check_relationship),
                                     self._original_state,
                                     self.compare_function)
    if check_m2m:
        modified_m2m_fields = compare_states(check_m2m, self._original_m2m_state, self.compare_function)
        modified_fields.update(modified_m2m_fields)
    if not verbose:
        # Keeps backward compatibility with previous function return
        modified_fields = {key: value['saved'] for key, value in modified_fields.items()}
    return modified_fields
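This dirty-field tracking comes from the django-dirtyfields mixin in the MRO above. A rough usage sketch (the item code is hypothetical):

item = MediaItem.objects.get(code='CNRSMH_2010_001_01')   # hypothetical code
item.title = 'New title'
item.is_dirty()              # True
item.get_dirty_fields()      # original values keyed by field name, e.g. {'title': u'Old title'}
item.save_dirty_fields()     # persists only the fields reported as dirty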
def get_dom_name(
cls)
Convert the class name to a DOM element name
@classmethod
def get_dom_name(cls):
    "Convert the class name to a DOM element name"
    clsname = cls.__name__
    return clsname[0].lower() + clsname[1:]
def get_public_access_display(
*moreargs, **morekwargs)
def _curried(*moreargs, **morekwargs):
    return _curried_func(*(args + moreargs), **dict(kwargs, **morekwargs))
def get_revision(
self)
def get_revision(self):
    revisions = Revision.objects.filter(element_type=self.element_type,
                                        element_id=self.id).order_by('-time')
    if revisions:
        return revisions[0]
    else:
        return None
def get_source(
self)
def get_source(self):
    source = None
    source_type = None
    if self.file and os.path.exists(self.file.path):
        source = self.file.path
        source_type = 'file'
    elif self.url:
        source = self.url
        source_type = 'url'
    return source, source_type
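get_source() returns a (source, source_type) pair, preferring a readable local file over the URL. A minimal sketch of consuming it (the two helper functions are hypothetical):

source, source_type = item.get_source()
if source_type == 'file':
    process_local_file(source)    # hypothetical helper; source is a filesystem path
elif source_type == 'url':
    fetch_remote_media(source)    # hypothetical helper; source is the item's URL
else:
    pass                          # no media attached: both values are None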
def get_url(
self)
def get_url(self):
    return get_full_url(reverse('telemeta-item-detail', kwargs={'public_id': self.pk}))
def is_dirty(
self, check_relationship=False, check_m2m=None)
def is_dirty(self, check_relationship=False, check_m2m=None):
    return {} != self.get_dirty_fields(check_relationship=check_relationship,
                                       check_m2m=check_m2m)
def is_valid_code(
self, code)
Check if the item code is well formed
def is_valid_code(self, code):
    "Check if the item code is well formed"
    if not re.match('^' + self.collection.code, self.code):
        return False
    if self.collection.is_published:
        regex = '^' + item_published_code_regex + '$'
    else:
        regex = '^' + item_unpublished_code_regex + '$'
    if re.match(regex, code):
        return True
    return False
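Note that the prefix check reads self.code rather than the code argument, so in practice the method is called with the item's own code, exactly as clean() does. A sketch under that assumption (the collection code is hypothetical and the collection is taken to be unpublished):

# item.collection.code == 'CNRSMH_2010_001' (hypothetical)
item.code = 'CNRSMH_2010_001_01'
item.is_valid_code(item.code)    # True: right prefix and matches item_unpublished_code_regex

item.code = 'XXX_01'
item.is_valid_code(item.code)    # False: does not start with the collection code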
def keywords(
self)
def keywords(self):
    return ContextKeyword.objects.filter(item_relations__item=self)
def prepare_database_save(
self, unused)
def prepare_database_save(self, unused):
    if self.pk is None:
        raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self)
    return self.pk
def public_access_label(
self)
def public_access_label(self):
    if self.public_access == 'metadata':
        return _('Metadata only')
    elif self.public_access == 'full':
        return _('Sound and metadata')
    return _('Private data')
def required_fields(
cls)
@classmethod
def required_fields(cls):
    required = []
    for field in cls._meta.fields:
        if not field.blank:
            required.append(field)
    return required
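required_fields() returns the field objects declared without blank=True; listing their names is a one-liner:

required_names = [field.name for field in MediaItem.required_fields()]
# -> names of every field on this model that is not allowed to be blank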
def save(
self, force_insert=False, force_update=False, *args, **kwargs)
def save(self, force_insert=False, force_update=False, *args, **kwargs):
    super(MediaItem, self).save(force_insert, force_update, *args, **kwargs)
def save_base(
self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None)
Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending.
The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading.
def save_base(self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None): """ Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending. The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading. """ using = using or router.db_for_write(self.__class__, instance=self) assert not (force_insert and (force_update or update_fields)) assert update_fields is None or len(update_fields) > 0 cls = origin = self.__class__ # Skip proxies, but keep the origin as the proxy model. if cls._meta.proxy: cls = cls._meta.concrete_model meta = cls._meta if not meta.auto_created: signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using, update_fields=update_fields) with transaction.commit_on_success_unless_managed(using=using, savepoint=False): if not raw: self._save_parents(cls, using, update_fields) updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields) # Store the database on which the object was saved self._state.db = using # Once saved, this is no longer a to-be-added instance. self._state.adding = False # Signal that the save is complete if not meta.auto_created: signals.post_save.send(sender=origin, instance=self, created=(not updated), update_fields=update_fields, raw=raw, using=using)
def save_dirty_fields(
self)
def save_dirty_fields(self):
    dirty_fields = self.get_dirty_fields(check_relationship=True)
    save_specific_fields(self, dirty_fields)
def serializable_value(
self, field_name)
Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly.
Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method.
def serializable_value(self, field_name): """ Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly. Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method. """ try: field = self._meta.get_field_by_name(field_name)[0] except FieldDoesNotExist: return getattr(self, field_name) return getattr(self, field.attname)
def set_revision(
self, user)
Save a media object and add a revision
def set_revision(self, user):
    "Save a media object and add a revision"
    Revision.touch(self, user)
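Revision bookkeeping is done through set_revision() and read back with get_revision(). A sketch assuming a Django view context where request.user is available:

item.title = 'Corrected title'
item.save()
item.set_revision(request.user)   # records a Revision row for this element
item.get_revision().time          # timestamp of the latest revision (None if there is none yet)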
def size(
self)
def size(self):
    if self.file and os.path.exists(self.file.path):
        return self.file.size
    else:
        return 0
def to_dict(
self)
Return model fields as a dict of name/value pairs
def to_dict(self): "Return model fields as a dict of name/value pairs" fields_dict = {} for field in self._meta.fields: fields_dict[field.name] = getattr(self, field.name) return fields_dict
def to_dict_with_more(
self)
def to_dict_with_more(self): # metadata = model_to_dict(self, fields=[], exclude=self.exclude) metadata = self.to_dict() for key in self.exclude: if key in metadata.keys(): del metadata[key] metadata['url'] = self.get_url() revision = self.get_revision() if revision: time = unicode(revision.time) else: time = '' metadata['last_modification_date'] = time metadata['collection'] = self.collection.get_url() keywords = [] for keyword in self.keywords(): keywords.append(keyword.value) metadata['keywords'] = ';'.join(keywords) related_media_urls = [] for media in self.related.all(): if media.url: related_media_urls.append(media.url) else: try: url = get_full_url(reverse('telemeta-item-related', kwargs={'public_id': self.public_id, 'media_id': media.id})) except: url = '' related_media_urls.append(url) metadata['related_media_urls'] = ';'.join(related_media_urls) instruments = [] instrument_vernacular_names = [] performers = [] for performance in self.performances.all(): if performance.instrument: instruments.append(performance.instrument.name) if performance.alias: instrument_vernacular_names.append(performance.alias.name) if performance.musicians: performers.append(performance.musicians.replace(' et ', ';')) metadata['instruments'] = ';'.join(instruments) metadata['instrument_vernacular_names'] = ';'.join(instrument_vernacular_names) metadata['performers'] = ';'.join(performers) analyzers = ['channels', 'samplerate', 'duration', 'resolution', 'mime_type'] for analyzer_id in analyzers: analysis = MediaItemAnalysis.objects.filter(item=self, analyzer_id=analyzer_id) if analysis: if analyzer_id == 'duration': value = ':'.join([str('%.2d' % int(float(t))) for t in analysis[0].value.split(':')]) else: value = analysis[0].value metadata[analyzer_id] = value elif analyzer_id == 'duration': metadata[analyzer_id] = self.approx_duration else: metadata[analyzer_id] = '' metadata['file_size'] = unicode(self.size()) metadata['thumbnail'] = get_full_url(reverse('telemeta-item-visualize', kwargs={'public_id': self.public_id, 'grapher_id': 'waveform_centroid', 'width': 346, 'height': 130})) # One ID only identifiers = self.identifiers.all() if identifiers: identifier = identifiers[0] metadata['identifier_id'] = identifier.identifier metadata['identifier_type'] = identifier.type metadata['identifier_date'] = unicode(identifier.date_last) metadata['identifier_note'] = identifier.notes else: metadata['identifier_id'] = '' metadata['identifier_type'] = '' metadata['identifier_date'] = '' metadata['identifier_note'] = '' # Collection metadata['recording_context'] = self.collection.recording_context metadata['description_collection'] = self.collection.description metadata['status'] = self.collection.status metadata['original_format'] = self.collection.original_format metadata['physical_format'] = self.collection.physical_format metadata['year_published'] = self.collection.year_published metadata['publisher'] = self.collection.publisher metadata['publisher_collection'] = self.collection.publisher_collection metadata['reference_collection'] = self.collection.reference return metadata
def to_dom(
self)
Return the DOM representation of this media object
def to_dom(self): "Return the DOM representation of this media object" impl = getDOMImplementation() root = self.get_dom_name() doc = impl.createDocument(None, root, None) top = doc.documentElement top.setAttribute("id", str(self.pk)) fields = self.to_dict() for name, value in fields.iteritems(): element = doc.createElement(self.get_dom_field_name(name)) if isinstance(value, EnhancedModel): element.setAttribute('key', str(value.pk)) value = unicode(value) element.appendChild(doc.createTextNode(value)) top.appendChild(element) return doc
def to_list(
self)
Return model fields as a list
def to_list(self): "Return model fields as a list" fields_list = [] for field in self._meta.fields: fields_list.append({'name': field.name, 'value': unicode(getattr(self, field.name))}) return fields_list
def to_row(
self, tags)
def to_row(self, tags):
    row = []
    _dict = self.to_dict_with_more()
    for tag in tags:
        if tag in _dict.keys():
            row.append(_dict[tag])
        else:
            row.append('')
    return row
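to_row() pairs with to_dict_with_more() to flatten an item for tabular export. A rough CSV sketch with a hypothetical tag selection (Python 2 csv idiom, ASCII-only values assumed for brevity):

import csv

tags = ['title', 'code', 'collector', 'duration', 'keywords']   # hypothetical column selection
with open('items.csv', 'wb') as f:
    writer = csv.writer(f)
    writer.writerow(tags)
    for item in MediaItem.objects.all():
        writer.writerow(item.to_row(tags))   # missing tags come back as empty strings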
def unique_error_message(
self, model_class, unique_check)
def unique_error_message(self, model_class, unique_check): opts = model_class._meta model_name = capfirst(opts.verbose_name) # A unique field if len(unique_check) == 1: field_name = unique_check[0] field = opts.get_field(field_name) field_label = capfirst(field.verbose_name) # Insert the error into the error dict, very sneaky return field.error_messages['unique'] % { 'model_name': six.text_type(model_name), 'field_label': six.text_type(field_label) } # unique_together else: field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check] field_labels = get_text_list(field_labels, _('and')) return _("%(model_name)s with this %(field_label)s already exists.") % { 'model_name': six.text_type(model_name), 'field_label': six.text_type(field_labels) }
def validate_unique(
self, exclude=None)
Checks unique constraints on the model and raises ValidationError
if any failed.
def validate_unique(self, exclude=None): """ Checks unique constraints on the model and raises ``ValidationError`` if any failed. """ unique_checks, date_checks = self._get_unique_checks(exclude=exclude) errors = self._perform_unique_checks(unique_checks) date_errors = self._perform_date_checks(date_checks) for k, v in date_errors.items(): errors.setdefault(k, []).extend(v) if errors: raise ValidationError(errors)
class MediaItemAnalysis
Item analysis result computed by TimeSide
class MediaItemAnalysis(ModelCore): "Item analysis result computed by TimeSide" element_type = 'analysis' item = ForeignKey('MediaItem', related_name="analysis", verbose_name=_('item')) analyzer_id = CharField(_('id'), required=True) name = CharField(_('name')) value = CharField(_('value')) unit = CharField(_('unit')) class Meta(MetaCore): db_table = 'media_analysis' ordering = ['name'] def to_dict(self): if self.analyzer_id == 'duration': if '.' in self.value: value = self.value.split('.') self.value = '.'.join([value[0], value[1][:2]]) return {'id': self.analyzer_id, 'name': self.name, 'value': self.value, 'unit': self.unit}
Ancestors (in MRO)
- MediaItemAnalysis
- telemeta.models.core.ModelCore
- telemeta.models.core.EnhancedModel
- django.db.models.base.Model
- dirtyfields.dirtyfields.DirtyFieldsMixin
- __builtin__.object
Class variables
var DoesNotExist
var ENABLE_M2M_CHECK
var Meta
var MultipleObjectsReturned
var analyzer_id
var compare_function
var element_type
var item
var name
var objects
var unit
var value
Static methods
def get_dom_field_name(
field_name)
Convert the class name to a DOM element name
@staticmethod
def get_dom_field_name(field_name):
    "Convert the class name to a DOM element name"
    tokens = field_name.split('_')
    name = tokens[0]
    for t in tokens[1:]:
        name += t[0].upper() + t[1:]
    return name
Instance variables
var pk
Methods
def __init__(
self, *args, **kwargs)
def __init__(self, *args, **kwargs): signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs) # Set up the storage for instance state self._state = ModelState() # There is a rather weird disparity here; if kwargs, it's set, then args # overrides it. It should be one or the other; don't duplicate the work # The reason for the kwargs check is that standard iterator passes in by # args, and instantiation for iteration is 33% faster. args_len = len(args) if args_len > len(self._meta.concrete_fields): # Daft, but matches old exception sans the err msg. raise IndexError("Number of args exceeds number of fields") if not kwargs: fields_iter = iter(self._meta.concrete_fields) # The ordering of the zip calls matter - zip throws StopIteration # when an iter throws it. So if the first iter throws it, the second # is *not* consumed. We rely on this, so don't change the order # without changing the logic. for val, field in zip(args, fields_iter): setattr(self, field.attname, val) else: # Slower, kwargs-ready version. fields_iter = iter(self._meta.fields) for val, field in zip(args, fields_iter): setattr(self, field.attname, val) kwargs.pop(field.name, None) # Maintain compatibility with existing calls. if isinstance(field.rel, ManyToOneRel): kwargs.pop(field.attname, None) # Now we're left with the unprocessed fields that *must* come from # keywords, or default. for field in fields_iter: is_related_object = False # This slightly odd construct is so that we can access any # data-descriptor object (DeferredAttribute) without triggering its # __get__ method. if (field.attname not in kwargs and (isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute) or field.column is None)): # This field will be populated on request. continue if kwargs: if isinstance(field.rel, ForeignObjectRel): try: # Assume object instance was passed in. rel_obj = kwargs.pop(field.name) is_related_object = True except KeyError: try: # Object instance wasn't passed in -- must be an ID. val = kwargs.pop(field.attname) except KeyError: val = field.get_default() else: # Object instance was passed in. Special case: You can # pass in "None" for related objects if it's allowed. if rel_obj is None and field.null: val = None else: try: val = kwargs.pop(field.attname) except KeyError: # This is done with an exception rather than the # default argument on pop because we don't want # get_default() to be evaluated, and then not used. # Refs #12057. val = field.get_default() else: val = field.get_default() if is_related_object: # If we are passed a related instance, set it using the # field.name instead of field.attname (e.g. "user" instead of # "user_id") so that the object gets properly cached (and type # checked) by the RelatedObjectDescriptor. setattr(self, field.name, rel_obj) else: setattr(self, field.attname, val) if kwargs: for prop in list(kwargs): try: if isinstance(getattr(self.__class__, prop), property): setattr(self, prop, kwargs.pop(prop)) except AttributeError: pass if kwargs: raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0]) super(Model, self).__init__() signals.post_init.send(sender=self.__class__, instance=self)
def clean(
self)
Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS.
def clean(self): """ Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS. """ pass
def clean_fields(
self, exclude=None)
Cleans all fields and raises a ValidationError containing message_dict of all validation errors if any occur.
def clean_fields(self, exclude=None): """ Cleans all fields and raises a ValidationError containing message_dict of all validation errors if any occur. """ if exclude is None: exclude = [] errors = {} for f in self._meta.fields: if f.name in exclude: continue # Skip validation for empty fields with blank=True. The developer # is responsible for making sure they have a valid value. raw_value = getattr(self, f.attname) if f.blank and raw_value in f.empty_values: continue try: setattr(self, f.attname, f.clean(raw_value, self)) except ValidationError as e: errors[f.name] = e.error_list if errors: raise ValidationError(errors)
def date_error_message(
self, lookup_type, field, unique_for)
def date_error_message(self, lookup_type, field, unique_for):
    opts = self._meta
    return _("%(field_name)s must be unique for %(date_field)s %(lookup)s.") % {
        'field_name': six.text_type(capfirst(opts.get_field(field).verbose_name)),
        'date_field': six.text_type(capfirst(opts.get_field(unique_for).verbose_name)),
        'lookup': lookup_type,
    }
def delete(
self)
def delete(self):
    if not self.pk:
        raise Exception("Can't delete without a primary key")
    self.__class__.objects.filter(pk=self.pk).delete()
def field_label(
cls, field_name=None)
@classmethod
def field_label(cls, field_name=None):
    if field_name:
        try:
            return cls._meta.get_field(field_name).verbose_name
        except FieldDoesNotExist:
            try:
                return getattr(cls, field_name).verbose_name
            except AttributeError:
                return field_name
    else:
        return cls._meta.verbose_name
def full_clean(
self, exclude=None, validate_unique=True)
Calls clean_fields, clean, and validate_unique, on the model,
and raises a ValidationError
for any errors that occurred.
def full_clean(self, exclude=None, validate_unique=True): """ Calls clean_fields, clean, and validate_unique, on the model, and raises a ``ValidationError`` for any errors that occurred. """ errors = {} if exclude is None: exclude = [] try: self.clean_fields(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) # Form.clean() is run even if other validation fails, so do the # same with Model.clean() for consistency. try: self.clean() except ValidationError as e: errors = e.update_error_dict(errors) # Run unique checks, but only for fields that passed validation. if validate_unique: for name in errors.keys(): if name != NON_FIELD_ERRORS and name not in exclude: exclude.append(name) try: self.validate_unique(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) if errors: raise ValidationError(errors)
def get_dirty_fields(
self, check_relationship=False, check_m2m=None, verbose=False)
def get_dirty_fields(self, check_relationship=False, check_m2m=None, verbose=False): if self._state.adding: # If the object has not yet been saved in the database, all fields are considered dirty # for consistency (see https://github.com/romgar/django-dirtyfields/issues/65 for more details) pk_specified = self.pk is not None initial_dict = self._as_dict(check_relationship, include_primary_key=pk_specified) return initial_dict if check_m2m is not None and not self.ENABLE_M2M_CHECK: raise ValueError("You can't check m2m fields if ENABLE_M2M_CHECK is set to False") modified_fields = compare_states(self._as_dict(check_relationship), self._original_state, self.compare_function) if check_m2m: modified_m2m_fields = compare_states(check_m2m, self._original_m2m_state, self.compare_function) modified_fields.update(modified_m2m_fields) if not verbose: # Keeps backward compatibility with previous function return modified_fields = {key: value['saved'] for key, value in modified_fields.items()} return modified_fields
def get_dom_name(
cls)
Convert the class name to a DOM element name
@classmethod
def get_dom_name(cls):
    "Convert the class name to a DOM element name"
    clsname = cls.__name__
    return clsname[0].lower() + clsname[1:]
def is_dirty(
self, check_relationship=False, check_m2m=None)
def is_dirty(self, check_relationship=False, check_m2m=None):
    return {} != self.get_dirty_fields(check_relationship=check_relationship,
                                       check_m2m=check_m2m)
def prepare_database_save(
self, unused)
def prepare_database_save(self, unused):
    if self.pk is None:
        raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self)
    return self.pk
def required_fields(
cls)
@classmethod
def required_fields(cls):
    required = []
    for field in cls._meta.fields:
        if not field.blank:
            required.append(field)
    return required
def save(
self, force_insert=False, force_update=False, *args, **kwargs)
def save(self, force_insert=False, force_update=False, *args, **kwargs): required = self.required_fields() for field in required: if not getattr(self, field.name): raise RequiredFieldError(self, field) super(ModelCore, self).save(force_insert, force_update, *args, **kwargs)
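Both overrides above come from ModelCore: every field declared without blank=True is treated as mandatory, and save() raises RequiredFieldError (assumed to be defined in telemeta.models.core) instead of writing a row while one of them is empty. A small sketch listing what would be enforced for MediaItem:

from telemeta.models import MediaItem

# Names of every field that ModelCore.save() will enforce (blank=False).
for field in MediaItem.required_fields():
    print(field.name)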
def save_base(
self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None)
Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending.
The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading.
def save_base(self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None): """ Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending. The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading. """ using = using or router.db_for_write(self.__class__, instance=self) assert not (force_insert and (force_update or update_fields)) assert update_fields is None or len(update_fields) > 0 cls = origin = self.__class__ # Skip proxies, but keep the origin as the proxy model. if cls._meta.proxy: cls = cls._meta.concrete_model meta = cls._meta if not meta.auto_created: signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using, update_fields=update_fields) with transaction.commit_on_success_unless_managed(using=using, savepoint=False): if not raw: self._save_parents(cls, using, update_fields) updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields) # Store the database on which the object was saved self._state.db = using # Once saved, this is no longer a to-be-added instance. self._state.adding = False # Signal that the save is complete if not meta.auto_created: signals.post_save.send(sender=origin, instance=self, created=(not updated), update_fields=update_fields, raw=raw, using=using)
def save_dirty_fields(
self)
def save_dirty_fields(self): dirty_fields = self.get_dirty_fields(check_relationship=True) save_specific_fields(self, dirty_fields)
def serializable_value(
self, field_name)
Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly.
Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method.
def serializable_value(self, field_name): """ Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly. Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method. """ try: field = self._meta.get_field_by_name(field_name)[0] except FieldDoesNotExist: return getattr(self, field_name) return getattr(self, field.attname)
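For instance, on a MediaItem the method returns the raw foreign key column rather than the related object, which is usually what serializers and forms need (sketch, hypothetical primary key):

from telemeta.models import MediaItem

item = MediaItem.objects.get(pk=1)          # hypothetical primary key

item.serializable_value('title')            # the plain title string
item.serializable_value('collection')       # the collection_id value, not the object
item.collection                             # the related MediaCollection instance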
def to_dict(
self)
def to_dict(self):
    if self.analyzer_id == 'duration':
        if '.' in self.value:
            value = self.value.split('.')
            self.value = '.'.join([value[0], value[1][:2]])
    return {'id': self.analyzer_id, 'name': self.name, 'value': self.value, 'unit': self.unit}
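This to_dict() is specific to the analysis-result model documented in this block (note the analyzer_id, value and unit attributes): for the 'duration' analyzer it truncates the fractional part of the stored string to two digits. The string handling itself can be checked in isolation:

# Pure-string illustration of the truncation applied when analyzer_id == 'duration'.
value = '125.6789012'
if '.' in value:
    parts = value.split('.')
    value = '.'.join([parts[0], parts[1][:2]])
print(value)   # -> '125.67'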
def to_dom(
self)
Return the DOM representation of this media object
def to_dom(self): "Return the DOM representation of this media object" impl = getDOMImplementation() root = self.get_dom_name() doc = impl.createDocument(None, root, None) top = doc.documentElement top.setAttribute("id", str(self.pk)) fields = self.to_dict() for name, value in fields.iteritems(): element = doc.createElement(self.get_dom_field_name(name)) if isinstance(value, EnhancedModel): element.setAttribute('key', str(value.pk)) value = unicode(value) element.appendChild(doc.createTextNode(value)) top.appendChild(element) return doc
def to_list(
self)
Return model fields as a list
def to_list(self): "Return model fields as a list" fields_list = [] for field in self._meta.fields: fields_list.append({'name': field.name, 'value': unicode(getattr(self, field.name))}) return fields_list
def unique_error_message(
self, model_class, unique_check)
def unique_error_message(self, model_class, unique_check): opts = model_class._meta model_name = capfirst(opts.verbose_name) # A unique field if len(unique_check) == 1: field_name = unique_check[0] field = opts.get_field(field_name) field_label = capfirst(field.verbose_name) # Insert the error into the error dict, very sneaky return field.error_messages['unique'] % { 'model_name': six.text_type(model_name), 'field_label': six.text_type(field_label) } # unique_together else: field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check] field_labels = get_text_list(field_labels, _('and')) return _("%(model_name)s with this %(field_label)s already exists.") % { 'model_name': six.text_type(model_name), 'field_label': six.text_type(field_labels) }
def validate_unique(
self, exclude=None)
Checks unique constraints on the model and raises ValidationError
if any failed.
def validate_unique(self, exclude=None): """ Checks unique constraints on the model and raises ``ValidationError`` if any failed. """ unique_checks, date_checks = self._get_unique_checks(exclude=exclude) errors = self._perform_unique_checks(unique_checks) date_errors = self._perform_date_checks(date_checks) for k, v in date_errors.items(): errors.setdefault(k, []).extend(v) if errors: raise ValidationError(errors)
class MediaItemIdentifier
Item identifier
class MediaItemIdentifier(Identifier):
    """Item identifier"""

    item = ForeignKey(MediaItem, related_name="identifiers", verbose_name=_('item'))

    class Meta(MetaCore):
        db_table = 'media_item_identifier'
        verbose_name = _('item identifier')
        verbose_name_plural = _('item identifiers')
        unique_together = ('identifier', 'item')
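A usage sketch: attaching an external identifier (a DOI, for instance) to an item. The identifier field itself is inherited from the Identifier base class and its exact constraints are assumed here; the ('identifier', 'item') constraint means the same value cannot be attached twice to one item.

from telemeta.models import MediaItem, MediaItemIdentifier

item = MediaItem.objects.get(pk=1)                  # hypothetical primary key
ident = MediaItemIdentifier(
    item=item,
    identifier='doi:10.1000/example-123',           # hypothetical value; field from Identifier
)
ident.full_clean()    # enforces unique_together = ('identifier', 'item')
ident.save()

item.identifiers.all()    # reverse relation via related_name="identifiers"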
Ancestors (in MRO)
- MediaItemIdentifier
- telemeta.models.identifier.Identifier
- telemeta.models.core.ModelCore
- telemeta.models.core.EnhancedModel
- django.db.models.base.Model
- dirtyfields.dirtyfields.DirtyFieldsMixin
- __builtin__.object
Class variables
var DoesNotExist
var ENABLE_M2M_CHECK
var Meta
var MultipleObjectsReturned
var compare_function
var item
var objects
var type
Static methods
def get_dom_field_name(
field_name)
Convert a snake_case field name to a camelCase DOM element name
@staticmethod
def get_dom_field_name(field_name):
    "Convert the class name to a DOM element name"
    tokens = field_name.split('_')
    name = tokens[0]
    for t in tokens[1:]:
        name += t[0].upper() + t[1:]
    return name
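Despite the wording of the original docstring, this helper converts a snake_case field name into the camelCase element name used by to_dom(). It is a plain static method, so it can be exercised directly (assuming the project settings are importable):

from telemeta.models import MediaItemIdentifier

print(MediaItemIdentifier.get_dom_field_name('recorded_from_date'))   # -> 'recordedFromDate'
print(MediaItemIdentifier.get_dom_field_name('public_access'))        # -> 'publicAccess'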
Instance variables
var pk
Methods
def __init__(
self, *args, **kwargs)
def __init__(self, *args, **kwargs): signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs) # Set up the storage for instance state self._state = ModelState() # There is a rather weird disparity here; if kwargs, it's set, then args # overrides it. It should be one or the other; don't duplicate the work # The reason for the kwargs check is that standard iterator passes in by # args, and instantiation for iteration is 33% faster. args_len = len(args) if args_len > len(self._meta.concrete_fields): # Daft, but matches old exception sans the err msg. raise IndexError("Number of args exceeds number of fields") if not kwargs: fields_iter = iter(self._meta.concrete_fields) # The ordering of the zip calls matter - zip throws StopIteration # when an iter throws it. So if the first iter throws it, the second # is *not* consumed. We rely on this, so don't change the order # without changing the logic. for val, field in zip(args, fields_iter): setattr(self, field.attname, val) else: # Slower, kwargs-ready version. fields_iter = iter(self._meta.fields) for val, field in zip(args, fields_iter): setattr(self, field.attname, val) kwargs.pop(field.name, None) # Maintain compatibility with existing calls. if isinstance(field.rel, ManyToOneRel): kwargs.pop(field.attname, None) # Now we're left with the unprocessed fields that *must* come from # keywords, or default. for field in fields_iter: is_related_object = False # This slightly odd construct is so that we can access any # data-descriptor object (DeferredAttribute) without triggering its # __get__ method. if (field.attname not in kwargs and (isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute) or field.column is None)): # This field will be populated on request. continue if kwargs: if isinstance(field.rel, ForeignObjectRel): try: # Assume object instance was passed in. rel_obj = kwargs.pop(field.name) is_related_object = True except KeyError: try: # Object instance wasn't passed in -- must be an ID. val = kwargs.pop(field.attname) except KeyError: val = field.get_default() else: # Object instance was passed in. Special case: You can # pass in "None" for related objects if it's allowed. if rel_obj is None and field.null: val = None else: try: val = kwargs.pop(field.attname) except KeyError: # This is done with an exception rather than the # default argument on pop because we don't want # get_default() to be evaluated, and then not used. # Refs #12057. val = field.get_default() else: val = field.get_default() if is_related_object: # If we are passed a related instance, set it using the # field.name instead of field.attname (e.g. "user" instead of # "user_id") so that the object gets properly cached (and type # checked) by the RelatedObjectDescriptor. setattr(self, field.name, rel_obj) else: setattr(self, field.attname, val) if kwargs: for prop in list(kwargs): try: if isinstance(getattr(self.__class__, prop), property): setattr(self, prop, kwargs.pop(prop)) except AttributeError: pass if kwargs: raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0]) super(Model, self).__init__() signals.post_init.send(sender=self.__class__, instance=self)
def clean(
self)
Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS.
def clean(self): """ Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS. """ pass
def clean_fields(
self, exclude=None)
Cleans all fields and raises a ValidationError containing message_dict of all validation errors if any occur.
def clean_fields(self, exclude=None): """ Cleans all fields and raises a ValidationError containing message_dict of all validation errors if any occur. """ if exclude is None: exclude = [] errors = {} for f in self._meta.fields: if f.name in exclude: continue # Skip validation for empty fields with blank=True. The developer # is responsible for making sure they have a valid value. raw_value = getattr(self, f.attname) if f.blank and raw_value in f.empty_values: continue try: setattr(self, f.attname, f.clean(raw_value, self)) except ValidationError as e: errors[f.name] = e.error_list if errors: raise ValidationError(errors)
def date_error_message(
self, lookup_type, field, unique_for)
def date_error_message(self, lookup_type, field, unique_for): opts = self._meta return _("%(field_name)s must be unique for %(date_field)s %(lookup)s.") % { 'field_name': six.text_type(capfirst(opts.get_field(field).verbose_name)), 'date_field': six.text_type(capfirst(opts.get_field(unique_for).verbose_name)), 'lookup': lookup_type, }
def delete(
self)
def delete(self): if not self.pk: raise Exception("Can't delete without a primary key") self.__class__.objects.filter(pk=self.pk).delete()
def field_label(
cls, field_name=None)
@classmethod def field_label(cls, field_name=None): if field_name: try: return cls._meta.get_field(field_name).verbose_name except FieldDoesNotExist: try: return getattr(cls, field_name).verbose_name except AttributeError: return field_name else: return cls._meta.verbose_name
def full_clean(
self, exclude=None, validate_unique=True)
Calls clean_fields, clean, and validate_unique on the model, and raises a ValidationError for any errors that occurred.
def full_clean(self, exclude=None, validate_unique=True): """ Calls clean_fields, clean, and validate_unique, on the model, and raises a ``ValidationError`` for any errors that occurred. """ errors = {} if exclude is None: exclude = [] try: self.clean_fields(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) # Form.clean() is run even if other validation fails, so do the # same with Model.clean() for consistency. try: self.clean() except ValidationError as e: errors = e.update_error_dict(errors) # Run unique checks, but only for fields that passed validation. if validate_unique: for name in errors.keys(): if name != NON_FIELD_ERRORS and name not in exclude: exclude.append(name) try: self.validate_unique(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) if errors: raise ValidationError(errors)
def get_dirty_fields(
self, check_relationship=False, check_m2m=None, verbose=False)
def get_dirty_fields(self, check_relationship=False, check_m2m=None, verbose=False): if self._state.adding: # If the object has not yet been saved in the database, all fields are considered dirty # for consistency (see https://github.com/romgar/django-dirtyfields/issues/65 for more details) pk_specified = self.pk is not None initial_dict = self._as_dict(check_relationship, include_primary_key=pk_specified) return initial_dict if check_m2m is not None and not self.ENABLE_M2M_CHECK: raise ValueError("You can't check m2m fields if ENABLE_M2M_CHECK is set to False") modified_fields = compare_states(self._as_dict(check_relationship), self._original_state, self.compare_function) if check_m2m: modified_m2m_fields = compare_states(check_m2m, self._original_m2m_state, self.compare_function) modified_fields.update(modified_m2m_fields) if not verbose: # Keeps backward compatibility with previous function return modified_fields = {key: value['saved'] for key, value in modified_fields.items()} return modified_fields
def get_dom_name(
cls)
Convert the class name to a DOM element name
@classmethod def get_dom_name(cls): "Convert the class name to a DOM element name" clsname = cls.__name__ return clsname[0].lower() + clsname[1:]
def is_dirty(
self, check_relationship=False, check_m2m=None)
def is_dirty(self, check_relationship=False, check_m2m=None): return {} != self.get_dirty_fields(check_relationship=check_relationship, check_m2m=check_m2m)
def prepare_database_save(
self, unused)
def prepare_database_save(self, unused): if self.pk is None: raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self) return self.pk
def required_fields(
cls)
@classmethod def required_fields(cls): required = [] for field in cls._meta.fields: if not field.blank: required.append(field) return required
def save(
self, force_insert=False, force_update=False, *args, **kwargs)
def save(self, force_insert=False, force_update=False, *args, **kwargs): required = self.required_fields() for field in required: if not getattr(self, field.name): raise RequiredFieldError(self, field) super(ModelCore, self).save(force_insert, force_update, *args, **kwargs)
def save_base(
self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None)
Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending.
The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading.
def save_base(self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None): """ Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending. The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading. """ using = using or router.db_for_write(self.__class__, instance=self) assert not (force_insert and (force_update or update_fields)) assert update_fields is None or len(update_fields) > 0 cls = origin = self.__class__ # Skip proxies, but keep the origin as the proxy model. if cls._meta.proxy: cls = cls._meta.concrete_model meta = cls._meta if not meta.auto_created: signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using, update_fields=update_fields) with transaction.commit_on_success_unless_managed(using=using, savepoint=False): if not raw: self._save_parents(cls, using, update_fields) updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields) # Store the database on which the object was saved self._state.db = using # Once saved, this is no longer a to-be-added instance. self._state.adding = False # Signal that the save is complete if not meta.auto_created: signals.post_save.send(sender=origin, instance=self, created=(not updated), update_fields=update_fields, raw=raw, using=using)
def save_dirty_fields(
self)
def save_dirty_fields(self): dirty_fields = self.get_dirty_fields(check_relationship=True) save_specific_fields(self, dirty_fields)
def serializable_value(
self, field_name)
Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly.
Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method.
def serializable_value(self, field_name): """ Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly. Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method. """ try: field = self._meta.get_field_by_name(field_name)[0] except FieldDoesNotExist: return getattr(self, field_name) return getattr(self, field.attname)
def to_dict(
self)
Return model fields as a dict of name/value pairs
def to_dict(self): "Return model fields as a dict of name/value pairs" fields_dict = {} for field in self._meta.fields: fields_dict[field.name] = getattr(self, field.name) return fields_dict
def to_dom(
self)
Return the DOM representation of this media object
def to_dom(self): "Return the DOM representation of this media object" impl = getDOMImplementation() root = self.get_dom_name() doc = impl.createDocument(None, root, None) top = doc.documentElement top.setAttribute("id", str(self.pk)) fields = self.to_dict() for name, value in fields.iteritems(): element = doc.createElement(self.get_dom_field_name(name)) if isinstance(value, EnhancedModel): element.setAttribute('key', str(value.pk)) value = unicode(value) element.appendChild(doc.createTextNode(value)) top.appendChild(element) return doc
def to_list(
self)
Return model fields as a list
def to_list(self): "Return model fields as a list" fields_list = [] for field in self._meta.fields: fields_list.append({'name': field.name, 'value': unicode(getattr(self, field.name))}) return fields_list
def unique_error_message(
self, model_class, unique_check)
def unique_error_message(self, model_class, unique_check): opts = model_class._meta model_name = capfirst(opts.verbose_name) # A unique field if len(unique_check) == 1: field_name = unique_check[0] field = opts.get_field(field_name) field_label = capfirst(field.verbose_name) # Insert the error into the error dict, very sneaky return field.error_messages['unique'] % { 'model_name': six.text_type(model_name), 'field_label': six.text_type(field_label) } # unique_together else: field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check] field_labels = get_text_list(field_labels, _('and')) return _("%(model_name)s with this %(field_label)s already exists.") % { 'model_name': six.text_type(model_name), 'field_label': six.text_type(field_labels) }
def validate_unique(
self, exclude=None)
Checks unique constraints on the model and raises ValidationError
if any failed.
def validate_unique(self, exclude=None): """ Checks unique constraints on the model and raises ``ValidationError`` if any failed. """ unique_checks, date_checks = self._get_unique_checks(exclude=exclude) errors = self._perform_unique_checks(unique_checks) date_errors = self._perform_date_checks(date_checks) for k, v in date_errors.items(): errors.setdefault(k, []).extend(v) if errors: raise ValidationError(errors)
class MediaItemKeyword
Item keyword
class MediaItemKeyword(ModelCore):
    "Item keyword"

    item = ForeignKey('MediaItem', verbose_name=_('item'), related_name="keyword_relations")
    keyword = ForeignKey('ContextKeyword', verbose_name=_('keyword'), related_name="item_relations")

    class Meta(MetaCore):
        db_table = 'media_item_keywords'
        unique_together = (('item', 'keyword'),)
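A sketch of tagging an item with a context keyword through this relation model; the field name used on ContextKeyword is an assumption. The (item, keyword) pair is unique, so tagging the same item twice with one keyword is rejected at validation time.

from telemeta.models import ContextKeyword, MediaItem, MediaItemKeyword

item = MediaItem.objects.get(pk=1)                                   # hypothetical primary key
keyword, created = ContextKeyword.objects.get_or_create(value='lullaby')   # 'value' is assumed

relation = MediaItemKeyword(item=item, keyword=keyword)
relation.full_clean()     # enforces unique_together = (('item', 'keyword'),)
relation.save()

item.keyword_relations.all()    # reverse side, related_name="keyword_relations"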
Ancestors (in MRO)
- MediaItemKeyword
- telemeta.models.core.ModelCore
- telemeta.models.core.EnhancedModel
- django.db.models.base.Model
- dirtyfields.dirtyfields.DirtyFieldsMixin
- __builtin__.object
Class variables
var DoesNotExist
var ENABLE_M2M_CHECK
var Meta
var MultipleObjectsReturned
var compare_function
var item
var keyword
var objects
Static methods
def get_dom_field_name(
field_name)
Convert a snake_case field name to a camelCase DOM element name
@staticmethod
def get_dom_field_name(field_name):
    "Convert the class name to a DOM element name"
    tokens = field_name.split('_')
    name = tokens[0]
    for t in tokens[1:]:
        name += t[0].upper() + t[1:]
    return name
Instance variables
var pk
Methods
def __init__(
self, *args, **kwargs)
def __init__(self, *args, **kwargs): signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs) # Set up the storage for instance state self._state = ModelState() # There is a rather weird disparity here; if kwargs, it's set, then args # overrides it. It should be one or the other; don't duplicate the work # The reason for the kwargs check is that standard iterator passes in by # args, and instantiation for iteration is 33% faster. args_len = len(args) if args_len > len(self._meta.concrete_fields): # Daft, but matches old exception sans the err msg. raise IndexError("Number of args exceeds number of fields") if not kwargs: fields_iter = iter(self._meta.concrete_fields) # The ordering of the zip calls matter - zip throws StopIteration # when an iter throws it. So if the first iter throws it, the second # is *not* consumed. We rely on this, so don't change the order # without changing the logic. for val, field in zip(args, fields_iter): setattr(self, field.attname, val) else: # Slower, kwargs-ready version. fields_iter = iter(self._meta.fields) for val, field in zip(args, fields_iter): setattr(self, field.attname, val) kwargs.pop(field.name, None) # Maintain compatibility with existing calls. if isinstance(field.rel, ManyToOneRel): kwargs.pop(field.attname, None) # Now we're left with the unprocessed fields that *must* come from # keywords, or default. for field in fields_iter: is_related_object = False # This slightly odd construct is so that we can access any # data-descriptor object (DeferredAttribute) without triggering its # __get__ method. if (field.attname not in kwargs and (isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute) or field.column is None)): # This field will be populated on request. continue if kwargs: if isinstance(field.rel, ForeignObjectRel): try: # Assume object instance was passed in. rel_obj = kwargs.pop(field.name) is_related_object = True except KeyError: try: # Object instance wasn't passed in -- must be an ID. val = kwargs.pop(field.attname) except KeyError: val = field.get_default() else: # Object instance was passed in. Special case: You can # pass in "None" for related objects if it's allowed. if rel_obj is None and field.null: val = None else: try: val = kwargs.pop(field.attname) except KeyError: # This is done with an exception rather than the # default argument on pop because we don't want # get_default() to be evaluated, and then not used. # Refs #12057. val = field.get_default() else: val = field.get_default() if is_related_object: # If we are passed a related instance, set it using the # field.name instead of field.attname (e.g. "user" instead of # "user_id") so that the object gets properly cached (and type # checked) by the RelatedObjectDescriptor. setattr(self, field.name, rel_obj) else: setattr(self, field.attname, val) if kwargs: for prop in list(kwargs): try: if isinstance(getattr(self.__class__, prop), property): setattr(self, prop, kwargs.pop(prop)) except AttributeError: pass if kwargs: raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0]) super(Model, self).__init__() signals.post_init.send(sender=self.__class__, instance=self)
def clean(
self)
Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS.
def clean(self): """ Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS. """ pass
def clean_fields(
self, exclude=None)
Cleans all fields and raises a ValidationError containing message_dict of all validation errors if any occur.
def clean_fields(self, exclude=None): """ Cleans all fields and raises a ValidationError containing message_dict of all validation errors if any occur. """ if exclude is None: exclude = [] errors = {} for f in self._meta.fields: if f.name in exclude: continue # Skip validation for empty fields with blank=True. The developer # is responsible for making sure they have a valid value. raw_value = getattr(self, f.attname) if f.blank and raw_value in f.empty_values: continue try: setattr(self, f.attname, f.clean(raw_value, self)) except ValidationError as e: errors[f.name] = e.error_list if errors: raise ValidationError(errors)
def date_error_message(
self, lookup_type, field, unique_for)
def date_error_message(self, lookup_type, field, unique_for): opts = self._meta return _("%(field_name)s must be unique for %(date_field)s %(lookup)s.") % { 'field_name': six.text_type(capfirst(opts.get_field(field).verbose_name)), 'date_field': six.text_type(capfirst(opts.get_field(unique_for).verbose_name)), 'lookup': lookup_type, }
def delete(
self)
def delete(self): if not self.pk: raise Exception("Can't delete without a primary key") self.__class__.objects.filter(pk=self.pk).delete()
def field_label(
cls, field_name=None)
@classmethod def field_label(cls, field_name=None): if field_name: try: return cls._meta.get_field(field_name).verbose_name except FieldDoesNotExist: try: return getattr(cls, field_name).verbose_name except AttributeError: return field_name else: return cls._meta.verbose_name
def full_clean(
self, exclude=None, validate_unique=True)
Calls clean_fields, clean, and validate_unique on the model, and raises a ValidationError for any errors that occurred.
def full_clean(self, exclude=None, validate_unique=True): """ Calls clean_fields, clean, and validate_unique, on the model, and raises a ``ValidationError`` for any errors that occurred. """ errors = {} if exclude is None: exclude = [] try: self.clean_fields(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) # Form.clean() is run even if other validation fails, so do the # same with Model.clean() for consistency. try: self.clean() except ValidationError as e: errors = e.update_error_dict(errors) # Run unique checks, but only for fields that passed validation. if validate_unique: for name in errors.keys(): if name != NON_FIELD_ERRORS and name not in exclude: exclude.append(name) try: self.validate_unique(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) if errors: raise ValidationError(errors)
def get_dirty_fields(
self, check_relationship=False, check_m2m=None, verbose=False)
def get_dirty_fields(self, check_relationship=False, check_m2m=None, verbose=False): if self._state.adding: # If the object has not yet been saved in the database, all fields are considered dirty # for consistency (see https://github.com/romgar/django-dirtyfields/issues/65 for more details) pk_specified = self.pk is not None initial_dict = self._as_dict(check_relationship, include_primary_key=pk_specified) return initial_dict if check_m2m is not None and not self.ENABLE_M2M_CHECK: raise ValueError("You can't check m2m fields if ENABLE_M2M_CHECK is set to False") modified_fields = compare_states(self._as_dict(check_relationship), self._original_state, self.compare_function) if check_m2m: modified_m2m_fields = compare_states(check_m2m, self._original_m2m_state, self.compare_function) modified_fields.update(modified_m2m_fields) if not verbose: # Keeps backward compatibility with previous function return modified_fields = {key: value['saved'] for key, value in modified_fields.items()} return modified_fields
def get_dom_name(
cls)
Convert the class name to a DOM element name
@classmethod def get_dom_name(cls): "Convert the class name to a DOM element name" clsname = cls.__name__ return clsname[0].lower() + clsname[1:]
def is_dirty(
self, check_relationship=False, check_m2m=None)
def is_dirty(self, check_relationship=False, check_m2m=None): return {} != self.get_dirty_fields(check_relationship=check_relationship, check_m2m=check_m2m)
def prepare_database_save(
self, unused)
def prepare_database_save(self, unused): if self.pk is None: raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self) return self.pk
def required_fields(
cls)
@classmethod def required_fields(cls): required = [] for field in cls._meta.fields: if not field.blank: required.append(field) return required
def save(
self, force_insert=False, force_update=False, *args, **kwargs)
def save(self, force_insert=False, force_update=False, *args, **kwargs): required = self.required_fields() for field in required: if not getattr(self, field.name): raise RequiredFieldError(self, field) super(ModelCore, self).save(force_insert, force_update, *args, **kwargs)
def save_base(
self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None)
Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending.
The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading.
def save_base(self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None): """ Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending. The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading. """ using = using or router.db_for_write(self.__class__, instance=self) assert not (force_insert and (force_update or update_fields)) assert update_fields is None or len(update_fields) > 0 cls = origin = self.__class__ # Skip proxies, but keep the origin as the proxy model. if cls._meta.proxy: cls = cls._meta.concrete_model meta = cls._meta if not meta.auto_created: signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using, update_fields=update_fields) with transaction.commit_on_success_unless_managed(using=using, savepoint=False): if not raw: self._save_parents(cls, using, update_fields) updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields) # Store the database on which the object was saved self._state.db = using # Once saved, this is no longer a to-be-added instance. self._state.adding = False # Signal that the save is complete if not meta.auto_created: signals.post_save.send(sender=origin, instance=self, created=(not updated), update_fields=update_fields, raw=raw, using=using)
def save_dirty_fields(
self)
def save_dirty_fields(self): dirty_fields = self.get_dirty_fields(check_relationship=True) save_specific_fields(self, dirty_fields)
def serializable_value(
self, field_name)
Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly.
Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method.
def serializable_value(self, field_name): """ Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly. Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method. """ try: field = self._meta.get_field_by_name(field_name)[0] except FieldDoesNotExist: return getattr(self, field_name) return getattr(self, field.attname)
def to_dict(
self)
Return model fields as a dict of name/value pairs
def to_dict(self): "Return model fields as a dict of name/value pairs" fields_dict = {} for field in self._meta.fields: fields_dict[field.name] = getattr(self, field.name) return fields_dict
def to_dom(
self)
Return the DOM representation of this media object
def to_dom(self): "Return the DOM representation of this media object" impl = getDOMImplementation() root = self.get_dom_name() doc = impl.createDocument(None, root, None) top = doc.documentElement top.setAttribute("id", str(self.pk)) fields = self.to_dict() for name, value in fields.iteritems(): element = doc.createElement(self.get_dom_field_name(name)) if isinstance(value, EnhancedModel): element.setAttribute('key', str(value.pk)) value = unicode(value) element.appendChild(doc.createTextNode(value)) top.appendChild(element) return doc
def to_list(
self)
Return model fields as a list
def to_list(self): "Return model fields as a list" fields_list = [] for field in self._meta.fields: fields_list.append({'name': field.name, 'value': unicode(getattr(self, field.name))}) return fields_list
def unique_error_message(
self, model_class, unique_check)
def unique_error_message(self, model_class, unique_check): opts = model_class._meta model_name = capfirst(opts.verbose_name) # A unique field if len(unique_check) == 1: field_name = unique_check[0] field = opts.get_field(field_name) field_label = capfirst(field.verbose_name) # Insert the error into the error dict, very sneaky return field.error_messages['unique'] % { 'model_name': six.text_type(model_name), 'field_label': six.text_type(field_label) } # unique_together else: field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check] field_labels = get_text_list(field_labels, _('and')) return _("%(model_name)s with this %(field_label)s already exists.") % { 'model_name': six.text_type(model_name), 'field_label': six.text_type(field_labels) }
def validate_unique(
self, exclude=None)
Checks unique constraints on the model and raises ValidationError
if any failed.
def validate_unique(self, exclude=None): """ Checks unique constraints on the model and raises ``ValidationError`` if any failed. """ unique_checks, date_checks = self._get_unique_checks(exclude=exclude) errors = self._perform_unique_checks(unique_checks) date_errors = self._perform_date_checks(date_checks) for k, v in date_errors.items(): errors.setdefault(k, []).extend(v) if errors: raise ValidationError(errors)
class MediaItemMarker
2D marker object: text value vs. time (in seconds)
class MediaItemMarker(MediaResource):
    "2D marker object : text value vs. time (in seconds)"

    element_type = 'marker'

    item = ForeignKey('MediaItem', related_name="markers", verbose_name=_('item'))
    public_id = CharField(_('public_id'), required=True)
    time = FloatField(_('time (s)'))
    title = CharField(_('title'))
    date = DateTimeField(_('date'), auto_now=True)
    description = TextField(_('description'))
    author = ForeignKey(User, related_name="markers", verbose_name=_('author'), blank=True, null=True)

    class Meta(MetaCore):
        db_table = 'media_markers'
        ordering = ['time']

    def __unicode__(self):
        if self.title:
            return self.title
        else:
            return self.public_id
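A sketch of placing a time marker on an item; public_id is a free identifier (a UUID works well) and time is expressed in seconds from the start of the sound file:

import uuid

from telemeta.models import MediaItem, MediaItemMarker

item = MediaItem.objects.get(pk=1)      # hypothetical primary key

marker = MediaItemMarker(
    item=item,
    public_id=str(uuid.uuid4()),
    time=42.5,                          # seconds
    title='Chorus entry',
    description='Choir joins the soloist',
)
marker.save()

item.markers.all()    # markers come back ordered by time (Meta.ordering)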
Ancestors (in MRO)
- MediaItemMarker
- telemeta.models.resource.MediaResource
- telemeta.models.core.ModelCore
- telemeta.models.core.EnhancedModel
- django.db.models.base.Model
- dirtyfields.dirtyfields.DirtyFieldsMixin
- __builtin__.object
Class variables
var DoesNotExist
var ENABLE_M2M_CHECK
var Meta
var MultipleObjectsReturned
var compare_function
var date
var description
var element_type
var item
var objects
var public_id
var time
var title
Static methods
def get_dom_field_name(
field_name)
Convert a snake_case field name to a camelCase DOM element name
@staticmethod
def get_dom_field_name(field_name):
    "Convert the class name to a DOM element name"
    tokens = field_name.split('_')
    name = tokens[0]
    for t in tokens[1:]:
        name += t[0].upper() + t[1:]
    return name
Instance variables
var pk
Methods
def __init__(
self, *args, **kwargs)
def __init__(self, *args, **kwargs): signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs) # Set up the storage for instance state self._state = ModelState() # There is a rather weird disparity here; if kwargs, it's set, then args # overrides it. It should be one or the other; don't duplicate the work # The reason for the kwargs check is that standard iterator passes in by # args, and instantiation for iteration is 33% faster. args_len = len(args) if args_len > len(self._meta.concrete_fields): # Daft, but matches old exception sans the err msg. raise IndexError("Number of args exceeds number of fields") if not kwargs: fields_iter = iter(self._meta.concrete_fields) # The ordering of the zip calls matter - zip throws StopIteration # when an iter throws it. So if the first iter throws it, the second # is *not* consumed. We rely on this, so don't change the order # without changing the logic. for val, field in zip(args, fields_iter): setattr(self, field.attname, val) else: # Slower, kwargs-ready version. fields_iter = iter(self._meta.fields) for val, field in zip(args, fields_iter): setattr(self, field.attname, val) kwargs.pop(field.name, None) # Maintain compatibility with existing calls. if isinstance(field.rel, ManyToOneRel): kwargs.pop(field.attname, None) # Now we're left with the unprocessed fields that *must* come from # keywords, or default. for field in fields_iter: is_related_object = False # This slightly odd construct is so that we can access any # data-descriptor object (DeferredAttribute) without triggering its # __get__ method. if (field.attname not in kwargs and (isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute) or field.column is None)): # This field will be populated on request. continue if kwargs: if isinstance(field.rel, ForeignObjectRel): try: # Assume object instance was passed in. rel_obj = kwargs.pop(field.name) is_related_object = True except KeyError: try: # Object instance wasn't passed in -- must be an ID. val = kwargs.pop(field.attname) except KeyError: val = field.get_default() else: # Object instance was passed in. Special case: You can # pass in "None" for related objects if it's allowed. if rel_obj is None and field.null: val = None else: try: val = kwargs.pop(field.attname) except KeyError: # This is done with an exception rather than the # default argument on pop because we don't want # get_default() to be evaluated, and then not used. # Refs #12057. val = field.get_default() else: val = field.get_default() if is_related_object: # If we are passed a related instance, set it using the # field.name instead of field.attname (e.g. "user" instead of # "user_id") so that the object gets properly cached (and type # checked) by the RelatedObjectDescriptor. setattr(self, field.name, rel_obj) else: setattr(self, field.attname, val) if kwargs: for prop in list(kwargs): try: if isinstance(getattr(self.__class__, prop), property): setattr(self, prop, kwargs.pop(prop)) except AttributeError: pass if kwargs: raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0]) super(Model, self).__init__() signals.post_init.send(sender=self.__class__, instance=self)
def clean(
self)
Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS.
def clean(self): """ Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS. """ pass
def clean_fields(
self, exclude=None)
Cleans all fields and raises a ValidationError containing message_dict of all validation errors if any occur.
def clean_fields(self, exclude=None): """ Cleans all fields and raises a ValidationError containing message_dict of all validation errors if any occur. """ if exclude is None: exclude = [] errors = {} for f in self._meta.fields: if f.name in exclude: continue # Skip validation for empty fields with blank=True. The developer # is responsible for making sure they have a valid value. raw_value = getattr(self, f.attname) if f.blank and raw_value in f.empty_values: continue try: setattr(self, f.attname, f.clean(raw_value, self)) except ValidationError as e: errors[f.name] = e.error_list if errors: raise ValidationError(errors)
def date_error_message(
self, lookup_type, field, unique_for)
def date_error_message(self, lookup_type, field, unique_for): opts = self._meta return _("%(field_name)s must be unique for %(date_field)s %(lookup)s.") % { 'field_name': six.text_type(capfirst(opts.get_field(field).verbose_name)), 'date_field': six.text_type(capfirst(opts.get_field(unique_for).verbose_name)), 'lookup': lookup_type, }
def delete(
self)
def delete(self): if not self.pk: raise Exception("Can't delete without a primary key") self.__class__.objects.filter(pk=self.pk).delete()
def field_label(
cls, field_name=None)
@classmethod def field_label(cls, field_name=None): if field_name: try: return cls._meta.get_field(field_name).verbose_name except FieldDoesNotExist: try: return getattr(cls, field_name).verbose_name except AttributeError: return field_name else: return cls._meta.verbose_name
def full_clean(
self, exclude=None, validate_unique=True)
Calls clean_fields, clean, and validate_unique on the model, and raises a ValidationError for any errors that occurred.
def full_clean(self, exclude=None, validate_unique=True): """ Calls clean_fields, clean, and validate_unique, on the model, and raises a ``ValidationError`` for any errors that occurred. """ errors = {} if exclude is None: exclude = [] try: self.clean_fields(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) # Form.clean() is run even if other validation fails, so do the # same with Model.clean() for consistency. try: self.clean() except ValidationError as e: errors = e.update_error_dict(errors) # Run unique checks, but only for fields that passed validation. if validate_unique: for name in errors.keys(): if name != NON_FIELD_ERRORS and name not in exclude: exclude.append(name) try: self.validate_unique(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) if errors: raise ValidationError(errors)
def get_dirty_fields(
self, check_relationship=False, check_m2m=None, verbose=False)
def get_dirty_fields(self, check_relationship=False, check_m2m=None, verbose=False): if self._state.adding: # If the object has not yet been saved in the database, all fields are considered dirty # for consistency (see https://github.com/romgar/django-dirtyfields/issues/65 for more details) pk_specified = self.pk is not None initial_dict = self._as_dict(check_relationship, include_primary_key=pk_specified) return initial_dict if check_m2m is not None and not self.ENABLE_M2M_CHECK: raise ValueError("You can't check m2m fields if ENABLE_M2M_CHECK is set to False") modified_fields = compare_states(self._as_dict(check_relationship), self._original_state, self.compare_function) if check_m2m: modified_m2m_fields = compare_states(check_m2m, self._original_m2m_state, self.compare_function) modified_fields.update(modified_m2m_fields) if not verbose: # Keeps backward compatibility with previous function return modified_fields = {key: value['saved'] for key, value in modified_fields.items()} return modified_fields
def get_dom_name(
cls)
Convert the class name to a DOM element name
@classmethod def get_dom_name(cls): "Convert the class name to a DOM element name" clsname = cls.__name__ return clsname[0].lower() + clsname[1:]
def get_revision(
self)
def get_revision(self):
    revisions = Revision.objects.filter(element_type=self.element_type, element_id=self.id).order_by('-time')
    if revisions:
        return revisions[0]
    else:
        return None
def is_dirty(
self, check_relationship=False, check_m2m=None)
def is_dirty(self, check_relationship=False, check_m2m=None): return {} != self.get_dirty_fields(check_relationship=check_relationship, check_m2m=check_m2m)
def prepare_database_save(
self, unused)
def prepare_database_save(self, unused): if self.pk is None: raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self) return self.pk
def public_access_label(
self)
def public_access_label(self):
    if self.public_access == 'metadata':
        return _('Metadata only')
    elif self.public_access == 'full':
        return _('Sound and metadata')
    return _('Private data')
def required_fields(
cls)
@classmethod def required_fields(cls): required = [] for field in cls._meta.fields: if not field.blank: required.append(field) return required
def save(
self, force_insert=False, force_update=False, *args, **kwargs)
def save(self, force_insert=False, force_update=False, *args, **kwargs): required = self.required_fields() for field in required: if not getattr(self, field.name): raise RequiredFieldError(self, field) super(ModelCore, self).save(force_insert, force_update, *args, **kwargs)
def save_base(
self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None)
Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending.
The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading.
def save_base(self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None): """ Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending. The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading. """ using = using or router.db_for_write(self.__class__, instance=self) assert not (force_insert and (force_update or update_fields)) assert update_fields is None or len(update_fields) > 0 cls = origin = self.__class__ # Skip proxies, but keep the origin as the proxy model. if cls._meta.proxy: cls = cls._meta.concrete_model meta = cls._meta if not meta.auto_created: signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using, update_fields=update_fields) with transaction.commit_on_success_unless_managed(using=using, savepoint=False): if not raw: self._save_parents(cls, using, update_fields) updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields) # Store the database on which the object was saved self._state.db = using # Once saved, this is no longer a to-be-added instance. self._state.adding = False # Signal that the save is complete if not meta.auto_created: signals.post_save.send(sender=origin, instance=self, created=(not updated), update_fields=update_fields, raw=raw, using=using)
def save_dirty_fields(
self)
def save_dirty_fields(self): dirty_fields = self.get_dirty_fields(check_relationship=True) save_specific_fields(self, dirty_fields)
def serializable_value(
self, field_name)
Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly.
Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method.
def serializable_value(self, field_name): """ Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly. Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method. """ try: field = self._meta.get_field_by_name(field_name)[0] except FieldDoesNotExist: return getattr(self, field_name) return getattr(self, field.attname)
def set_revision(
self, user)
Save a media object and add a revision
def set_revision(self, user):
    "Save a media object and add a revision"
    Revision.touch(self, user)
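Markers inherit the revision machinery from MediaResource: set_revision() records who last touched the object and get_revision() returns the most recent Revision, or None if there is none. A sketch, with hypothetical lookups:

from django.contrib.auth.models import User
from telemeta.models import MediaItemMarker

user = User.objects.get(username='archivist')    # hypothetical user
marker = MediaItemMarker.objects.get(pk=1)       # hypothetical primary key

marker.title = 'Second chorus'
marker.save()
marker.set_revision(user)        # delegates to Revision.touch(self, user)

latest = marker.get_revision()
if latest is not None:
    print(latest.time)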
def to_dict(
self)
Return model fields as a dict of name/value pairs
def to_dict(self): "Return model fields as a dict of name/value pairs" fields_dict = {} for field in self._meta.fields: fields_dict[field.name] = getattr(self, field.name) return fields_dict
def to_dom(
self)
Return the DOM representation of this media object
def to_dom(self): "Return the DOM representation of this media object" impl = getDOMImplementation() root = self.get_dom_name() doc = impl.createDocument(None, root, None) top = doc.documentElement top.setAttribute("id", str(self.pk)) fields = self.to_dict() for name, value in fields.iteritems(): element = doc.createElement(self.get_dom_field_name(name)) if isinstance(value, EnhancedModel): element.setAttribute('key', str(value.pk)) value = unicode(value) element.appendChild(doc.createTextNode(value)) top.appendChild(element) return doc
def to_list(
self)
Return model fields as a list
def to_list(self): "Return model fields as a list" fields_list = [] for field in self._meta.fields: fields_list.append({'name': field.name, 'value': unicode(getattr(self, field.name))}) return fields_list
def unique_error_message(
self, model_class, unique_check)
def unique_error_message(self, model_class, unique_check): opts = model_class._meta model_name = capfirst(opts.verbose_name) # A unique field if len(unique_check) == 1: field_name = unique_check[0] field = opts.get_field(field_name) field_label = capfirst(field.verbose_name) # Insert the error into the error dict, very sneaky return field.error_messages['unique'] % { 'model_name': six.text_type(model_name), 'field_label': six.text_type(field_label) } # unique_together else: field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check] field_labels = get_text_list(field_labels, _('and')) return _("%(model_name)s with this %(field_label)s already exists.") % { 'model_name': six.text_type(model_name), 'field_label': six.text_type(field_labels) }
def validate_unique(
self, exclude=None)
Checks unique constraints on the model and raises ValidationError
if any failed.
def validate_unique(self, exclude=None): """ Checks unique constraints on the model and raises ``ValidationError`` if any failed. """ unique_checks, date_checks = self._get_unique_checks(exclude=exclude) errors = self._perform_unique_checks(unique_checks) date_errors = self._perform_date_checks(date_checks) for k, v in date_errors.items(): errors.setdefault(k, []).extend(v) if errors: raise ValidationError(errors)
class MediaItemPerformance
Item performance
class MediaItemPerformance(ModelCore):
    "Item performance"

    media_item = ForeignKey('MediaItem', related_name="performances", verbose_name=_('item'))
    instrument = WeakForeignKey('Instrument', related_name="performances", verbose_name=_('composition'))
    alias = WeakForeignKey('InstrumentAlias', related_name="performances", verbose_name=_('vernacular name'))
    instruments_num = CharField(_('number'))
    musicians = CharField(_('interprets'))

    class Meta(MetaCore):
        db_table = 'media_item_performances'
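A sketch of recording who played what on an item; instrument and alias are weak foreign keys and may stay empty, and the field name used on Instrument is an assumption:

from telemeta.models import Instrument, MediaItem, MediaItemPerformance

item = MediaItem.objects.get(pk=1)                               # hypothetical primary key
flute, created = Instrument.objects.get_or_create(name='flute')  # 'name' is assumed

performance = MediaItemPerformance(
    media_item=item,
    instrument=flute,
    instruments_num='2',
    musicians='John Doe',
)
performance.save()

item.performances.all()    # reverse relation via related_name="performances"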
Ancestors (in MRO)
- MediaItemPerformance
- telemeta.models.core.ModelCore
- telemeta.models.core.EnhancedModel
- django.db.models.base.Model
- dirtyfields.dirtyfields.DirtyFieldsMixin
- __builtin__.object
Class variables
var DoesNotExist
var ENABLE_M2M_CHECK
var Meta
var MultipleObjectsReturned
var alias
var compare_function
var instrument
var instruments_num
var media_item
var musicians
var objects
Static methods
def get_dom_field_name(
field_name)
Convert a snake_case field name to a camelCase DOM element name
@staticmethod
def get_dom_field_name(field_name):
    "Convert the class name to a DOM element name"
    tokens = field_name.split('_')
    name = tokens[0]
    for t in tokens[1:]:
        name += t[0].upper() + t[1:]
    return name
Instance variables
var pk
Methods
def __init__(
self, *args, **kwargs)
def __init__(self, *args, **kwargs): signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs) # Set up the storage for instance state self._state = ModelState() # There is a rather weird disparity here; if kwargs, it's set, then args # overrides it. It should be one or the other; don't duplicate the work # The reason for the kwargs check is that standard iterator passes in by # args, and instantiation for iteration is 33% faster. args_len = len(args) if args_len > len(self._meta.concrete_fields): # Daft, but matches old exception sans the err msg. raise IndexError("Number of args exceeds number of fields") if not kwargs: fields_iter = iter(self._meta.concrete_fields) # The ordering of the zip calls matter - zip throws StopIteration # when an iter throws it. So if the first iter throws it, the second # is *not* consumed. We rely on this, so don't change the order # without changing the logic. for val, field in zip(args, fields_iter): setattr(self, field.attname, val) else: # Slower, kwargs-ready version. fields_iter = iter(self._meta.fields) for val, field in zip(args, fields_iter): setattr(self, field.attname, val) kwargs.pop(field.name, None) # Maintain compatibility with existing calls. if isinstance(field.rel, ManyToOneRel): kwargs.pop(field.attname, None) # Now we're left with the unprocessed fields that *must* come from # keywords, or default. for field in fields_iter: is_related_object = False # This slightly odd construct is so that we can access any # data-descriptor object (DeferredAttribute) without triggering its # __get__ method. if (field.attname not in kwargs and (isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute) or field.column is None)): # This field will be populated on request. continue if kwargs: if isinstance(field.rel, ForeignObjectRel): try: # Assume object instance was passed in. rel_obj = kwargs.pop(field.name) is_related_object = True except KeyError: try: # Object instance wasn't passed in -- must be an ID. val = kwargs.pop(field.attname) except KeyError: val = field.get_default() else: # Object instance was passed in. Special case: You can # pass in "None" for related objects if it's allowed. if rel_obj is None and field.null: val = None else: try: val = kwargs.pop(field.attname) except KeyError: # This is done with an exception rather than the # default argument on pop because we don't want # get_default() to be evaluated, and then not used. # Refs #12057. val = field.get_default() else: val = field.get_default() if is_related_object: # If we are passed a related instance, set it using the # field.name instead of field.attname (e.g. "user" instead of # "user_id") so that the object gets properly cached (and type # checked) by the RelatedObjectDescriptor. setattr(self, field.name, rel_obj) else: setattr(self, field.attname, val) if kwargs: for prop in list(kwargs): try: if isinstance(getattr(self.__class__, prop), property): setattr(self, prop, kwargs.pop(prop)) except AttributeError: pass if kwargs: raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0]) super(Model, self).__init__() signals.post_init.send(sender=self.__class__, instance=self)
def clean(
self)
Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS.
def clean(self): """ Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS. """ pass
def clean_fields(
self, exclude=None)
Cleans all fields and raises a ValidationError containing message_dict of all validation errors if any occur.
def clean_fields(self, exclude=None): """ Cleans all fields and raises a ValidationError containing message_dict of all validation errors if any occur. """ if exclude is None: exclude = [] errors = {} for f in self._meta.fields: if f.name in exclude: continue # Skip validation for empty fields with blank=True. The developer # is responsible for making sure they have a valid value. raw_value = getattr(self, f.attname) if f.blank and raw_value in f.empty_values: continue try: setattr(self, f.attname, f.clean(raw_value, self)) except ValidationError as e: errors[f.name] = e.error_list if errors: raise ValidationError(errors)
def date_error_message(
self, lookup_type, field, unique_for)
def date_error_message(self, lookup_type, field, unique_for): opts = self._meta return _("%(field_name)s must be unique for %(date_field)s %(lookup)s.") % { 'field_name': six.text_type(capfirst(opts.get_field(field).verbose_name)), 'date_field': six.text_type(capfirst(opts.get_field(unique_for).verbose_name)), 'lookup': lookup_type, }
def delete(
self)
def delete(self):
    if not self.pk:
        raise Exception("Can't delete without a primary key")
    self.__class__.objects.filter(pk=self.pk).delete()
def field_label(
cls, field_name=None)
@classmethod
def field_label(cls, field_name=None):
    if field_name:
        try:
            return cls._meta.get_field(field_name).verbose_name
        except FieldDoesNotExist:
            try:
                return getattr(cls, field_name).verbose_name
            except AttributeError:
                return field_name
    else:
        return cls._meta.verbose_name
def full_clean(
self, exclude=None, validate_unique=True)
Calls clean_fields, clean, and validate_unique on the model, and raises a ValidationError for any errors that occurred.
def full_clean(self, exclude=None, validate_unique=True): """ Calls clean_fields, clean, and validate_unique, on the model, and raises a ``ValidationError`` for any errors that occurred. """ errors = {} if exclude is None: exclude = [] try: self.clean_fields(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) # Form.clean() is run even if other validation fails, so do the # same with Model.clean() for consistency. try: self.clean() except ValidationError as e: errors = e.update_error_dict(errors) # Run unique checks, but only for fields that passed validation. if validate_unique: for name in errors.keys(): if name != NON_FIELD_ERRORS and name not in exclude: exclude.append(name) try: self.validate_unique(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) if errors: raise ValidationError(errors)
def get_dirty_fields(
self, check_relationship=False, check_m2m=None, verbose=False)
def get_dirty_fields(self, check_relationship=False, check_m2m=None, verbose=False): if self._state.adding: # If the object has not yet been saved in the database, all fields are considered dirty # for consistency (see https://github.com/romgar/django-dirtyfields/issues/65 for more details) pk_specified = self.pk is not None initial_dict = self._as_dict(check_relationship, include_primary_key=pk_specified) return initial_dict if check_m2m is not None and not self.ENABLE_M2M_CHECK: raise ValueError("You can't check m2m fields if ENABLE_M2M_CHECK is set to False") modified_fields = compare_states(self._as_dict(check_relationship), self._original_state, self.compare_function) if check_m2m: modified_m2m_fields = compare_states(check_m2m, self._original_m2m_state, self.compare_function) modified_fields.update(modified_m2m_fields) if not verbose: # Keeps backward compatibility with previous function return modified_fields = {key: value['saved'] for key, value in modified_fields.items()} return modified_fields
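get_dirty_fields(), is_dirty() and save_dirty_fields() (below) all come from DirtyFieldsMixin (django-dirtyfields) and behave identically on every model in this module. A minimal hedged sketch, assuming an already-saved MediaItem with pk=1:

# Sketch only: assumes a MediaItem row with pk=1 exists in the database.
item = MediaItem.objects.get(pk=1)

item.title = u'New title'
print(item.is_dirty())            # True: in-memory state differs from the saved state
print(item.get_dirty_fields())    # e.g. {'title': u'Old title'} -- the original values

item.save_dirty_fields()          # writes only the modified columns
print(item.is_dirty())            # False again once the changes are persisted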
def get_dom_name(
cls)
Convert the class name to a DOM element name
@classmethod
def get_dom_name(cls):
    "Convert the class name to a DOM element name"
    clsname = cls.__name__
    return clsname[0].lower() + clsname[1:]
def is_dirty(
self, check_relationship=False, check_m2m=None)
def is_dirty(self, check_relationship=False, check_m2m=None):
    return {} != self.get_dirty_fields(check_relationship=check_relationship, check_m2m=check_m2m)
def prepare_database_save(
self, unused)
def prepare_database_save(self, unused): if self.pk is None: raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self) return self.pk
def required_fields(
cls)
@classmethod
def required_fields(cls):
    required = []
    for field in cls._meta.fields:
        if not field.blank:
            required.append(field)
    return required
def save(
self, force_insert=False, force_update=False, *args, **kwargs)
def save(self, force_insert=False, force_update=False, *args, **kwargs):
    required = self.required_fields()
    for field in required:
        if not getattr(self, field.name):
            raise RequiredFieldError(self, field)
    super(ModelCore, self).save(force_insert, force_update, *args, **kwargs)
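required_fields() and the save() override above work together: every field declared without blank=True must be non-empty before the row is written, otherwise a RequiredFieldError (from telemeta.models.core) is raised. A hedged sketch:

# Sketch: inspect which fields are mandatory, then trigger the check.
print([f.name for f in MediaItem.required_fields()])   # fields declared with blank=False

item = MediaItem()
try:
    item.save()            # raises if any required field is still empty
except RequiredFieldError as e:
    print(e)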
def save_base(
self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None)
Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending.
The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading.
def save_base(self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None): """ Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending. The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading. """ using = using or router.db_for_write(self.__class__, instance=self) assert not (force_insert and (force_update or update_fields)) assert update_fields is None or len(update_fields) > 0 cls = origin = self.__class__ # Skip proxies, but keep the origin as the proxy model. if cls._meta.proxy: cls = cls._meta.concrete_model meta = cls._meta if not meta.auto_created: signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using, update_fields=update_fields) with transaction.commit_on_success_unless_managed(using=using, savepoint=False): if not raw: self._save_parents(cls, using, update_fields) updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields) # Store the database on which the object was saved self._state.db = using # Once saved, this is no longer a to-be-added instance. self._state.adding = False # Signal that the save is complete if not meta.auto_created: signals.post_save.send(sender=origin, instance=self, created=(not updated), update_fields=update_fields, raw=raw, using=using)
def save_dirty_fields(
self)
def save_dirty_fields(self):
    dirty_fields = self.get_dirty_fields(check_relationship=True)
    save_specific_fields(self, dirty_fields)
def serializable_value(
self, field_name)
Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly.
Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method.
def serializable_value(self, field_name): """ Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly. Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method. """ try: field = self._meta.get_field_by_name(field_name)[0] except FieldDoesNotExist: return getattr(self, field_name) return getattr(self, field.attname)
def to_dict(
self)
Return model fields as a dict of name/value pairs
def to_dict(self):
    "Return model fields as a dict of name/value pairs"
    fields_dict = {}
    for field in self._meta.fields:
        fields_dict[field.name] = getattr(self, field.name)
    return fields_dict
def to_dom(
self)
Return the DOM representation of this media object
def to_dom(self):
    "Return the DOM representation of this media object"
    impl = getDOMImplementation()
    root = self.get_dom_name()
    doc = impl.createDocument(None, root, None)
    top = doc.documentElement
    top.setAttribute("id", str(self.pk))
    fields = self.to_dict()
    for name, value in fields.iteritems():
        element = doc.createElement(self.get_dom_field_name(name))
        if isinstance(value, EnhancedModel):
            element.setAttribute('key', str(value.pk))
        value = unicode(value)
        element.appendChild(doc.createTextNode(value))
        top.appendChild(element)
    return doc
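to_dom() ties together get_dom_name() and get_dom_field_name(): the root element is the lower-camel-cased class name, each field becomes a camel-cased child element, and related objects additionally carry a key attribute holding their primary key. A hedged serialization sketch, assuming an already-saved instance and the xml.dom.minidom implementation returned by getDOMImplementation():

# Sketch: dump a saved object as XML; recorded_from_date, for instance,
# ends up as a <recordedFromDate> element.
item = MediaItem.objects.get(pk=1)        # assumes this row exists
doc = item.to_dom()
print(doc.toprettyxml(indent='  '))       # minidom documents expose toxml()/toprettyxml()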
def to_list(
self)
Return model fields as a list
def to_list(self):
    "Return model fields as a list"
    fields_list = []
    for field in self._meta.fields:
        fields_list.append({'name': field.name, 'value': unicode(getattr(self, field.name))})
    return fields_list
def unique_error_message(
self, model_class, unique_check)
def unique_error_message(self, model_class, unique_check): opts = model_class._meta model_name = capfirst(opts.verbose_name) # A unique field if len(unique_check) == 1: field_name = unique_check[0] field = opts.get_field(field_name) field_label = capfirst(field.verbose_name) # Insert the error into the error dict, very sneaky return field.error_messages['unique'] % { 'model_name': six.text_type(model_name), 'field_label': six.text_type(field_label) } # unique_together else: field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check] field_labels = get_text_list(field_labels, _('and')) return _("%(model_name)s with this %(field_label)s already exists.") % { 'model_name': six.text_type(model_name), 'field_label': six.text_type(field_labels) }
def validate_unique(
self, exclude=None)
Checks unique constraints on the model and raises ValidationError if any failed.
def validate_unique(self, exclude=None): """ Checks unique constraints on the model and raises ``ValidationError`` if any failed. """ unique_checks, date_checks = self._get_unique_checks(exclude=exclude) errors = self._perform_unique_checks(unique_checks) date_errors = self._perform_date_checks(date_checks) for k, v in date_errors.items(): errors.setdefault(k, []).extend(v) if errors: raise ValidationError(errors)
class MediaItemRelated
Item related media
class MediaItemRelated(MediaRelated):
    "Item related media"

    item = ForeignKey('MediaItem', related_name="related", verbose_name=_('item'))

    def parse_markers(self, **kwargs):
        # Parse KDEnLive session
        if self.file:
            if self.is_kdenlive_session():
                session = KDEnLiveSession(self.file.path)
                markers = session.markers(**kwargs)
                for marker in markers:
                    m = MediaItemMarker(item=self.item)
                    m.public_id = get_random_hash()
                    m.time = float(marker['time'])
                    m.title = marker['comment']
                    m.save()
                return markers

    class Meta(MetaCore):
        db_table = 'media_item_related'
        verbose_name = _('item related media')
        verbose_name_plural = _('item related media')
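A hedged usage sketch for the class above: attach a file to an existing item and, when that file is a KDEnLive session, turn its markers into MediaItemMarker rows via parse_markers(). The file path is hypothetical and the title field is assumed to be inherited from MediaRelated:

# Sketch only: 'item' is an existing MediaItem, the path is hypothetical.
from django.core.files import File

related = MediaItemRelated(item=item, title=u'Editing session')
with open('/tmp/session.kdenlive', 'rb') as f:
    related.file.save('session.kdenlive', File(f), save=True)

if related.is_kdenlive_session():
    markers = related.parse_markers()     # creates one MediaItemMarker per marker
    for marker in markers:
        print(marker['time'], marker['comment'])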
Ancestors (in MRO)
- MediaItemRelated
- telemeta.models.resource.MediaRelated
- telemeta.models.resource.MediaResource
- telemeta.models.core.ModelCore
- telemeta.models.core.EnhancedModel
- django.db.models.base.Model
- dirtyfields.dirtyfields.DirtyFieldsMixin
- __builtin__.object
Class variables
var DoesNotExist
var ENABLE_M2M_CHECK
var Meta
var MultipleObjectsReturned
var compare_function
var element_type
var item
var objects
Static methods
def get_dom_field_name(
field_name)
Convert a field name to a DOM element name
@staticmethod def get_dom_field_name(field_name): "Convert the class name to a DOM element name" tokens = field_name.split('_') name = tokens[0] for t in tokens[1:]: name += t[0].upper() + t[1:] return name
Instance variables
var pk
Methods
def __init__(
self, *args, **kwargs)
def __init__(self, *args, **kwargs): signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs) # Set up the storage for instance state self._state = ModelState() # There is a rather weird disparity here; if kwargs, it's set, then args # overrides it. It should be one or the other; don't duplicate the work # The reason for the kwargs check is that standard iterator passes in by # args, and instantiation for iteration is 33% faster. args_len = len(args) if args_len > len(self._meta.concrete_fields): # Daft, but matches old exception sans the err msg. raise IndexError("Number of args exceeds number of fields") if not kwargs: fields_iter = iter(self._meta.concrete_fields) # The ordering of the zip calls matter - zip throws StopIteration # when an iter throws it. So if the first iter throws it, the second # is *not* consumed. We rely on this, so don't change the order # without changing the logic. for val, field in zip(args, fields_iter): setattr(self, field.attname, val) else: # Slower, kwargs-ready version. fields_iter = iter(self._meta.fields) for val, field in zip(args, fields_iter): setattr(self, field.attname, val) kwargs.pop(field.name, None) # Maintain compatibility with existing calls. if isinstance(field.rel, ManyToOneRel): kwargs.pop(field.attname, None) # Now we're left with the unprocessed fields that *must* come from # keywords, or default. for field in fields_iter: is_related_object = False # This slightly odd construct is so that we can access any # data-descriptor object (DeferredAttribute) without triggering its # __get__ method. if (field.attname not in kwargs and (isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute) or field.column is None)): # This field will be populated on request. continue if kwargs: if isinstance(field.rel, ForeignObjectRel): try: # Assume object instance was passed in. rel_obj = kwargs.pop(field.name) is_related_object = True except KeyError: try: # Object instance wasn't passed in -- must be an ID. val = kwargs.pop(field.attname) except KeyError: val = field.get_default() else: # Object instance was passed in. Special case: You can # pass in "None" for related objects if it's allowed. if rel_obj is None and field.null: val = None else: try: val = kwargs.pop(field.attname) except KeyError: # This is done with an exception rather than the # default argument on pop because we don't want # get_default() to be evaluated, and then not used. # Refs #12057. val = field.get_default() else: val = field.get_default() if is_related_object: # If we are passed a related instance, set it using the # field.name instead of field.attname (e.g. "user" instead of # "user_id") so that the object gets properly cached (and type # checked) by the RelatedObjectDescriptor. setattr(self, field.name, rel_obj) else: setattr(self, field.attname, val) if kwargs: for prop in list(kwargs): try: if isinstance(getattr(self.__class__, prop), property): setattr(self, prop, kwargs.pop(prop)) except AttributeError: pass if kwargs: raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0]) super(Model, self).__init__() signals.post_init.send(sender=self.__class__, instance=self)
def clean(
self)
Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS.
def clean(self): """ Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS. """ pass
def clean_fields(
self, exclude=None)
Cleans all fields and raises a ValidationError containing message_dict of all validation errors if any occur.
def clean_fields(self, exclude=None): """ Cleans all fields and raises a ValidationError containing message_dict of all validation errors if any occur. """ if exclude is None: exclude = [] errors = {} for f in self._meta.fields: if f.name in exclude: continue # Skip validation for empty fields with blank=True. The developer # is responsible for making sure they have a valid value. raw_value = getattr(self, f.attname) if f.blank and raw_value in f.empty_values: continue try: setattr(self, f.attname, f.clean(raw_value, self)) except ValidationError as e: errors[f.name] = e.error_list if errors: raise ValidationError(errors)
def date_error_message(
self, lookup_type, field, unique_for)
def date_error_message(self, lookup_type, field, unique_for): opts = self._meta return _("%(field_name)s must be unique for %(date_field)s %(lookup)s.") % { 'field_name': six.text_type(capfirst(opts.get_field(field).verbose_name)), 'date_field': six.text_type(capfirst(opts.get_field(unique_for).verbose_name)), 'lookup': lookup_type, }
def delete(
self)
def delete(self): if not self.pk: raise Exception("Can't delete without a primary key") self.__class__.objects.filter(pk=self.pk).delete()
def field_label(
cls, field_name=None)
@classmethod def field_label(cls, field_name=None): if field_name: try: return cls._meta.get_field(field_name).verbose_name except FieldDoesNotExist: try: return getattr(cls, field_name).verbose_name except AttributeError: return field_name else: return cls._meta.verbose_name
def full_clean(
self, exclude=None, validate_unique=True)
Calls clean_fields, clean, and validate_unique on the model, and raises a ValidationError for any errors that occurred.
def full_clean(self, exclude=None, validate_unique=True): """ Calls clean_fields, clean, and validate_unique, on the model, and raises a ``ValidationError`` for any errors that occurred. """ errors = {} if exclude is None: exclude = [] try: self.clean_fields(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) # Form.clean() is run even if other validation fails, so do the # same with Model.clean() for consistency. try: self.clean() except ValidationError as e: errors = e.update_error_dict(errors) # Run unique checks, but only for fields that passed validation. if validate_unique: for name in errors.keys(): if name != NON_FIELD_ERRORS and name not in exclude: exclude.append(name) try: self.validate_unique(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) if errors: raise ValidationError(errors)
def get_dirty_fields(
self, check_relationship=False, check_m2m=None, verbose=False)
def get_dirty_fields(self, check_relationship=False, check_m2m=None, verbose=False): if self._state.adding: # If the object has not yet been saved in the database, all fields are considered dirty # for consistency (see https://github.com/romgar/django-dirtyfields/issues/65 for more details) pk_specified = self.pk is not None initial_dict = self._as_dict(check_relationship, include_primary_key=pk_specified) return initial_dict if check_m2m is not None and not self.ENABLE_M2M_CHECK: raise ValueError("You can't check m2m fields if ENABLE_M2M_CHECK is set to False") modified_fields = compare_states(self._as_dict(check_relationship), self._original_state, self.compare_function) if check_m2m: modified_m2m_fields = compare_states(check_m2m, self._original_m2m_state, self.compare_function) modified_fields.update(modified_m2m_fields) if not verbose: # Keeps backward compatibility with previous function return modified_fields = {key: value['saved'] for key, value in modified_fields.items()} return modified_fields
def get_dom_name(
cls)
Convert the class name to a DOM element name
@classmethod def get_dom_name(cls): "Convert the class name to a DOM element name" clsname = cls.__name__ return clsname[0].lower() + clsname[1:]
def get_revision(
self)
def get_revision(self):
    revisions = Revision.objects.filter(element_type=self.element_type, element_id=self.id).order_by('-time')
    if revisions:
        return revisions[0]
    else:
        return None
def is_dirty(
self, check_relationship=False, check_m2m=None)
def is_dirty(self, check_relationship=False, check_m2m=None): return {} != self.get_dirty_fields(check_relationship=check_relationship, check_m2m=check_m2m)
def is_image(
self)
def is_image(self):
    is_url_image = False
    if self.url:
        url_types = ['.png', '.jpg', '.gif', '.jpeg']
        for type in url_types:
            if type in self.url or type.upper() in self.url:
                is_url_image = True
    return 'image' in self.mime_type or is_url_image
def is_kdenlive_session(
self)
def is_kdenlive_session(self):
    if self.file:
        return '.kdenlive' in self.file.path
    else:
        return False
def parse_markers(
self, **kwargs)
def parse_markers(self, **kwargs):
    # Parse KDEnLive session
    if self.file:
        if self.is_kdenlive_session():
            session = KDEnLiveSession(self.file.path)
            markers = session.markers(**kwargs)
            for marker in markers:
                m = MediaItemMarker(item=self.item)
                m.public_id = get_random_hash()
                m.time = float(marker['time'])
                m.title = marker['comment']
                m.save()
            return markers
def prepare_database_save(
self, unused)
def prepare_database_save(self, unused): if self.pk is None: raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self) return self.pk
def public_access_label(
self)
def public_access_label(self):
    if self.public_access == 'metadata':
        return _('Metadata only')
    elif self.public_access == 'full':
        return _('Sound and metadata')
    return _('Private data')
def required_fields(
cls)
@classmethod def required_fields(cls): required = [] for field in cls._meta.fields: if not field.blank: required.append(field) return required
def save(
self, *args, **kwargs)
def save(self, *args, **kwargs):
    super(MediaRelated, self).save(*args, **kwargs)
def save_base(
self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None)
Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending.
The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading.
def save_base(self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None): """ Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending. The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading. """ using = using or router.db_for_write(self.__class__, instance=self) assert not (force_insert and (force_update or update_fields)) assert update_fields is None or len(update_fields) > 0 cls = origin = self.__class__ # Skip proxies, but keep the origin as the proxy model. if cls._meta.proxy: cls = cls._meta.concrete_model meta = cls._meta if not meta.auto_created: signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using, update_fields=update_fields) with transaction.commit_on_success_unless_managed(using=using, savepoint=False): if not raw: self._save_parents(cls, using, update_fields) updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields) # Store the database on which the object was saved self._state.db = using # Once saved, this is no longer a to-be-added instance. self._state.adding = False # Signal that the save is complete if not meta.auto_created: signals.post_save.send(sender=origin, instance=self, created=(not updated), update_fields=update_fields, raw=raw, using=using)
def save_dirty_fields(
self)
def save_dirty_fields(self): dirty_fields = self.get_dirty_fields(check_relationship=True) save_specific_fields(self, dirty_fields)
def serializable_value(
self, field_name)
Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly.
Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method.
def serializable_value(self, field_name): """ Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly. Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method. """ try: field = self._meta.get_field_by_name(field_name)[0] except FieldDoesNotExist: return getattr(self, field_name) return getattr(self, field.attname)
def set_mime_type(
self)
def set_mime_type(self):
    if self.file:
        self.mime_type = mimetypes.guess_type(self.file.path)[0]
def set_revision(
self, user)
Save a media object and add a revision
def set_revision(self, user):
    "Save a media object and add a revision"
    Revision.touch(self, user)
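get_revision() and set_revision() (above) pair with the Revision model from telemeta.models.core: set_revision() records who changed the object via Revision.touch(), and get_revision() returns the most recent entry. A hedged sketch, assuming Revision exposes time and user attributes and that request.user is an authenticated user:

# Sketch: log a revision after an edit, then read back the latest one.
related.title = u'Updated caption'    # 'related' is an existing MediaItemRelated
related.set_revision(request.user)    # Revision.touch() stores the change

latest = related.get_revision()
if latest is not None:
    print(latest.time, latest.user)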
def to_dict(
self)
Return model fields as a dict of name/value pairs
def to_dict(self): "Return model fields as a dict of name/value pairs" fields_dict = {} for field in self._meta.fields: fields_dict[field.name] = getattr(self, field.name) return fields_dict
def to_dom(
self)
Return the DOM representation of this media object
def to_dom(self): "Return the DOM representation of this media object" impl = getDOMImplementation() root = self.get_dom_name() doc = impl.createDocument(None, root, None) top = doc.documentElement top.setAttribute("id", str(self.pk)) fields = self.to_dict() for name, value in fields.iteritems(): element = doc.createElement(self.get_dom_field_name(name)) if isinstance(value, EnhancedModel): element.setAttribute('key', str(value.pk)) value = unicode(value) element.appendChild(doc.createTextNode(value)) top.appendChild(element) return doc
def to_list(
self)
Return model fields as a list
def to_list(self): "Return model fields as a list" fields_list = [] for field in self._meta.fields: fields_list.append({'name': field.name, 'value': unicode(getattr(self, field.name))}) return fields_list
def unique_error_message(
self, model_class, unique_check)
def unique_error_message(self, model_class, unique_check): opts = model_class._meta model_name = capfirst(opts.verbose_name) # A unique field if len(unique_check) == 1: field_name = unique_check[0] field = opts.get_field(field_name) field_label = capfirst(field.verbose_name) # Insert the error into the error dict, very sneaky return field.error_messages['unique'] % { 'model_name': six.text_type(model_name), 'field_label': six.text_type(field_label) } # unique_together else: field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check] field_labels = get_text_list(field_labels, _('and')) return _("%(model_name)s with this %(field_label)s already exists.") % { 'model_name': six.text_type(model_name), 'field_label': six.text_type(field_labels) }
def validate_unique(
self, exclude=None)
Checks unique constraints on the model and raises ValidationError if any failed.
def validate_unique(self, exclude=None): """ Checks unique constraints on the model and raises ``ValidationError`` if any failed. """ unique_checks, date_checks = self._get_unique_checks(exclude=exclude) errors = self._perform_unique_checks(unique_checks) date_errors = self._perform_date_checks(date_checks) for k, v in date_errors.items(): errors.setdefault(k, []).extend(v) if errors: raise ValidationError(errors)
class MediaItemTranscoded
Item file transcoded
class MediaItemTranscoded(MediaResource):
    "Item file transcoded"

    element_type = 'transcoded item'

    item = models.ForeignKey('MediaItem', related_name="transcoded", verbose_name=_('item'))
    mimetype = models.CharField(_('mime_type'), max_length=255, blank=True)
    date_added = DateTimeField(_('date'), auto_now_add=True)
    status = models.IntegerField(_('status'), choices=ITEM_TRANSODING_STATUS, default=1)
    file = models.FileField(_('file'), upload_to='items/%Y/%m/%d', max_length=1024, blank=True)

    @property
    def mime_type(self):
        if not self.mimetype:
            if self.file:
                if os.path.exists(self.file.path):
                    self.mimetype = mimetypes.guess_type(self.file.path)[0]
                    self.save()
                    return self.mimetype
                else:
                    return 'none'
            else:
                return 'none'
        else:
            return self.mimetype

    def __unicode__(self):
        if self.item.title:
            return self.item.title + ' - ' + self.mime_type
        else:
            return self.item.public_id + ' - ' + self.mime_type

    class Meta(MetaCore):
        db_table = app_name + '_media_transcoded'
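A hedged sketch of how the transcoded variants of an item can be looked up through the related_name declared above; the item lookup and the status value are assumptions (status 3 corresponds to 'done' in ITEM_TRANSODING_STATUS):

# Sketch: list the transcoded files attached to an existing item.
item = MediaItem.objects.get(pk=1)            # assumes this row exists
for transcoded in item.transcoded.all():      # related_name="transcoded"
    print(transcoded.mime_type, transcoded.get_status_display())

# Keep only the variants whose transcoding finished.
done = item.transcoded.filter(status=3)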
Ancestors (in MRO)
- MediaItemTranscoded
- telemeta.models.resource.MediaResource
- telemeta.models.core.ModelCore
- telemeta.models.core.EnhancedModel
- django.db.models.base.Model
- dirtyfields.dirtyfields.DirtyFieldsMixin
- __builtin__.object
Class variables
var DoesNotExist
var ENABLE_M2M_CHECK
var Meta
var MultipleObjectsReturned
var compare_function
var date_added
var element_type
var file
var item
var mimetype
var objects
var status
Static methods
def get_dom_field_name(
field_name)
Convert a field name to a DOM element name
@staticmethod def get_dom_field_name(field_name): "Convert the class name to a DOM element name" tokens = field_name.split('_') name = tokens[0] for t in tokens[1:]: name += t[0].upper() + t[1:] return name
Instance variables
var mime_type
var pk
Methods
def __init__(
self, *args, **kwargs)
def __init__(self, *args, **kwargs): signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs) # Set up the storage for instance state self._state = ModelState() # There is a rather weird disparity here; if kwargs, it's set, then args # overrides it. It should be one or the other; don't duplicate the work # The reason for the kwargs check is that standard iterator passes in by # args, and instantiation for iteration is 33% faster. args_len = len(args) if args_len > len(self._meta.concrete_fields): # Daft, but matches old exception sans the err msg. raise IndexError("Number of args exceeds number of fields") if not kwargs: fields_iter = iter(self._meta.concrete_fields) # The ordering of the zip calls matter - zip throws StopIteration # when an iter throws it. So if the first iter throws it, the second # is *not* consumed. We rely on this, so don't change the order # without changing the logic. for val, field in zip(args, fields_iter): setattr(self, field.attname, val) else: # Slower, kwargs-ready version. fields_iter = iter(self._meta.fields) for val, field in zip(args, fields_iter): setattr(self, field.attname, val) kwargs.pop(field.name, None) # Maintain compatibility with existing calls. if isinstance(field.rel, ManyToOneRel): kwargs.pop(field.attname, None) # Now we're left with the unprocessed fields that *must* come from # keywords, or default. for field in fields_iter: is_related_object = False # This slightly odd construct is so that we can access any # data-descriptor object (DeferredAttribute) without triggering its # __get__ method. if (field.attname not in kwargs and (isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute) or field.column is None)): # This field will be populated on request. continue if kwargs: if isinstance(field.rel, ForeignObjectRel): try: # Assume object instance was passed in. rel_obj = kwargs.pop(field.name) is_related_object = True except KeyError: try: # Object instance wasn't passed in -- must be an ID. val = kwargs.pop(field.attname) except KeyError: val = field.get_default() else: # Object instance was passed in. Special case: You can # pass in "None" for related objects if it's allowed. if rel_obj is None and field.null: val = None else: try: val = kwargs.pop(field.attname) except KeyError: # This is done with an exception rather than the # default argument on pop because we don't want # get_default() to be evaluated, and then not used. # Refs #12057. val = field.get_default() else: val = field.get_default() if is_related_object: # If we are passed a related instance, set it using the # field.name instead of field.attname (e.g. "user" instead of # "user_id") so that the object gets properly cached (and type # checked) by the RelatedObjectDescriptor. setattr(self, field.name, rel_obj) else: setattr(self, field.attname, val) if kwargs: for prop in list(kwargs): try: if isinstance(getattr(self.__class__, prop), property): setattr(self, prop, kwargs.pop(prop)) except AttributeError: pass if kwargs: raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0]) super(Model, self).__init__() signals.post_init.send(sender=self.__class__, instance=self)
def clean(
self)
Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS.
def clean(self): """ Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS. """ pass
def clean_fields(
self, exclude=None)
Cleans all fields and raises a ValidationError containing message_dict of all validation errors if any occur.
def clean_fields(self, exclude=None): """ Cleans all fields and raises a ValidationError containing message_dict of all validation errors if any occur. """ if exclude is None: exclude = [] errors = {} for f in self._meta.fields: if f.name in exclude: continue # Skip validation for empty fields with blank=True. The developer # is responsible for making sure they have a valid value. raw_value = getattr(self, f.attname) if f.blank and raw_value in f.empty_values: continue try: setattr(self, f.attname, f.clean(raw_value, self)) except ValidationError as e: errors[f.name] = e.error_list if errors: raise ValidationError(errors)
def date_error_message(
self, lookup_type, field, unique_for)
def date_error_message(self, lookup_type, field, unique_for): opts = self._meta return _("%(field_name)s must be unique for %(date_field)s %(lookup)s.") % { 'field_name': six.text_type(capfirst(opts.get_field(field).verbose_name)), 'date_field': six.text_type(capfirst(opts.get_field(unique_for).verbose_name)), 'lookup': lookup_type, }
def delete(
self)
def delete(self): if not self.pk: raise Exception("Can't delete without a primary key") self.__class__.objects.filter(pk=self.pk).delete()
def field_label(
cls, field_name=None)
@classmethod def field_label(cls, field_name=None): if field_name: try: return cls._meta.get_field(field_name).verbose_name except FieldDoesNotExist: try: return getattr(cls, field_name).verbose_name except AttributeError: return field_name else: return cls._meta.verbose_name
def full_clean(
self, exclude=None, validate_unique=True)
Calls clean_fields, clean, and validate_unique on the model, and raises a ValidationError for any errors that occurred.
def full_clean(self, exclude=None, validate_unique=True): """ Calls clean_fields, clean, and validate_unique, on the model, and raises a ``ValidationError`` for any errors that occurred. """ errors = {} if exclude is None: exclude = [] try: self.clean_fields(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) # Form.clean() is run even if other validation fails, so do the # same with Model.clean() for consistency. try: self.clean() except ValidationError as e: errors = e.update_error_dict(errors) # Run unique checks, but only for fields that passed validation. if validate_unique: for name in errors.keys(): if name != NON_FIELD_ERRORS and name not in exclude: exclude.append(name) try: self.validate_unique(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) if errors: raise ValidationError(errors)
def get_dirty_fields(
self, check_relationship=False, check_m2m=None, verbose=False)
def get_dirty_fields(self, check_relationship=False, check_m2m=None, verbose=False): if self._state.adding: # If the object has not yet been saved in the database, all fields are considered dirty # for consistency (see https://github.com/romgar/django-dirtyfields/issues/65 for more details) pk_specified = self.pk is not None initial_dict = self._as_dict(check_relationship, include_primary_key=pk_specified) return initial_dict if check_m2m is not None and not self.ENABLE_M2M_CHECK: raise ValueError("You can't check m2m fields if ENABLE_M2M_CHECK is set to False") modified_fields = compare_states(self._as_dict(check_relationship), self._original_state, self.compare_function) if check_m2m: modified_m2m_fields = compare_states(check_m2m, self._original_m2m_state, self.compare_function) modified_fields.update(modified_m2m_fields) if not verbose: # Keeps backward compatibility with previous function return modified_fields = {key: value['saved'] for key, value in modified_fields.items()} return modified_fields
def get_dom_name(
cls)
Convert the class name to a DOM element name
@classmethod def get_dom_name(cls): "Convert the class name to a DOM element name" clsname = cls.__name__ return clsname[0].lower() + clsname[1:]
def get_revision(
self)
def get_revision(self): revisions = Revision.objects.filter(element_type=self.element_type, element_id=self.id).order_by('-time') if revisions: return revisions[0] else: return None
def get_status_display(
*moreargs, **morekwargs)
This accessor is generated automatically by Django for the status field (because it declares choices=ITEM_TRANSODING_STATUS); the source shown below is the generic curried wrapper around Model._get_FIELD_display, not code defined in telemeta.

def _curried(*moreargs, **morekwargs):
    return _curried_func(*(args + moreargs), **dict(kwargs, **morekwargs))
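In practice this accessor simply maps the stored integer to its label in ITEM_TRANSODING_STATUS; a minimal hedged example:

# Sketch: the display value is looked up from the field's choices.
t = MediaItemTranscoded(status=3)
print(t.get_status_display())     # u'done'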
def is_dirty(
self, check_relationship=False, check_m2m=None)
def is_dirty(self, check_relationship=False, check_m2m=None): return {} != self.get_dirty_fields(check_relationship=check_relationship, check_m2m=check_m2m)
def prepare_database_save(
self, unused)
def prepare_database_save(self, unused): if self.pk is None: raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self) return self.pk
def public_access_label(
self)
def public_access_label(self): if self.public_access == 'metadata': return _('Metadata only') elif self.public_access == 'full': return _('Sound and metadata') return _('Private data')
def required_fields(
cls)
@classmethod def required_fields(cls): required = [] for field in cls._meta.fields: if not field.blank: required.append(field) return required
def save(
self, force_insert=False, force_update=False, *args, **kwargs)
def save(self, force_insert=False, force_update=False, *args, **kwargs): required = self.required_fields() for field in required: if not getattr(self, field.name): raise RequiredFieldError(self, field) super(ModelCore, self).save(force_insert, force_update, *args, **kwargs)
def save_base(
self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None)
Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending.
The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading.
def save_base(self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None): """ Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending. The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading. """ using = using or router.db_for_write(self.__class__, instance=self) assert not (force_insert and (force_update or update_fields)) assert update_fields is None or len(update_fields) > 0 cls = origin = self.__class__ # Skip proxies, but keep the origin as the proxy model. if cls._meta.proxy: cls = cls._meta.concrete_model meta = cls._meta if not meta.auto_created: signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using, update_fields=update_fields) with transaction.commit_on_success_unless_managed(using=using, savepoint=False): if not raw: self._save_parents(cls, using, update_fields) updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields) # Store the database on which the object was saved self._state.db = using # Once saved, this is no longer a to-be-added instance. self._state.adding = False # Signal that the save is complete if not meta.auto_created: signals.post_save.send(sender=origin, instance=self, created=(not updated), update_fields=update_fields, raw=raw, using=using)
def save_dirty_fields(
self)
def save_dirty_fields(self): dirty_fields = self.get_dirty_fields(check_relationship=True) save_specific_fields(self, dirty_fields)
def serializable_value(
self, field_name)
Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly.
Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method.
def serializable_value(self, field_name): """ Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly. Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method. """ try: field = self._meta.get_field_by_name(field_name)[0] except FieldDoesNotExist: return getattr(self, field_name) return getattr(self, field.attname)
def set_revision(
self, user)
Save a media object and add a revision
def set_revision(self, user): "Save a media object and add a revision" Revision.touch(self, user)
def to_dict(
self)
Return model fields as a dict of name/value pairs
def to_dict(self): "Return model fields as a dict of name/value pairs" fields_dict = {} for field in self._meta.fields: fields_dict[field.name] = getattr(self, field.name) return fields_dict
def to_dom(
self)
Return the DOM representation of this media object
def to_dom(self): "Return the DOM representation of this media object" impl = getDOMImplementation() root = self.get_dom_name() doc = impl.createDocument(None, root, None) top = doc.documentElement top.setAttribute("id", str(self.pk)) fields = self.to_dict() for name, value in fields.iteritems(): element = doc.createElement(self.get_dom_field_name(name)) if isinstance(value, EnhancedModel): element.setAttribute('key', str(value.pk)) value = unicode(value) element.appendChild(doc.createTextNode(value)) top.appendChild(element) return doc
def to_list(
self)
Return model fields as a list
def to_list(self): "Return model fields as a list" fields_list = [] for field in self._meta.fields: fields_list.append({'name': field.name, 'value': unicode(getattr(self, field.name))}) return fields_list
def unique_error_message(
self, model_class, unique_check)
def unique_error_message(self, model_class, unique_check): opts = model_class._meta model_name = capfirst(opts.verbose_name) # A unique field if len(unique_check) == 1: field_name = unique_check[0] field = opts.get_field(field_name) field_label = capfirst(field.verbose_name) # Insert the error into the error dict, very sneaky return field.error_messages['unique'] % { 'model_name': six.text_type(model_name), 'field_label': six.text_type(field_label) } # unique_together else: field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check] field_labels = get_text_list(field_labels, _('and')) return _("%(model_name)s with this %(field_label)s already exists.") % { 'model_name': six.text_type(model_name), 'field_label': six.text_type(field_labels) }
def validate_unique(
self, exclude=None)
Checks unique constraints on the model and raises ValidationError if any failed.
def validate_unique(self, exclude=None): """ Checks unique constraints on the model and raises ``ValidationError`` if any failed. """ unique_checks, date_checks = self._get_unique_checks(exclude=exclude) errors = self._perform_unique_checks(unique_checks) date_errors = self._perform_date_checks(date_checks) for k, v in date_errors.items(): errors.setdefault(k, []).extend(v) if errors: raise ValidationError(errors)
class MediaItemTranscodingFlag
Flag indicating whether a MediaItem has been transcoded to a given format
class MediaItemTranscodingFlag(ModelCore):
    "Item flag to know if the MediaItem has been transcoded to a given format"

    item = ForeignKey('MediaItem', related_name="transcoding", verbose_name=_('item'))
    mime_type = CharField(_('mime_type'), required=True)
    date = DateTimeField(_('date'), auto_now=True)
    value = BooleanField(_('transcoded'))

    class Meta(MetaCore):
        db_table = 'media_transcoding'
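A hedged sketch of how such a flag can be recorded once a transcode has finished; the item instance and the target mime type are assumptions:

# Sketch: mark an existing item as transcoded to a given mime type.
flag, created = MediaItemTranscodingFlag.objects.get_or_create(
    item=item,                     # an existing MediaItem instance
    mime_type='audio/ogg',         # hypothetical target format
    defaults={'value': True})

if not created and not flag.value:
    flag.value = True
    flag.save()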
Ancestors (in MRO)
- MediaItemTranscodingFlag
- telemeta.models.core.ModelCore
- telemeta.models.core.EnhancedModel
- django.db.models.base.Model
- dirtyfields.dirtyfields.DirtyFieldsMixin
- __builtin__.object
Class variables
var DoesNotExist
var ENABLE_M2M_CHECK
var Meta
var MultipleObjectsReturned
var compare_function
var date
var item
var mime_type
var objects
var value
Static methods
def get_dom_field_name(
field_name)
Convert a field name to a DOM element name
@staticmethod def get_dom_field_name(field_name): "Convert the class name to a DOM element name" tokens = field_name.split('_') name = tokens[0] for t in tokens[1:]: name += t[0].upper() + t[1:] return name
Instance variables
var pk
Methods
def __init__(
self, *args, **kwargs)
def __init__(self, *args, **kwargs): signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs) # Set up the storage for instance state self._state = ModelState() # There is a rather weird disparity here; if kwargs, it's set, then args # overrides it. It should be one or the other; don't duplicate the work # The reason for the kwargs check is that standard iterator passes in by # args, and instantiation for iteration is 33% faster. args_len = len(args) if args_len > len(self._meta.concrete_fields): # Daft, but matches old exception sans the err msg. raise IndexError("Number of args exceeds number of fields") if not kwargs: fields_iter = iter(self._meta.concrete_fields) # The ordering of the zip calls matter - zip throws StopIteration # when an iter throws it. So if the first iter throws it, the second # is *not* consumed. We rely on this, so don't change the order # without changing the logic. for val, field in zip(args, fields_iter): setattr(self, field.attname, val) else: # Slower, kwargs-ready version. fields_iter = iter(self._meta.fields) for val, field in zip(args, fields_iter): setattr(self, field.attname, val) kwargs.pop(field.name, None) # Maintain compatibility with existing calls. if isinstance(field.rel, ManyToOneRel): kwargs.pop(field.attname, None) # Now we're left with the unprocessed fields that *must* come from # keywords, or default. for field in fields_iter: is_related_object = False # This slightly odd construct is so that we can access any # data-descriptor object (DeferredAttribute) without triggering its # __get__ method. if (field.attname not in kwargs and (isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute) or field.column is None)): # This field will be populated on request. continue if kwargs: if isinstance(field.rel, ForeignObjectRel): try: # Assume object instance was passed in. rel_obj = kwargs.pop(field.name) is_related_object = True except KeyError: try: # Object instance wasn't passed in -- must be an ID. val = kwargs.pop(field.attname) except KeyError: val = field.get_default() else: # Object instance was passed in. Special case: You can # pass in "None" for related objects if it's allowed. if rel_obj is None and field.null: val = None else: try: val = kwargs.pop(field.attname) except KeyError: # This is done with an exception rather than the # default argument on pop because we don't want # get_default() to be evaluated, and then not used. # Refs #12057. val = field.get_default() else: val = field.get_default() if is_related_object: # If we are passed a related instance, set it using the # field.name instead of field.attname (e.g. "user" instead of # "user_id") so that the object gets properly cached (and type # checked) by the RelatedObjectDescriptor. setattr(self, field.name, rel_obj) else: setattr(self, field.attname, val) if kwargs: for prop in list(kwargs): try: if isinstance(getattr(self.__class__, prop), property): setattr(self, prop, kwargs.pop(prop)) except AttributeError: pass if kwargs: raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0]) super(Model, self).__init__() signals.post_init.send(sender=self.__class__, instance=self)
def clean(
self)
Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS.
def clean(self): """ Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS. """ pass
def clean_fields(
self, exclude=None)
Cleans all fields and raises a ValidationError containing message_dict of all validation errors if any occur.
def clean_fields(self, exclude=None): """ Cleans all fields and raises a ValidationError containing message_dict of all validation errors if any occur. """ if exclude is None: exclude = [] errors = {} for f in self._meta.fields: if f.name in exclude: continue # Skip validation for empty fields with blank=True. The developer # is responsible for making sure they have a valid value. raw_value = getattr(self, f.attname) if f.blank and raw_value in f.empty_values: continue try: setattr(self, f.attname, f.clean(raw_value, self)) except ValidationError as e: errors[f.name] = e.error_list if errors: raise ValidationError(errors)
def date_error_message(
self, lookup_type, field, unique_for)
def date_error_message(self, lookup_type, field, unique_for): opts = self._meta return _("%(field_name)s must be unique for %(date_field)s %(lookup)s.") % { 'field_name': six.text_type(capfirst(opts.get_field(field).verbose_name)), 'date_field': six.text_type(capfirst(opts.get_field(unique_for).verbose_name)), 'lookup': lookup_type, }
def delete(
self)
def delete(self): if not self.pk: raise Exception("Can't delete without a primary key") self.__class__.objects.filter(pk=self.pk).delete()
def field_label(
cls, field_name=None)
@classmethod def field_label(cls, field_name=None): if field_name: try: return cls._meta.get_field(field_name).verbose_name except FieldDoesNotExist: try: return getattr(cls, field_name).verbose_name except AttributeError: return field_name else: return cls._meta.verbose_name
def full_clean(
self, exclude=None, validate_unique=True)
Calls clean_fields, clean, and validate_unique on the model, and raises a ValidationError for any errors that occurred.
def full_clean(self, exclude=None, validate_unique=True): """ Calls clean_fields, clean, and validate_unique, on the model, and raises a ``ValidationError`` for any errors that occurred. """ errors = {} if exclude is None: exclude = [] try: self.clean_fields(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) # Form.clean() is run even if other validation fails, so do the # same with Model.clean() for consistency. try: self.clean() except ValidationError as e: errors = e.update_error_dict(errors) # Run unique checks, but only for fields that passed validation. if validate_unique: for name in errors.keys(): if name != NON_FIELD_ERRORS and name not in exclude: exclude.append(name) try: self.validate_unique(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) if errors: raise ValidationError(errors)
def get_dirty_fields(
self, check_relationship=False, check_m2m=None, verbose=False)
def get_dirty_fields(self, check_relationship=False, check_m2m=None, verbose=False): if self._state.adding: # If the object has not yet been saved in the database, all fields are considered dirty # for consistency (see https://github.com/romgar/django-dirtyfields/issues/65 for more details) pk_specified = self.pk is not None initial_dict = self._as_dict(check_relationship, include_primary_key=pk_specified) return initial_dict if check_m2m is not None and not self.ENABLE_M2M_CHECK: raise ValueError("You can't check m2m fields if ENABLE_M2M_CHECK is set to False") modified_fields = compare_states(self._as_dict(check_relationship), self._original_state, self.compare_function) if check_m2m: modified_m2m_fields = compare_states(check_m2m, self._original_m2m_state, self.compare_function) modified_fields.update(modified_m2m_fields) if not verbose: # Keeps backward compatibility with previous function return modified_fields = {key: value['saved'] for key, value in modified_fields.items()} return modified_fields
def get_dom_name(
cls)
Convert the class name to a DOM element name
@classmethod def get_dom_name(cls): "Convert the class name to a DOM element name" clsname = cls.__name__ return clsname[0].lower() + clsname[1:]
def is_dirty(
self, check_relationship=False, check_m2m=None)
def is_dirty(self, check_relationship=False, check_m2m=None): return {} != self.get_dirty_fields(check_relationship=check_relationship, check_m2m=check_m2m)
def prepare_database_save(
self, unused)
def prepare_database_save(self, unused): if self.pk is None: raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self) return self.pk
def required_fields(
cls)
@classmethod def required_fields(cls): required = [] for field in cls._meta.fields: if not field.blank: required.append(field) return required
def save(
self, force_insert=False, force_update=False, *args, **kwargs)
def save(self, force_insert=False, force_update=False, *args, **kwargs): required = self.required_fields() for field in required: if not getattr(self, field.name): raise RequiredFieldError(self, field) super(ModelCore, self).save(force_insert, force_update, *args, **kwargs)
def save_base(
self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None)
Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending.
The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading.
def save_base(self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None): """ Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending. The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading. """ using = using or router.db_for_write(self.__class__, instance=self) assert not (force_insert and (force_update or update_fields)) assert update_fields is None or len(update_fields) > 0 cls = origin = self.__class__ # Skip proxies, but keep the origin as the proxy model. if cls._meta.proxy: cls = cls._meta.concrete_model meta = cls._meta if not meta.auto_created: signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using, update_fields=update_fields) with transaction.commit_on_success_unless_managed(using=using, savepoint=False): if not raw: self._save_parents(cls, using, update_fields) updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields) # Store the database on which the object was saved self._state.db = using # Once saved, this is no longer a to-be-added instance. self._state.adding = False # Signal that the save is complete if not meta.auto_created: signals.post_save.send(sender=origin, instance=self, created=(not updated), update_fields=update_fields, raw=raw, using=using)
def save_dirty_fields(self)

def save_dirty_fields(self):
    dirty_fields = self.get_dirty_fields(check_relationship=True)
    save_specific_fields(self, dirty_fields)
def serializable_value(self, field_name)

Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly.

Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method.

def serializable_value(self, field_name):
    """
    Returns the value of the field name for this instance. If the field
    is a foreign key, returns the id value, instead of the object. If
    there's no Field object with this name on the model, the model
    attribute's value is returned directly.

    Used to serialize a field's value (in the serializer, or form output,
    for example). Normally, you would just access the attribute directly
    and not use this method.
    """
    try:
        field = self._meta.get_field_by_name(field_name)[0]
    except FieldDoesNotExist:
        return getattr(self, field_name)
    return getattr(self, field.attname)
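Sketch with hypothetical values: plain fields come back as-is, while foreign keys come back as their id only, which is what serializers and forms need.

    from telemeta.models.item import MediaItem

    item = MediaItem.objects.get(pk=1)        # hypothetical existing item
    item.serializable_value('title')          # u'...' -- the stored value itself
    item.serializable_value('collection')     # e.g. 12 -- collection_id, not a MediaCollection object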
def to_dict(self)

Return model fields as a dict of name/value pairs

def to_dict(self):
    "Return model fields as a dict of name/value pairs"
    fields_dict = {}
    for field in self._meta.fields:
        fields_dict[field.name] = getattr(self, field.name)
    return fields_dict
def to_dom(self)

Return the DOM representation of this media object

def to_dom(self):
    "Return the DOM representation of this media object"
    impl = getDOMImplementation()
    root = self.get_dom_name()
    doc = impl.createDocument(None, root, None)
    top = doc.documentElement
    top.setAttribute("id", str(self.pk))
    fields = self.to_dict()
    for name, value in fields.iteritems():
        element = doc.createElement(self.get_dom_field_name(name))
        if isinstance(value, EnhancedModel):
            element.setAttribute('key', str(value.pk))
        value = unicode(value)
        element.appendChild(doc.createTextNode(value))
        top.appendChild(element)
    return doc
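The export helpers build on each other: to_dict() feeds to_dom(), and element names come from get_dom_name()/get_dom_field_name(). A sketch of dumping an item to XML, assuming the default xml.dom.minidom implementation returned by getDOMImplementation():

    item = MediaItem.objects.get(pk=1)   # hypothetical existing item
    doc = item.to_dom()                  # Document rooted at <mediaItem id="1">
    print(doc.toxml('utf-8'))            # fields appear as e.g. <recordedFromDate>...</recordedFromDate>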
def to_list(self)

Return model fields as a list

def to_list(self):
    "Return model fields as a list"
    fields_list = []
    for field in self._meta.fields:
        fields_list.append({'name': field.name,
                            'value': unicode(getattr(self, field.name))})
    return fields_list
def unique_error_message(self, model_class, unique_check)

def unique_error_message(self, model_class, unique_check):
    opts = model_class._meta
    model_name = capfirst(opts.verbose_name)

    # A unique field
    if len(unique_check) == 1:
        field_name = unique_check[0]
        field = opts.get_field(field_name)
        field_label = capfirst(field.verbose_name)
        # Insert the error into the error dict, very sneaky
        return field.error_messages['unique'] % {
            'model_name': six.text_type(model_name),
            'field_label': six.text_type(field_label)
        }
    # unique_together
    else:
        field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check]
        field_labels = get_text_list(field_labels, _('and'))
        return _("%(model_name)s with this %(field_label)s already exists.") % {
            'model_name': six.text_type(model_name),
            'field_label': six.text_type(field_labels)
        }
def validate_unique(self, exclude=None)

Checks unique constraints on the model and raises ValidationError if any failed.

def validate_unique(self, exclude=None):
    """
    Checks unique constraints on the model and raises ``ValidationError``
    if any failed.
    """
    unique_checks, date_checks = self._get_unique_checks(exclude=exclude)

    errors = self._perform_unique_checks(unique_checks)
    date_errors = self._perform_date_checks(date_checks)

    for k, v in date_errors.items():
        errors.setdefault(k, []).extend(v)

    if errors:
        raise ValidationError(errors)
class MediaPart

Describe an item part

class MediaPart(MediaResource):
    "Describe an item part"

    element_type = 'part'

    item = ForeignKey('MediaItem', related_name="parts", verbose_name=_('item'))
    title = CharField(_('title'), required=True)
    start = FloatField(_('start'), required=True)
    end = FloatField(_('end'), required=True)

    class Meta(MetaCore):
        db_table = 'media_parts'
        verbose_name = _('item part')

    def __unicode__(self):
        return self.title
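A sketch of attaching a part to an existing item (the primary key and timings are hypothetical; start and end are floats, presumably positions within the item's recording):

    from telemeta.models.item import MediaItem, MediaPart

    item = MediaItem.objects.get(pk=1)
    part = MediaPart(item=item, title='Verse 1', start=12.5, end=48.0)
    part.save()

    item.parts.all()    # reverse relation declared by related_name="parts"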
Ancestors (in MRO)
- MediaPart
- telemeta.models.resource.MediaResource
- telemeta.models.core.ModelCore
- telemeta.models.core.EnhancedModel
- django.db.models.base.Model
- dirtyfields.dirtyfields.DirtyFieldsMixin
- __builtin__.object
Class variables
var DoesNotExist
var ENABLE_M2M_CHECK
var Meta
var MultipleObjectsReturned
var compare_function
var element_type
var end
var item
var objects
var start
var title
Static methods
def get_dom_field_name(field_name)

Convert the class name to a DOM element name

@staticmethod
def get_dom_field_name(field_name):
    "Convert the class name to a DOM element name"
    tokens = field_name.split('_')
    name = tokens[0]
    for t in tokens[1:]:
        name += t[0].upper() + t[1:]
    return name
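Despite its docstring, this helper camel-cases an underscore-separated field name; get_dom_name() above does the equivalent for the class name. For example:

    MediaItem.get_dom_field_name('recorded_from_date')   # 'recordedFromDate'
    MediaPart.get_dom_field_name('start')                # 'start'
    MediaPart.get_dom_name()                             # 'mediaPart'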
Instance variables
var pk
Methods
def __init__(self, *args, **kwargs)

def __init__(self, *args, **kwargs):
    signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs)

    # Set up the storage for instance state
    self._state = ModelState()

    # There is a rather weird disparity here; if kwargs, it's set, then args
    # overrides it. It should be one or the other; don't duplicate the work
    # The reason for the kwargs check is that standard iterator passes in by
    # args, and instantiation for iteration is 33% faster.
    args_len = len(args)
    if args_len > len(self._meta.concrete_fields):
        # Daft, but matches old exception sans the err msg.
        raise IndexError("Number of args exceeds number of fields")

    if not kwargs:
        fields_iter = iter(self._meta.concrete_fields)
        # The ordering of the zip calls matter - zip throws StopIteration
        # when an iter throws it. So if the first iter throws it, the second
        # is *not* consumed. We rely on this, so don't change the order
        # without changing the logic.
        for val, field in zip(args, fields_iter):
            setattr(self, field.attname, val)
    else:
        # Slower, kwargs-ready version.
        fields_iter = iter(self._meta.fields)
        for val, field in zip(args, fields_iter):
            setattr(self, field.attname, val)
            kwargs.pop(field.name, None)
            # Maintain compatibility with existing calls.
            if isinstance(field.rel, ManyToOneRel):
                kwargs.pop(field.attname, None)

    # Now we're left with the unprocessed fields that *must* come from
    # keywords, or default.

    for field in fields_iter:
        is_related_object = False
        # This slightly odd construct is so that we can access any
        # data-descriptor object (DeferredAttribute) without triggering its
        # __get__ method.
        if (field.attname not in kwargs and
                (isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute)
                 or field.column is None)):
            # This field will be populated on request.
            continue
        if kwargs:
            if isinstance(field.rel, ForeignObjectRel):
                try:
                    # Assume object instance was passed in.
                    rel_obj = kwargs.pop(field.name)
                    is_related_object = True
                except KeyError:
                    try:
                        # Object instance wasn't passed in -- must be an ID.
                        val = kwargs.pop(field.attname)
                    except KeyError:
                        val = field.get_default()
                else:
                    # Object instance was passed in. Special case: You can
                    # pass in "None" for related objects if it's allowed.
                    if rel_obj is None and field.null:
                        val = None
            else:
                try:
                    val = kwargs.pop(field.attname)
                except KeyError:
                    # This is done with an exception rather than the
                    # default argument on pop because we don't want
                    # get_default() to be evaluated, and then not used.
                    # Refs #12057.
                    val = field.get_default()
        else:
            val = field.get_default()
        if is_related_object:
            # If we are passed a related instance, set it using the
            # field.name instead of field.attname (e.g. "user" instead of
            # "user_id") so that the object gets properly cached (and type
            # checked) by the RelatedObjectDescriptor.
            setattr(self, field.name, rel_obj)
        else:
            setattr(self, field.attname, val)

    if kwargs:
        for prop in list(kwargs):
            try:
                if isinstance(getattr(self.__class__, prop), property):
                    setattr(self, prop, kwargs.pop(prop))
            except AttributeError:
                pass
        if kwargs:
            raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0])
    super(Model, self).__init__()
    signals.post_init.send(sender=self.__class__, instance=self)
def clean(self)

Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS.

def clean(self):
    """
    Hook for doing any extra model-wide validation after clean() has been
    called on every field by self.clean_fields. Any ValidationError raised
    by this method will not be associated with a particular field; it will
    have a special-case association with the field defined by NON_FIELD_ERRORS.
    """
    pass
def clean_fields(self, exclude=None)

Cleans all fields and raises a ValidationError containing message_dict of all validation errors if any occur.

def clean_fields(self, exclude=None):
    """
    Cleans all fields and raises a ValidationError containing message_dict
    of all validation errors if any occur.
    """
    if exclude is None:
        exclude = []

    errors = {}
    for f in self._meta.fields:
        if f.name in exclude:
            continue
        # Skip validation for empty fields with blank=True. The developer
        # is responsible for making sure they have a valid value.
        raw_value = getattr(self, f.attname)
        if f.blank and raw_value in f.empty_values:
            continue
        try:
            setattr(self, f.attname, f.clean(raw_value, self))
        except ValidationError as e:
            errors[f.name] = e.error_list

    if errors:
        raise ValidationError(errors)
def date_error_message(self, lookup_type, field, unique_for)

def date_error_message(self, lookup_type, field, unique_for):
    opts = self._meta
    return _("%(field_name)s must be unique for %(date_field)s %(lookup)s.") % {
        'field_name': six.text_type(capfirst(opts.get_field(field).verbose_name)),
        'date_field': six.text_type(capfirst(opts.get_field(unique_for).verbose_name)),
        'lookup': lookup_type,
    }
def delete(self)

def delete(self):
    if not self.pk:
        raise Exception("Can't delete without a primary key")
    self.__class__.objects.filter(pk=self.pk).delete()
def field_label(cls, field_name=None)

@classmethod
def field_label(cls, field_name=None):
    if field_name:
        try:
            return cls._meta.get_field(field_name).verbose_name
        except FieldDoesNotExist:
            try:
                return getattr(cls, field_name).verbose_name
            except AttributeError:
                return field_name
    else:
        return cls._meta.verbose_name
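field_label() resolves the translated verbose_name declared on the model and falls back to the raw name for unknown fields. For MediaPart this gives, for example:

    MediaPart.field_label('start')     # u'start' -- the field's verbose_name
    MediaPart.field_label()            # u'item part' -- the model's verbose_name
    MediaPart.field_label('missing')   # 'missing' -- unknown names are returned unchanged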
def full_clean(self, exclude=None, validate_unique=True)

Calls clean_fields, clean, and validate_unique on the model, and raises a ValidationError for any errors that occurred.

def full_clean(self, exclude=None, validate_unique=True):
    """
    Calls clean_fields, clean, and validate_unique, on the model,
    and raises a ``ValidationError`` for any errors that occurred.
    """
    errors = {}
    if exclude is None:
        exclude = []

    try:
        self.clean_fields(exclude=exclude)
    except ValidationError as e:
        errors = e.update_error_dict(errors)

    # Form.clean() is run even if other validation fails, so do the
    # same with Model.clean() for consistency.
    try:
        self.clean()
    except ValidationError as e:
        errors = e.update_error_dict(errors)

    # Run unique checks, but only for fields that passed validation.
    if validate_unique:
        for name in errors.keys():
            if name != NON_FIELD_ERRORS and name not in exclude:
                exclude.append(name)
        try:
            self.validate_unique(exclude=exclude)
        except ValidationError as e:
            errors = e.update_error_dict(errors)

    if errors:
        raise ValidationError(errors)
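Sketch of the validation pipeline (clean_fields() -> clean() -> validate_unique(), with all errors merged into one ValidationError); the field values are hypothetical:

    from django.core.exceptions import ValidationError
    from telemeta.models.item import MediaItem, MediaPart

    item = MediaItem.objects.get(pk=1)
    part = MediaPart(item=item, title='Chorus', start=48.0, end=60.0)
    try:
        part.full_clean()
    except ValidationError as e:
        print(e.message_dict)    # {field_name: [messages, ...], ...}
    else:
        part.save()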
def get_dirty_fields(self, check_relationship=False, check_m2m=None, verbose=False)

def get_dirty_fields(self, check_relationship=False, check_m2m=None, verbose=False):
    if self._state.adding:
        # If the object has not yet been saved in the database, all fields are considered dirty
        # for consistency (see https://github.com/romgar/django-dirtyfields/issues/65 for more details)
        pk_specified = self.pk is not None
        initial_dict = self._as_dict(check_relationship, include_primary_key=pk_specified)
        return initial_dict

    if check_m2m is not None and not self.ENABLE_M2M_CHECK:
        raise ValueError("You can't check m2m fields if ENABLE_M2M_CHECK is set to False")

    modified_fields = compare_states(self._as_dict(check_relationship),
                                     self._original_state,
                                     self.compare_function)

    if check_m2m:
        modified_m2m_fields = compare_states(check_m2m,
                                             self._original_m2m_state,
                                             self.compare_function)
        modified_fields.update(modified_m2m_fields)

    if not verbose:
        # Keeps backward compatibility with previous function return
        modified_fields = {key: value['saved'] for key, value in modified_fields.items()}

    return modified_fields
def get_dom_name(cls)

Convert the class name to a DOM element name

@classmethod
def get_dom_name(cls):
    "Convert the class name to a DOM element name"
    clsname = cls.__name__
    return clsname[0].lower() + clsname[1:]
def get_revision(self)

def get_revision(self):
    revisions = Revision.objects.filter(element_type=self.element_type,
                                        element_id=self.id).order_by('-time')
    if revisions:
        return revisions[0]
    else:
        return None
def is_dirty(self, check_relationship=False, check_m2m=None)

def is_dirty(self, check_relationship=False, check_m2m=None):
    return {} != self.get_dirty_fields(check_relationship=check_relationship,
                                       check_m2m=check_m2m)
def prepare_database_save(self, unused)

def prepare_database_save(self, unused):
    if self.pk is None:
        raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self)
    return self.pk
def public_access_label(self)

def public_access_label(self):
    if self.public_access == 'metadata':
        return _('Metadata only')
    elif self.public_access == 'full':
        return _('Sound and metadata')
    return _('Private data')
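The label maps the stored public_access code to a display string: 'metadata' -> 'Metadata only', 'full' -> 'Sound and metadata', anything else (including 'none') -> 'Private data'. For example, on an item:

    item = MediaItem.objects.get(pk=1)   # hypothetical existing item
    item.public_access = 'full'
    item.public_access_label()           # u'Sound and metadata'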
def required_fields(cls)

@classmethod
def required_fields(cls):
    required = []
    for field in cls._meta.fields:
        if not field.blank:
            required.append(field)
    return required
def save(self, force_insert=False, force_update=False, *args, **kwargs)

def save(self, force_insert=False, force_update=False, *args, **kwargs):
    required = self.required_fields()
    for field in required:
        if not getattr(self, field.name):
            raise RequiredFieldError(self, field)
    super(ModelCore, self).save(force_insert, force_update, *args, **kwargs)
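A sketch of how required_fields() and save() interact, assuming that RequiredFieldError is exported by telemeta.models.core (as the wildcard import in this module suggests) and that no inherited MediaResource field is also required and left empty. Note the check uses truthiness, so an empty string counts as missing.

    from telemeta.models.core import RequiredFieldError   # assumed location of the exception
    from telemeta.models.item import MediaItem, MediaPart

    item = MediaItem.objects.get(pk=1)                     # hypothetical existing item
    part = MediaPart(item=item, title='', start=1.0, end=10.0)
    try:
        part.save()                                        # title is declared required=True
    except RequiredFieldError as e:
        print(e)                                           # raised before any SQL is issued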
def save_base(self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None)

Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending.

The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading.

def save_base(self, raw=False, force_insert=False, force_update=False,
              using=None, update_fields=None):
    """
    Handles the parts of saving which should be done only once per save,
    yet need to be done in raw saves, too. This includes some sanity
    checks and signal sending.

    The 'raw' argument is telling save_base not to save any parent
    models and not to do any changes to the values before save. This
    is used by fixture loading.
    """
    using = using or router.db_for_write(self.__class__, instance=self)
    assert not (force_insert and (force_update or update_fields))
    assert update_fields is None or len(update_fields) > 0
    cls = origin = self.__class__
    # Skip proxies, but keep the origin as the proxy model.
    if cls._meta.proxy:
        cls = cls._meta.concrete_model
    meta = cls._meta
    if not meta.auto_created:
        signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using,
                              update_fields=update_fields)
    with transaction.commit_on_success_unless_managed(using=using, savepoint=False):
        if not raw:
            self._save_parents(cls, using, update_fields)
        updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields)
    # Store the database on which the object was saved
    self._state.db = using
    # Once saved, this is no longer a to-be-added instance.
    self._state.adding = False

    # Signal that the save is complete
    if not meta.auto_created:
        signals.post_save.send(sender=origin, instance=self, created=(not updated),
                               update_fields=update_fields, raw=raw, using=using)
def save_dirty_fields(self)

def save_dirty_fields(self):
    dirty_fields = self.get_dirty_fields(check_relationship=True)
    save_specific_fields(self, dirty_fields)
def serializable_value(self, field_name)

Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly.

Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method.

def serializable_value(self, field_name):
    """
    Returns the value of the field name for this instance. If the field
    is a foreign key, returns the id value, instead of the object. If
    there's no Field object with this name on the model, the model
    attribute's value is returned directly.

    Used to serialize a field's value (in the serializer, or form output,
    for example). Normally, you would just access the attribute directly
    and not use this method.
    """
    try:
        field = self._meta.get_field_by_name(field_name)[0]
    except FieldDoesNotExist:
        return getattr(self, field_name)
    return getattr(self, field.attname)
def set_revision(self, user)

Save a media object and add a revision

def set_revision(self, user):
    "Save a media object and add a revision"
    Revision.touch(self, user)
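Sketch of the revision helpers: set_revision() records a Revision via Revision.touch(), and get_revision() (documented above) returns the most recent one, or None. The user argument would typically be the current request.user (hypothetical here).

    part = MediaPart.objects.get(pk=1)   # hypothetical existing part
    part.set_revision(request.user)      # records a Revision for this element
    latest = part.get_revision()
    if latest is not None:
        print(latest.time)               # get_revision() orders by -time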
def to_dict(self)

Return model fields as a dict of name/value pairs

def to_dict(self):
    "Return model fields as a dict of name/value pairs"
    fields_dict = {}
    for field in self._meta.fields:
        fields_dict[field.name] = getattr(self, field.name)
    return fields_dict
def to_dom(self)

Return the DOM representation of this media object

def to_dom(self):
    "Return the DOM representation of this media object"
    impl = getDOMImplementation()
    root = self.get_dom_name()
    doc = impl.createDocument(None, root, None)
    top = doc.documentElement
    top.setAttribute("id", str(self.pk))
    fields = self.to_dict()
    for name, value in fields.iteritems():
        element = doc.createElement(self.get_dom_field_name(name))
        if isinstance(value, EnhancedModel):
            element.setAttribute('key', str(value.pk))
        value = unicode(value)
        element.appendChild(doc.createTextNode(value))
        top.appendChild(element)
    return doc
def to_list(self)

Return model fields as a list

def to_list(self):
    "Return model fields as a list"
    fields_list = []
    for field in self._meta.fields:
        fields_list.append({'name': field.name,
                            'value': unicode(getattr(self, field.name))})
    return fields_list
def unique_error_message(self, model_class, unique_check)

def unique_error_message(self, model_class, unique_check):
    opts = model_class._meta
    model_name = capfirst(opts.verbose_name)

    # A unique field
    if len(unique_check) == 1:
        field_name = unique_check[0]
        field = opts.get_field(field_name)
        field_label = capfirst(field.verbose_name)
        # Insert the error into the error dict, very sneaky
        return field.error_messages['unique'] % {
            'model_name': six.text_type(model_name),
            'field_label': six.text_type(field_label)
        }
    # unique_together
    else:
        field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check]
        field_labels = get_text_list(field_labels, _('and'))
        return _("%(model_name)s with this %(field_label)s already exists.") % {
            'model_name': six.text_type(model_name),
            'field_label': six.text_type(field_labels)
        }
def validate_unique(self, exclude=None)

Checks unique constraints on the model and raises ValidationError if any failed.

def validate_unique(self, exclude=None):
    """
    Checks unique constraints on the model and raises ``ValidationError``
    if any failed.
    """
    unique_checks, date_checks = self._get_unique_checks(exclude=exclude)

    errors = self._perform_unique_checks(unique_checks)
    date_errors = self._perform_date_checks(date_checks)

    for k, v in date_errors.items():
        errors.setdefault(k, []).extend(v)

    if errors:
        raise ValidationError(errors)