def end_object(self, obj):
    """Called when serializing of an object ends."""
    pass

def handle_field(self, obj, field):
    """Called to handle each individual (non-relational) field on an object."""
    raise NotImplementedError

def handle_fk_field(self, obj, field):
    """Called to handle a ForeignKey field."""
    raise NotImplementedError

def handle_m2m_field(self, obj, field):
    """Called to handle a ManyToManyField."""
    raise NotImplementedError

def getvalue(self):
    """Return the fully serialized queryset (or None if the output stream is not seekable)."""
    if callable(getattr(self.stream, 'getvalue', None)):
        return self.stream.getvalue()
def __init__(self, stream_or_string, **options):
    """Init this serializer given a stream or a string."""
    self.options = options
    if isinstance(stream_or_string, six.string_types):
        self.stream = six.StringIO(stream_or_string)
    else:
        self.stream = stream_or_string
    # Make sure all model classes have been loaded before deserialization starts.
    models.get_apps()
def __next__(self):
    """Iteration interface -- return the next item in the stream."""
    raise NotImplementedError
def start_serialization(self):
    """Start serialization -- open the XML document and the root element."""
    self.xml = SimplerXMLGenerator(self.stream, self.options.get(u'encoding', settings.DEFAULT_CHARSET))
    self.xml.startDocument()
    self.xml.startElement(u'django-objects', {u'version': u'1.0'})

def end_serialization(self):
    """End serialization -- end the document."""
    self.indent(0)
    self.xml.endElement(u'django-objects')
    self.xml.endDocument()
def start_object(self, obj):
    """Called as each object is handled."""
    if not hasattr(obj, u'_meta'):
        raise base.SerializationError(u'Non-model object (%s) encountered during serialization' % type(obj))
    self.indent(1)
    obj_pk = obj._get_pk_val()
    if obj_pk is None:
        attrs = {u'model': smart_text(obj._meta)}
    else:
        attrs = {u'pk': smart_text(obj._get_pk_val()), u'model': smart_text(obj._meta)}
    self.xml.startElement(u'object', attrs)

def end_object(self, obj):
    """Called after handling all fields for an object."""
    self.indent(1)
    self.xml.endElement(u'object')

def handle_field(self, obj, field):
    """Called to handle each field on an object (except for ForeignKeys and ManyToManyFields)."""
    self.indent(2)
    self.xml.startElement(u'field', {u'name': field.name, u'type': field.get_internal_type()})
    if getattr(obj, field.name) is not None:
        self.xml.characters(field.value_to_string(obj))
    else:
        self.xml.addQuickElement(u'None')
    self.xml.endElement(u'field')
def handle_fk_field(self, obj, field):
    """Called to handle a ForeignKey (we need to treat them slightly differently from regular fields)."""
    self._start_relational_field(field)
    related_att = getattr(obj, field.get_attname())
    if related_att is not None:
        if self.use_natural_keys and hasattr(field.rel.to, u'natural_key'):
            related = getattr(obj, field.name)
            related = related.natural_key()
            for key_value in related:
                self.xml.startElement(u'natural', {})
                self.xml.characters(smart_text(key_value))
                self.xml.endElement(u'natural')
        else:
            self.xml.characters(smart_text(related_att))
    else:
        self.xml.addQuickElement(u'None')
    self.xml.endElement(u'field')
def handle_m2m_field(self, obj, field):
    """
    Called to handle a ManyToManyField. Related objects are only
    serialized as references to the object's PK (i.e. the related
    *data* is not dumped, just the relation).
    """
    if field.rel.through._meta.auto_created:
        self._start_relational_field(field)
        if self.use_natural_keys and hasattr(field.rel.to, u'natural_key'):
            def handle_m2m(value):
                natural = value.natural_key()
                self.xml.startElement(u'object', {})
                for key_value in natural:
                    self.xml.startElement(u'natural', {})
                    self.xml.characters(smart_text(key_value))
                    self.xml.endElement(u'natural')
                self.xml.endElement(u'object')
        else:
            def handle_m2m(value):
                self.xml.addQuickElement(u'object', attrs={u'pk': smart_text(value._get_pk_val())})
        for relobj in getattr(obj, field.name).iterator():
            handle_m2m(relobj)
        self.xml.endElement(u'field')
def _start_relational_field(self, field):
    """Helper to output the <field> element for relational fields."""
    self.indent(2)
    self.xml.startElement(u'field', {u'name': field.name, u'rel': field.rel.__class__.__name__, u'to': smart_text(field.rel.to._meta)})
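A brief usage sketch of this serializer through Django's public entry point; the `Author` model is hypothetical, and the output shape is inferred from the startElement calls above.

# Illustrative sketch -- assumes a configured Django project; `Author` is hypothetical.
from django.core import serializers

xml = serializers.serialize('xml', Author.objects.all())
# The methods above emit a document shaped like:
# <django-objects version="1.0">
#   <object pk="1" model="app.author">
#     <field name="name" type="CharField">...</field>
#   </object>
# </django-objects>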
def _make_parser(self):
    """Create a hardened XML parser (no custom/external entities)."""
    return DefusedExpatParser()

def _handle_object(self, node):
    """Convert an <object> node to a DeserializedObject."""
    Model = self._get_model_from_node(node, u'model')
    if node.hasAttribute(u'pk'):
        pk = node.getAttribute(u'pk')
    else:
        pk = None
    data = {Model._meta.pk.attname: Model._meta.pk.to_python(pk)}
    m2m_data = {}
    for field_node in node.getElementsByTagName(u'field'):
        field_name = field_node.getAttribute(u'name')
        if not field_name:
            raise base.DeserializationError(u"<field> node is missing the 'name' attribute")
        field = Model._meta.get_field(field_name)
        if field.rel and isinstance(field.rel, models.ManyToManyRel):
            m2m_data[field.name] = self._handle_m2m_field_node(field_node, field)
        elif field.rel and isinstance(field.rel, models.ManyToOneRel):
            data[field.attname] = self._handle_fk_field_node(field_node, field)
        else:
            if field_node.getElementsByTagName(u'None'):
                value = None
            else:
                value = field.to_python(getInnerText(field_node).strip())
            data[field.name] = value
    return base.DeserializedObject(Model(**data), m2m_data)
def _handle_fk_field_node(self, node, field):
    """Handle a <field> node for a ForeignKey."""
    if node.getElementsByTagName(u'None'):
        return None
    elif hasattr(field.rel.to._default_manager, u'get_by_natural_key'):
        keys = node.getElementsByTagName(u'natural')
        if keys:
            field_value = [getInnerText(k).strip() for k in keys]
            obj = field.rel.to._default_manager.db_manager(self.db).get_by_natural_key(*field_value)
            obj_pk = getattr(obj, field.rel.field_name)
            if field.rel.to._meta.pk.rel:
                obj_pk = obj_pk.pk
        else:
            field_value = getInnerText(node).strip()
            obj_pk = field.rel.to._meta.get_field(field.rel.field_name).to_python(field_value)
        return obj_pk
    else:
        field_value = getInnerText(node).strip()
        return field.rel.to._meta.get_field(field.rel.field_name).to_python(field_value)
def _handle_m2m_field_node(self, node, field):
    """Handle a <field> node for a ManyToManyField."""
    if hasattr(field.rel.to._default_manager, u'get_by_natural_key'):
        def m2m_convert(n):
            keys = n.getElementsByTagName(u'natural')
            if keys:
                field_value = [getInnerText(k).strip() for k in keys]
                obj_pk = field.rel.to._default_manager.db_manager(self.db).get_by_natural_key(*field_value).pk
            else:
                obj_pk = field.rel.to._meta.pk.to_python(n.getAttribute(u'pk'))
            return obj_pk
    else:
        m2m_convert = lambda n: field.rel.to._meta.pk.to_python(n.getAttribute(u'pk'))
    return [m2m_convert(c) for c in node.getElementsByTagName(u'object')]
def _get_model_from_node(self, node, attr):
    """Helper to look up a model from a <object model=...> or a <field rel=... to=...> node."""
    model_identifier = node.getAttribute(attr)
    if not model_identifier:
        raise base.DeserializationError(u"<%s> node is missing the required '%s' attribute" % (node.nodeName, attr))
    try:
        Model = models.get_model(*model_identifier.split(u'.'))
    except TypeError:
        Model = None
    if Model is None:
        raise base.DeserializationError(u"<%s> node has invalid model identifier: '%s'" % (node.nodeName, model_identifier))
    return Model
def make_key(self, key, version=None):
    """
    Constructs the key used by all other methods. By default it uses
    key_func to generate a key (which, by default, prepends the
    `key_prefix` and `version`). A different key function can be
    provided at the time of cache construction; alternatively, you can
    subclass the cache backend to provide custom key-making behavior.
    """
    if version is None:
        version = self.version
    new_key = self.key_func(key, self.key_prefix, version)
    return new_key
def add(self, key, value, timeout=None, version=None):
    """
    Set a value in the cache if the key does not already exist. If
    timeout is given, that timeout will be used for the key; otherwise
    the default cache timeout will be used. Returns True if the value
    was stored, False otherwise.
    """
    raise NotImplementedError

def get(self, key, default=None, version=None):
    """
    Fetch a given key from the cache. If the key does not exist, return
    default, which itself defaults to None.
    """
    raise NotImplementedError

def set(self, key, value, timeout=None, version=None):
    """
    Set a value in the cache. If timeout is given, that timeout will be
    used for the key; otherwise the default cache timeout will be used.
    """
    raise NotImplementedError

def delete(self, key, version=None):
    """Delete a key from the cache, failing silently."""
    raise NotImplementedError

def get_many(self, keys, version=None):
    """
    Fetch a bunch of keys from the cache. For certain backends
    (memcached, pgsql) this can be *much* faster when fetching multiple
    values. Returns a dict mapping each key in keys to its value. If the
    given key is missing, it will be missing from the response dict.
    """
    d = {}
    for k in keys:
        val = self.get(k, version=version)
        if val is not None:
            d[k] = val
    return d
def has_key(self, key, version=None):
    """Returns True if the key is in the cache and has not expired."""
    return self.get(key, version=version) is not None

def incr(self, key, delta=1, version=None):
    """
    Add delta to value in the cache. If the key does not exist, raise a
    ValueError exception.
    """
    value = self.get(key, version=version)
    if value is None:
        raise ValueError(u"Key '%s' not found" % key)
    new_value = value + delta
    self.set(key, new_value, version=version)
    return new_value

def decr(self, key, delta=1, version=None):
    """
    Subtract delta from value in the cache. If the key does not exist,
    raise a ValueError exception.
    """
    return self.incr(key, -delta, version=version)
def __contains__(self, key):
    """Returns True if the key is in the cache and has not expired."""
    return self.has_key(key)
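A usage sketch of the BaseCache API above; it assumes a configured cache backend, and the key names are hypothetical.

# Illustrative sketch -- assumes a configured cache backend; key names are hypothetical.
from django.core.cache import cache

cache.set('greeting', 'hello', timeout=30)   # expires after 30 seconds
cache.add('greeting', 'ignored')             # False: key already exists
cache.get('greeting')                        # 'hello'
cache.get('missing', 'fallback')             # 'fallback'
cache.get_many(['greeting', 'missing'])      # {'greeting': 'hello'}
cache.set('hits', 1)
cache.incr('hits')                           # 2 (raises ValueError if the key is absent)
'greeting' in cache                          # True, via __contains__/has_key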
def set_many(self, data, timeout=None, version=None):
    """
    Set a bunch of values in the cache at once from a dict of key/value
    pairs. For certain backends (memcached), this is much more efficient
    than calling set() multiple times. If timeout is given, that timeout
    will be used for the key; otherwise the default cache timeout will
    be used.
    """
    for key, value in data.items():
        self.set(key, value, timeout=timeout, version=version)
def delete_many(self, keys, version=None):
    """
    Delete a bunch of keys from the cache at once. For certain backends
    (memcached), this is much more efficient than calling delete()
    multiple times.
    """
    for key in keys:
        self.delete(key, version=version)
def clear(self):
    """Remove *all* values from the cache at once."""
    raise NotImplementedError

def validate_key(self, key):
    """
    Warn about keys that would not be portable to the memcached backend.
    This encourages (but does not force) writing backend-portable cache
    code.
    """
    if len(key) > MEMCACHE_MAX_KEY_LENGTH:
        warnings.warn(u'Cache key will cause errors if used with memcached: %s (longer than %s)' % (key, MEMCACHE_MAX_KEY_LENGTH), CacheKeyWarning)
    for char in key:
        if ord(char) < 33 or ord(char) == 127:
            warnings.warn(u'Cache key contains characters that will cause errors if used with memcached: %r' % key, CacheKeyWarning)
def incr_version(self, key, delta=1, version=None):
    """
    Adds delta to the cache version for the supplied key. Returns the
    new version.
    """
    if version is None:
        version = self.version
    value = self.get(key, version=version)
    if value is None:
        raise ValueError(u"Key '%s' not found" % key)
    self.set(key, value, version=version + delta)
    self.delete(key, version=version)
    return version + delta
def decr_version(self, key, delta=1, version=None):
    """
    Subtracts delta from the cache version for the supplied key. Returns
    the new version.
    """
    return self.incr_version(key, -delta, version)
def _key_to_file(self, key):
    """
    Convert the cache key into an md5-based file path. We'll turn the
    first couple bits of the path into directory prefixes to be nice to
    filesystems that have problems with large numbers of files in a
    directory. Thus, a cache key of "foo" gets turned into a file named
    ``{cache-dir}ac/bd/18db4cc2f85cedef654fccc4a4d8``.
    """
    path = hashlib.md5(force_bytes(key)).hexdigest()
    path = os.path.join(path[:2], path[2:4], path[4:])
    return os.path.join(self._dir, path)
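The hashing scheme can be reproduced with the standard library alone; the md5 of "foo" is acbd18db4cc2f85cedef654fccc4a4d8, which matches the docstring's example path.

import hashlib
import os

digest = hashlib.md5(b'foo').hexdigest()  # 'acbd18db4cc2f85cedef654fccc4a4d8'
path = os.path.join(digest[:2], digest[2:4], digest[4:])
print(path)  # ac/bd/18db4cc2f85cedef654fccc4a4d8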
@property
def _cache(self):
    """Implements transparent thread-safe access to a memcached client."""
    if getattr(self, '_client', None) is None:
        self._client = self._lib.Client(self._servers)
    return self._client

def _get_memcache_timeout(self, timeout):
    """
    Memcached deals with long (> 30 days) timeouts in a special way. Call
    this function to obtain a safe value for your timeout.
    """
    timeout = timeout or self.default_timeout
    if timeout > 2592000:
        timeout += int(time.time())
    return int(timeout)
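A standalone sketch of the rule above: memcached treats values over 30 days (2592000 seconds) as absolute Unix timestamps, so larger relative timeouts must be converted; the default timeout value here is an assumption.

import time

THIRTY_DAYS = 2592000  # seconds; memcached's threshold for relative timeouts

def memcache_safe_timeout(timeout, default_timeout=300):
    # Mirrors _get_memcache_timeout: beyond 30 days, send an absolute epoch time.
    timeout = timeout or default_timeout
    if timeout > THIRTY_DAYS:
        timeout += int(time.time())
    return int(timeout)

memcache_safe_timeout(60)               # 60 -- stays relative
memcache_safe_timeout(THIRTY_DAYS + 1)  # now + 2592001 -- an absolute epoch time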
def __call__(self, value):
    """Validates that the input matches the regular expression."""
    if not self.regex.search(force_text(value)):
        raise ValidationError(self.message, code=self.code)
def __new__(cls, target, onDelete=None, *arguments, **named):
    """
    Create new instance or return current instance.

    Basically this method of construction allows us to short-circuit
    creation of references to already-referenced instance methods. The
    key corresponding to the target is calculated, and if there is
    already an existing reference, that is returned, with its
    deletionMethods attribute updated. Otherwise the new instance is
    created and registered in the table of already-referenced methods.
    """
    key = cls.calculateKey(target)
    current = cls._allInstances.get(key)
    if current is not None:
        current.deletionMethods.append(onDelete)
        return current
    else:
        base = super(BoundMethodWeakref, cls).__new__(cls)
        cls._allInstances[key] = base
        base.__init__(target, onDelete, *arguments, **named)
        return base
def __init__(self, target, onDelete=None):
    """
    Return a weak-reference-like instance for a bound method.

    target -- the instance-method target for the weak reference, must
        have __self__ and __func__ attributes and be reconstructable
        via: target.__func__.__get__(target.__self__), which is true
        of built-in instance methods.
    onDelete -- optional callback which will be called when this weak
        reference ceases to be valid (i.e. either the object or the
        function is garbage collected). Should take a single argument,
        which will be passed a pointer to this object.
    """
    def remove(weak, self=self):
        """Set self.isDead to true when method or instance is destroyed."""
        methods = self.deletionMethods[:]
        del self.deletionMethods[:]
        try:
            del self.__class__._allInstances[self.key]
        except KeyError:
            pass
        for function in methods:
            try:
                if callable(function):
                    function(self)
            except Exception as e:
                try:
                    traceback.print_exc()
                except AttributeError:
                    print('Exception during saferef %s cleanup function %s: %s' % (self, function, e))
    self.deletionMethods = [onDelete]
    self.key = self.calculateKey(target)
    self.weakSelf = weakref.ref(target.__self__, remove)
    self.weakFunc = weakref.ref(target.__func__, remove)
    self.selfName = str(target.__self__)
    self.funcName = str(target.__func__.__name__)
@classmethod
def calculateKey(cls, target):
    """
    Calculate the reference key for this reference.

    Currently this is a two-tuple of the id()'s of the target object and
    the target function respectively.
    """
    return (id(target.__self__), id(target.__func__))
def __str__(self):
    """Give a friendly representation of the object."""
    return '%s( %s.%s )' % (self.__class__.__name__, self.selfName, self.funcName)

def __bool__(self):
    """Whether we are still a valid reference."""
    return self() is not None

def __eq__(self, other):
    """Compare with another reference."""
    if not isinstance(other, self.__class__):
        return self.__class__ == type(other)
    return self.key == other.key

def __call__(self):
    """
    Return a strong reference to the bound method.

    If the target cannot be retrieved, then will return None, otherwise
    returns a bound instance method for our object and function.

    Note: You may call this method any number of times, as it does not
    invalidate the reference.
    """
    target = self.weakSelf()
    if target is not None:
        function = self.weakFunc()
        if function is not None:
            return function.__get__(target)
    return None
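A minimal sketch of the pattern via the saferef helper named later in connect()'s docstring; the Counter class is hypothetical, and the final assertion relies on CPython's reference counting collecting `c` immediately.

# Illustrative sketch -- Counter is hypothetical; relies on CPython refcounting.
from django.dispatch import saferef

class Counter(object):
    def bump(self):
        return 1

c = Counter()
ref = saferef.safeRef(c.bump)  # a BoundMethodWeakref under the hood
assert ref()() == 1            # __call__ returns a strong bound method while c lives
del c
assert ref() is None           # target collected, so the reference is dead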
def __init__(self, target, onDelete=None):
    """
    Return a weak-reference-like instance for a bound method.

    target -- the instance-method target for the weak reference, must
        have __self__ and __func__ attributes and be reconstructable
        via: target.__func__.__get__(target.__self__), which is true
        of built-in instance methods.
    onDelete -- optional callback which will be called when this weak
        reference ceases to be valid (i.e. either the object or the
        function is garbage collected). Should take a single argument,
        which will be passed a pointer to this object.
    """
    assert getattr(target.__self__, target.__name__) == target, \
        "method %s isn't available as the attribute %s of %s" % (target, target.__name__, target.__self__)
    super(BoundNonDescriptorMethodWeakref, self).__init__(target, onDelete)

def __call__(self):
    """
    Return a strong reference to the bound method.

    If the target cannot be retrieved, then will return None, otherwise
    returns a bound instance method for our object and function.

    Note: You may call this method any number of times, as it does not
    invalidate the reference.
    """
    target = self.weakSelf()
    if target is not None:
        function = self.weakFunc()
        if function is not None:
            return getattr(target, function.__name__)
    return None
def __init__(self, providing_args=None):
    """
    Create a new signal.

    providing_args
        A list of the arguments this signal can pass along in a send()
        call.
    """
    self.receivers = []
    if providing_args is None:
        providing_args = []
    self.providing_args = set(providing_args)
    self.lock = threading.Lock()
def connect(self, receiver, sender=None, weak=True, dispatch_uid=None):
    """
    Connect receiver to sender for signal.

    Arguments:

        receiver
            A function or an instance method which is to receive signals.
            Receivers must be hashable objects.

            If weak is True, then receiver must be weak-referencable (more
            precisely saferef.safeRef() must be able to create a reference
            to the receiver).

            Receivers must be able to accept keyword arguments.

            If receivers have a dispatch_uid attribute, the receiver will
            not be added if another receiver already exists with that
            dispatch_uid.

        sender
            The sender to which the receiver should respond. Must either
            be of type Signal, or None to receive events from any sender.

        weak
            Whether to use weak references to the receiver. By default,
            the module will attempt to use weak references to the
            receiver objects. If this parameter is false, then strong
            references will be used.

        dispatch_uid
            An identifier used to uniquely identify a particular instance
            of a receiver. This will usually be a string, though it may
            be anything hashable.
    """
    from django.conf import settings
    if settings.DEBUG:
        import inspect
        assert callable(receiver), 'Signal receivers must be callable.'
        try:
            argspec = inspect.getargspec(receiver)
        except TypeError:
            try:
                argspec = inspect.getargspec(receiver.__call__)
            except (TypeError, AttributeError):
                argspec = None
        if argspec:
            assert argspec[2] is not None, 'Signal receivers must accept keyword arguments (**kwargs).'
    if dispatch_uid:
        lookup_key = (dispatch_uid, _make_id(sender))
    else:
        lookup_key = (_make_id(receiver), _make_id(sender))
    if weak:
        receiver = saferef.safeRef(receiver, onDelete=self._remove_receiver)
    with self.lock:
        for r_key, _ in self.receivers:
            if r_key == lookup_key:
                break
        else:
            self.receivers.append((lookup_key, receiver))
def disconnect(self, receiver=None, sender=None, weak=True, dispatch_uid=None):
    """
    Disconnect receiver from sender for signal.

    If weak references are used, disconnect need not be called. The
    receiver will be removed from dispatch automatically.

    Arguments:

        receiver
            The registered receiver to disconnect. May be None if
            dispatch_uid is specified.

        sender
            The registered sender to disconnect.

        weak
            The weakref state to disconnect.

        dispatch_uid
            The unique identifier of the receiver to disconnect.
    """
    if dispatch_uid:
        lookup_key = (dispatch_uid, _make_id(sender))
    else:
        lookup_key = (_make_id(receiver), _make_id(sender))
    with self.lock:
        for index in xrange(len(self.receivers)):
            (r_key, _) = self.receivers[index]
            if r_key == lookup_key:
                del self.receivers[index]
                break
def send(self, sender, **named):
    """
    Send signal from sender to all connected receivers.

    If any receiver raises an error, the error propagates back through
    send, terminating the dispatch loop, so it is quite possible to not
    have all receivers called if a receiver raises an error.

    Arguments:

        sender
            The sender of the signal. Either a specific object or None.

        named
            Named arguments which will be passed to receivers.

    Returns a list of tuple pairs [(receiver, response), ... ].
    """
    responses = []
    if not self.receivers:
        return responses
    for receiver in self._live_receivers(_make_id(sender)):
        response = receiver(signal=self, sender=sender, **named)
        responses.append((receiver, response))
    return responses
def send_robust(self, sender, **named):
    """
    Send signal from sender to all connected receivers catching errors.

    Arguments:

        sender
            The sender of the signal. Can be any python object (normally
            one registered with a connect if you actually want something
            to occur).

        named
            Named arguments which will be passed to receivers. These
            arguments must be a subset of the argument names defined in
            providing_args.

    Return a list of tuple pairs [(receiver, response), ... ]. May raise
    DispatcherKeyError.

    If any receiver raises an error (specifically any subclass of
    Exception), the error instance is returned as the result for that
    receiver.
    """
    responses = []
    if not self.receivers:
        return responses
    for receiver in self._live_receivers(_make_id(sender)):
        try:
            response = receiver(signal=self, sender=sender, **named)
        except Exception as err:
            responses.append((receiver, err))
        else:
            responses.append((receiver, response))
    return responses
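A usage sketch of the Signal API above; the signal and receiver names are hypothetical.

# Illustrative sketch -- `pizza_done` and the receiver are hypothetical names.
import django.dispatch

pizza_done = django.dispatch.Signal(providing_args=['toppings', 'size'])

def receiver(sender, **kwargs):
    # Receivers must accept **kwargs (asserted when settings.DEBUG is True).
    return kwargs['toppings']

pizza_done.connect(receiver, dispatch_uid='pizza-logger')
pizza_done.send(sender=None, toppings=['cheese'], size='L')
# -> [(receiver, ['cheese'])]; send_robust would trap receiver exceptions instead
pizza_done.disconnect(dispatch_uid='pizza-logger')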
def _live_receivers(self, senderkey):
    """
    Filter sequence of receivers to get resolved, live receivers.

    This checks for weak references, resolves them, and then returns only
    live receivers.
    """
    none_senderkey = _make_id(None)
    receivers = []
    for (receiverkey, r_senderkey), receiver in self.receivers:
        if r_senderkey == none_senderkey or r_senderkey == senderkey:
            if isinstance(receiver, WEAKREF_TYPES):
                receiver = receiver()
                if receiver is not None:
                    receivers.append(receiver)
            else:
                receivers.append(receiver)
    return receivers

def _remove_receiver(self, receiver):
    """Remove dead receivers from connections."""
    with self.lock:
        to_remove = []
        for key, connected_receiver in self.receivers:
            if connected_receiver == receiver:
                to_remove.append(key)
        for key in to_remove:
            last_idx = len(self.receivers) - 1
            # Enumerate in reverse so indexes stay valid while deleting.
            for idx, (r_key, _) in enumerate(reversed(self.receivers)):
                if r_key == key:
                    del self.receivers[last_idx - idx]
def _prepare(cls):
    """Creates some methods once self._meta has been populated."""
    opts = cls._meta
    opts._prepare(cls)
    if opts.order_with_respect_to:
        cls.get_next_in_order = curry(cls._get_next_or_previous_in_order, is_next=True)
        cls.get_previous_in_order = curry(cls._get_next_or_previous_in_order, is_next=False)

        def make_foreign_order_accessors(field, model, cls):
            setattr(field.rel.to, u'get_%s_order' % cls.__name__.lower(), curry(method_get_order, cls))
            setattr(field.rel.to, u'set_%s_order' % cls.__name__.lower(), curry(method_set_order, cls))
        add_lazy_relation(cls, opts.order_with_respect_to, opts.order_with_respect_to.rel.to, make_foreign_order_accessors)
    if cls.__doc__ is None:
        cls.__doc__ = u'%s(%s)' % (cls.__name__, u', '.join([f.attname for f in opts.fields]))
    if hasattr(cls, u'get_absolute_url'):
        cls.get_absolute_url = update_wrapper(curry(get_absolute_url, opts, cls.get_absolute_url), cls.get_absolute_url)
    signals.class_prepared.send(sender=cls)
def __reduce__(self):
    """
    Provides pickling support. Normally, this just dispatches to Python's
    standard handling. However, for models with deferred field loading,
    we need to do things manually, as they're dynamically created classes
    and only module-level classes can be pickled by the default path.
    """
    if not self._deferred:
        return super(Model, self).__reduce__()
    data = self.__dict__
    defers = []
    for field in self._meta.fields:
        if isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute):
            defers.append(field.attname)
    model = self._meta.proxy_for_model
    return (model_unpickle, (model, defers), data)
def serializable_value(self, field_name):
    """
    Returns the value of the field name for this instance. If the field
    is a foreign key, returns the id value, instead of the object. If
    there's no Field object with this name on the model, the model
    attribute's value is returned directly.

    Used to serialize a field's value (in the serializer, or form output,
    for example). Normally, you would just access the attribute directly
    and not use this method.
    """
    try:
        field = self._meta.get_field_by_name(field_name)[0]
    except FieldDoesNotExist:
        return getattr(self, field_name)
    return getattr(self, field.attname)
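A usage sketch of the three branches above; the Book model and its attributes are hypothetical.

# Illustrative sketch -- Book is a hypothetical model in a configured project.
book = Book.objects.get(pk=1)
book.serializable_value('title')     # a plain field: same as book.title
book.serializable_value('author')    # a ForeignKey: returns book.author_id, not an Author
book.serializable_value('nickname')  # no Field named 'nickname': attribute returned directly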
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
    """
    Saves the current instance. Override this in a subclass if you want
    to control the saving process.

    The 'force_insert' and 'force_update' parameters can be used to
    insist that the "save" must be an SQL insert or update (or equivalent
    for non-SQL backends), respectively. Normally, they should not be
    set.
    """
    using = using or router.db_for_write(self.__class__, instance=self)
    if force_insert and (force_update or update_fields):
        raise ValueError(u'Cannot force both insert and updating in model saving.')
    if update_fields is not None:
        if len(update_fields) == 0:
            return
        update_fields = frozenset(update_fields)
        field_names = set()
        for field in self._meta.fields:
            if not field.primary_key:
                field_names.add(field.name)
                if field.name != field.attname:
                    field_names.add(field.attname)
        non_model_fields = update_fields.difference(field_names)
        if non_model_fields:
            raise ValueError(u'The following fields do not exist in this model or are m2m fields: %s' % u', '.join(non_model_fields))
    elif not force_insert and self._deferred and using == self._state.db:
        field_names = set()
        for field in self._meta.fields:
            if not field.primary_key and not hasattr(field, u'through'):
                field_names.add(field.attname)
        deferred_fields = [
            f.attname for f in self._meta.fields
            if f.attname not in self.__dict__ and isinstance(self.__class__.__dict__[f.attname], DeferredAttribute)
        ]
        loaded_fields = field_names.difference(deferred_fields)
        if loaded_fields:
            update_fields = frozenset(loaded_fields)
    self.save_base(using=using, force_insert=force_insert, force_update=force_update, update_fields=update_fields)
def save_base(self, raw=False, cls=None, origin=None, force_insert=False, force_update=False, using=None, update_fields=None):
    """
    Does the heavy-lifting involved in saving. Subclasses shouldn't need
    to override this method. It's separate from save() in order to hide
    the need for overrides of save() to pass around internal-only
    parameters ('raw', 'cls', and 'origin').
    """
    using = using or router.db_for_write(self.__class__, instance=self)
    assert not (force_insert and (force_update or update_fields))
    assert update_fields is None or len(update_fields) > 0
    if cls is None:
        cls = self.__class__
        meta = cls._meta
        if not meta.proxy:
            origin = cls
    else:
        meta = cls._meta
    if origin and not meta.auto_created:
        signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using, update_fields=update_fields)
    # In a raw save, the object is stored exactly as presented; otherwise,
    # handle multi-table inheritance by saving the parent models first.
    if not raw or meta.proxy:
        if meta.proxy:
            org = cls
        else:
            org = None
        for parent, field in meta.parents.items():
            # The parent's primary key field may still be unset; if so,
            # fill it from the link field.
            if field and getattr(self, parent._meta.pk.attname) is None and getattr(self, field.attname) is not None:
                setattr(self, parent._meta.pk.attname, getattr(self, field.attname))
            self.save_base(cls=parent, origin=org, using=using, update_fields=update_fields)
            if field:
                setattr(self, field.attname, self._get_pk_val(parent._meta))
                # Invalidate the related-object cache in case it has been
                # accidentally populated; a fresh instance will be rebuilt
                # from the database if necessary.
                cache_name = field.get_cache_name()
                if hasattr(self, cache_name):
                    delattr(self, cache_name)
        if meta.proxy:
            return
    if not meta.proxy:
        non_pks = [f for f in meta.local_fields if not f.primary_key]
        if update_fields:
            non_pks = [f for f in non_pks if f.name in update_fields or f.attname in update_fields]
        pk_val = self._get_pk_val(meta)
        pk_set = pk_val is not None
        record_exists = True
        manager = cls._base_manager
        if pk_set:
            # Decide between an UPDATE and an INSERT.
            if (force_update or update_fields) or (not force_insert and manager.using(using).filter(pk=pk_val).exists()):
                if force_update or non_pks:
                    values = [(f, None, (raw and getattr(self, f.attname)) or f.pre_save(self, False)) for f in non_pks]
                    if values:
                        rows = manager.using(using).filter(pk=pk_val)._update(values)
                        if force_update and not rows:
                            raise DatabaseError(u'Forced update did not affect any rows.')
                        if update_fields and not rows:
                            raise DatabaseError(u'Save with update_fields did not affect any rows.')
            else:
                record_exists = False
        if not pk_set or not record_exists:
            if meta.order_with_respect_to:
                field = meta.order_with_respect_to
                order_value = manager.using(using).filter(**{field.name: getattr(self, field.attname)}).count()
                self._order = order_value
            fields = meta.local_fields
            if not pk_set:
                if force_update or update_fields:
                    raise ValueError(u'Cannot force an update in save() with no primary key.')
                fields = [f for f in fields if not isinstance(f, AutoField)]
            record_exists = False
            update_pk = bool(meta.has_auto_field and not pk_set)
            result = manager._insert([self], fields=fields, return_id=update_pk, using=using, raw=raw)
            if update_pk:
                setattr(self, meta.pk.attname, result)
        transaction.commit_unless_managed(using=using)
    self._state.db = using
    self._state.adding = False
    if origin and not meta.auto_created:
        signals.post_save.send(sender=origin, instance=self, created=(not record_exists), update_fields=update_fields, raw=raw, using=using)
def clean(self):
    """
    Hook for doing any extra model-wide validation after clean() has been
    called on every field by self.clean_fields. Any ValidationError
    raised by this method will not be associated with a particular field;
    it will have a special-case association with the field defined by
    NON_FIELD_ERRORS.
    """
    pass

def validate_unique(self, exclude=None):
    """
    Checks unique constraints on the model and raises ``ValidationError``
    if any failed.
    """
    unique_checks, date_checks = self._get_unique_checks(exclude=exclude)
    errors = self._perform_unique_checks(unique_checks)
    date_errors = self._perform_date_checks(date_checks)
    for k, v in date_errors.items():
        errors.setdefault(k, []).extend(v)
    if errors:
        raise ValidationError(errors)
def _get_unique_checks(self, exclude=None):
    """
    Gather a list of checks to perform. Since validate_unique could be
    called from a ModelForm, some fields may have been excluded; we can't
    perform a unique check on a model that is missing fields involved in
    that check. Fields that did not validate should also be excluded, but
    they need to be passed in via the exclude argument.
    """
    if exclude is None:
        exclude = []
    unique_checks = []
    unique_togethers = [(self.__class__, self._meta.unique_together)]
    for parent_class in self._meta.parents.keys():
        if parent_class._meta.unique_together:
            unique_togethers.append((parent_class, parent_class._meta.unique_together))
    for model_class, unique_together in unique_togethers:
        for check in unique_together:
            for name in check:
                # If any field in the check is excluded, skip the check.
                if name in exclude:
                    break
            else:
                unique_checks.append((model_class, tuple(check)))
    date_checks = []
    fields_with_class = [(self.__class__, self._meta.local_fields)]
    for parent_class in self._meta.parents.keys():
        fields_with_class.append((parent_class, parent_class._meta.local_fields))
    for model_class, fields in fields_with_class:
        for f in fields:
            name = f.name
            if name in exclude:
                continue
            if f.unique:
                unique_checks.append((model_class, (name,)))
            if f.unique_for_date and f.unique_for_date not in exclude:
                date_checks.append((model_class, u'date', name, f.unique_for_date))
            if f.unique_for_year and f.unique_for_year not in exclude:
                date_checks.append((model_class, u'year', name, f.unique_for_year))
            if f.unique_for_month and f.unique_for_month not in exclude:
                date_checks.append((model_class, u'month', name, f.unique_for_month))
    return unique_checks, date_checks
def full_clean(self, exclude=None):
    """
    Calls clean_fields, clean, and validate_unique on the model, and
    raises a ``ValidationError`` for any errors that occurred.
    """
    errors = {}
    if exclude is None:
        exclude = []
    try:
        self.clean_fields(exclude=exclude)
    except ValidationError as e:
        errors = e.update_error_dict(errors)
    try:
        self.clean()
    except ValidationError as e:
        errors = e.update_error_dict(errors)
    # Fields that already failed are excluded from the unique checks.
    for name in errors.keys():
        if name != NON_FIELD_ERRORS and name not in exclude:
            exclude.append(name)
    try:
        self.validate_unique(exclude=exclude)
    except ValidationError as e:
        errors = e.update_error_dict(errors)
    if errors:
        raise ValidationError(errors)
def clean_fields(self, exclude=None):
    """
    Cleans all fields and raises a ValidationError containing
    message_dict of all validation errors if any occur.
    """
    if exclude is None:
        exclude = []
    errors = {}
    for f in self._meta.fields:
        if f.name in exclude:
            continue
        raw_value = getattr(self, f.attname)
        if f.blank and raw_value in validators.EMPTY_VALUES:
            continue
        try:
            setattr(self, f.attname, f.clean(raw_value, self))
        except ValidationError as e:
            errors[f.name] = e.messages
    if errors:
        raise ValidationError(errors)
def __deepcopy__(self, memo):
    """Deep copy of a QuerySet doesn't populate the cache."""
    obj = self.__class__()
    for k, v in self.__dict__.items():
        if k in ('_iter', '_result_cache'):
            obj.__dict__[k] = None
        else:
            obj.__dict__[k] = copy.deepcopy(v, memo)
    return obj

def __getstate__(self):
    """Allows the QuerySet to be pickled."""
    # Force the result cache to be fully populated before pickling.
    len(self)
    obj_dict = self.__dict__.copy()
    obj_dict['_iter'] = None
    return obj_dict
def __getitem__(self, k):
    """Retrieves an item or slice from the set of results."""
    if not isinstance(k, (slice,) + six.integer_types):
        raise TypeError
    assert (not isinstance(k, slice) and k >= 0) or \
        (isinstance(k, slice) and (k.start is None or k.start >= 0) and
         (k.stop is None or k.stop >= 0)), 'Negative indexing is not supported.'
    if self._result_cache is not None:
        if self._iter is not None:
            if isinstance(k, slice):
                if k.stop is not None:
                    bound = int(k.stop)
                else:
                    bound = None
            else:
                bound = k + 1
            if len(self._result_cache) < bound:
                self._fill_cache(bound - len(self._result_cache))
        return self._result_cache[k]
    if isinstance(k, slice):
        qs = self._clone()
        if k.start is not None:
            start = int(k.start)
        else:
            start = None
        if k.stop is not None:
            stop = int(k.stop)
        else:
            stop = None
        qs.query.set_limits(start, stop)
        return (k.step and list(qs)[::k.step]) or qs
    try:
        qs = self._clone()
        qs.query.set_limits(k, k + 1)
        return list(qs)[0]
    except self.model.DoesNotExist as e:
        raise IndexError(e.args)
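A usage sketch of the slicing behavior above; Entry is a hypothetical model, and every slice stays lazy except when a step is given.

# Illustrative sketch -- Entry is a hypothetical model.
qs = Entry.objects.all()
qs[:5]     # lazy: LIMIT 5 via query.set_limits(None, 5)
qs[10:20]  # lazy: OFFSET 10 LIMIT 10
qs[:10:2]  # a step forces evaluation: list(qs[:10])[::2]
qs[0]      # single item: a LIMIT 1 query; IndexError if there are no rows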
def iterator(self):
    """An iterator over the results from applying this QuerySet to the database."""
    fill_cache = False
    if connections[self.db].features.supports_select_related:
        fill_cache = self.query.select_related
    if isinstance(fill_cache, dict):
        requested = fill_cache
    else:
        requested = None
    max_depth = self.query.max_depth
    extra_select = list(self.query.extra_select)
    aggregate_select = list(self.query.aggregate_select)
    only_load = self.query.get_loaded_field_names()
    if not fill_cache:
        fields = self.model._meta.fields
    load_fields = []
    if only_load:
        for field, model in self.model._meta.get_fields_with_model():
            if model is None:
                model = self.model
            try:
                if field.name in only_load[model]:
                    load_fields.append(field.name)
            except KeyError:
                load_fields.append(field.name)
    index_start = len(extra_select)
    aggregate_start = index_start + len(load_fields or self.model._meta.fields)
    skip = None
    if load_fields and not fill_cache:
        skip = set()
        init_list = []
        for field in fields:
            if field.name not in load_fields:
                skip.add(field.attname)
            else:
                init_list.append(field.attname)
        model_cls = deferred_class_factory(self.model, skip)
    db = self.db
    model = self.model
    compiler = self.query.get_compiler(using=db)
    if fill_cache:
        klass_info = get_klass_info(model, max_depth=max_depth, requested=requested, only_load=only_load)
    for row in compiler.results_iter():
        if fill_cache:
            obj, _ = get_cached_row(row, index_start, db, klass_info, offset=len(aggregate_select))
        else:
            row_data = row[index_start:aggregate_start]
            if skip:
                obj = model_cls(**dict(zip(init_list, row_data)))
            else:
                obj = model(*row_data)
            obj._state.db = db
            obj._state.adding = False
        if extra_select:
            for i, k in enumerate(extra_select):
                setattr(obj, k, row[i])
        if aggregate_select:
            for i, aggregate in enumerate(aggregate_select):
                setattr(obj, aggregate, row[i + aggregate_start])
        if self._known_related_objects:
            for field, rel_objs in self._known_related_objects.items():
                pk = getattr(obj, field.get_attname())
                try:
                    rel_obj = rel_objs[pk]
                except KeyError:
                    pass
                else:
                    setattr(obj, field.name, rel_obj)
        yield obj
def aggregate(self, *args, **kwargs):
    """
    Returns a dictionary containing the calculations (aggregation) over
    the current queryset.

    If args is present the expression is passed as a kwarg using the
    Aggregate object's default alias.
    """
    if self.query.distinct_fields:
        raise NotImplementedError('aggregate() + distinct(fields) not implemented.')
    for arg in args:
        kwargs[arg.default_alias] = arg
    query = self.query.clone()
    for alias, aggregate_expr in kwargs.items():
        query.add_aggregate(aggregate_expr, self.model, alias, is_summary=True)
    return query.get_aggregation(using=self.db)
def count(self):
    """
    Performs a SELECT COUNT() and returns the number of records as an
    integer. If the QuerySet is already fully cached this simply returns
    the length of the cached results set to avoid multiple
    SELECT COUNT(*) calls.
    """
    if self._result_cache is not None and not self._iter:
        return len(self._result_cache)
    return self.query.get_count(using=self.db)

def get(self, *args, **kwargs):
    """
    Performs the query and returns a single object matching the given
    keyword arguments.
    """
    clone = self.filter(*args, **kwargs)
    if self.query.can_filter():
        clone = clone.order_by()
    num = len(clone)
    if num == 1:
        return clone._result_cache[0]
    if not num:
        raise self.model.DoesNotExist('%s matching query does not exist. Lookup parameters were %s' % (self.model._meta.object_name, kwargs))
    raise self.model.MultipleObjectsReturned('get() returned more than one %s -- it returned %s! Lookup parameters were %s' % (self.model._meta.object_name, num, kwargs))
def create(self, **kwargs):
    """
    Creates a new object with the given kwargs, saving it to the database
    and returning the created object.
    """
    obj = self.model(**kwargs)
    self._for_write = True
    obj.save(force_insert=True, using=self.db)
    return obj

def bulk_create(self, objs, batch_size=None):
    """
    Inserts each of the instances into the database. This does *not* call
    save() on each of the instances, does not send any pre/post save
    signals, and does not set the primary key attribute if it is an
    autoincrement field.
    """
    assert batch_size is None or batch_size > 0
    if self.model._meta.parents:
        raise ValueError("Can't bulk create an inherited model")
    if not objs:
        return objs
    self._for_write = True
    connection = connections[self.db]
    fields = self.model._meta.local_fields
    if not transaction.is_managed(using=self.db):
        transaction.enter_transaction_management(using=self.db)
        forced_managed = True
    else:
        forced_managed = False
    try:
        if connection.features.can_combine_inserts_with_and_without_auto_increment_pk and self.model._meta.has_auto_field:
            self._batched_insert(objs, fields, batch_size)
        else:
            objs_with_pk, objs_without_pk = partition(lambda o: o.pk is None, objs)
            if objs_with_pk:
                self._batched_insert(objs_with_pk, fields, batch_size)
            if objs_without_pk:
                fields = [f for f in fields if not isinstance(f, AutoField)]
                self._batched_insert(objs_without_pk, fields, batch_size)
        if forced_managed:
            transaction.commit(using=self.db)
        else:
            transaction.commit_unless_managed(using=self.db)
    finally:
        if forced_managed:
            transaction.leave_transaction_management(using=self.db)
    return objs
def get_or_create(self, **kwargs):
    """
    Looks up an object with the given kwargs, creating one if necessary.
    Returns a tuple of (object, created), where created is a boolean
    specifying whether an object was created.
    """
    assert kwargs, 'get_or_create() must be passed at least one keyword argument'
    defaults = kwargs.pop('defaults', {})
    lookup = kwargs.copy()
    for f in self.model._meta.fields:
        if f.attname in lookup:
            lookup[f.name] = lookup.pop(f.attname)
    try:
        self._for_write = True
        return self.get(**lookup), False
    except self.model.DoesNotExist:
        try:
            params = dict([(k, v) for k, v in kwargs.items() if '__' not in k])
            params.update(defaults)
            obj = self.model(**params)
            sid = transaction.savepoint(using=self.db)
            obj.save(force_insert=True, using=self.db)
            transaction.savepoint_commit(sid, using=self.db)
            return obj, True
        except IntegrityError as e:
            transaction.savepoint_rollback(sid, using=self.db)
            exc_info = sys.exc_info()
            try:
                return self.get(**lookup), False
            except self.model.DoesNotExist:
                six.reraise(*exc_info)
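A usage sketch of get_or_create; the Person model is hypothetical, and note that `defaults` only applies on the create path.

# Illustrative sketch -- Person is a hypothetical model.
from datetime import date

person, created = Person.objects.get_or_create(
    first_name='John',
    last_name='Lennon',
    defaults={'birthday': date(1940, 10, 9)},  # applied only when creating
)
# created is True on the first call and False on subsequent identical calls.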
def latest(self, field_name=None):
    """
    Returns the latest object, according to the model's 'get_latest_by'
    option or optional given field_name.
    """
    latest_by = field_name or self.model._meta.get_latest_by
    assert bool(latest_by), "latest() requires either a field_name parameter or 'get_latest_by' in the model"
    assert self.query.can_filter(), 'Cannot change a query once a slice has been taken.'
    obj = self._clone()
    obj.query.set_limits(high=1)
    obj.query.clear_ordering()
    obj.query.add_ordering('-%s' % latest_by)
    return obj.get()
def in_bulk(self, id_list):
    """
    Returns a dictionary mapping each of the given IDs to the object with
    that ID.
    """
    assert self.query.can_filter(), "Cannot use 'limit' or 'offset' with in_bulk"
    if not id_list:
        return {}
    qs = self.filter(pk__in=id_list).order_by()
    return dict([(obj._get_pk_val(), obj) for obj in qs])
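A usage sketch of in_bulk; the Blog model is hypothetical.

# Illustrative sketch -- Blog is a hypothetical model.
Blog.objects.in_bulk([1, 2])  # {1: <Blog pk=1>, 2: <Blog pk=2>}
Blog.objects.in_bulk([])      # {} -- short-circuits without hitting the database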
def delete(self):
    """Deletes the records in the current QuerySet."""
    assert self.query.can_filter(), "Cannot use 'limit' or 'offset' with delete."
    del_query = self._clone()
    del_query._for_write = True
    del_query.query.select_for_update = False
    del_query.query.select_related = False
    del_query.query.clear_ordering(force_empty=True)
    collector = Collector(using=del_query.db)
    collector.collect(del_query)
    collector.delete()
    self._result_cache = None
def _raw_delete(self, using):
    """
    Deletes objects found from the given queryset in a single direct SQL
    query. No signals are sent, and there is no protection for cascades.
    """
    sql.DeleteQuery(self.model).delete_qs(self, using)
def update(self, **kwargs):
    """
    Updates all elements in the current QuerySet, setting all the given
    fields to the appropriate values.
    """
    assert self.query.can_filter(), 'Cannot update a query once a slice has been taken.'
    self._for_write = True
    query = self.query.clone(sql.UpdateQuery)
    query.add_update_values(kwargs)
    if not transaction.is_managed(using=self.db):
        transaction.enter_transaction_management(using=self.db)
        forced_managed = True
    else:
        forced_managed = False
    try:
        rows = query.get_compiler(self.db).execute_sql(None)
        if forced_managed:
            transaction.commit(using=self.db)
        else:
            transaction.commit_unless_managed(using=self.db)
    finally:
        if forced_managed:
            transaction.leave_transaction_management(using=self.db)
    self._result_cache = None
    return rows

def _update(self, values):
    """
    A version of update that accepts field objects instead of field
    names. Used primarily for model saving and not intended for use by
    general code (it requires too much poking around at model internals
    to be useful at that level).
    """
    assert self.query.can_filter(), 'Cannot update a query once a slice has been taken.'
    query = self.query.clone(sql.UpdateQuery)
    query.add_update_fields(values)
    self._result_cache = None
    return query.get_compiler(self.db).execute_sql(None)
def dates(self, field_name, kind, order='ASC'):
    """
    Returns a list of datetime objects representing all available dates
    for the given field_name, scoped to 'kind'.
    """
    assert kind in ('month', 'year', 'day'), "'kind' must be one of 'year', 'month' or 'day'."
    assert order in ('ASC', 'DESC'), "'order' must be either 'ASC' or 'DESC'."
    return self._clone(klass=DateQuerySet, setup=True, _field_name=field_name, _kind=kind, _order=order)

def none(self):
    """Returns an empty QuerySet."""
    return self._clone(klass=EmptyQuerySet)
def all(self):
    """
    Returns a new QuerySet that is a copy of the current one. This allows
    a QuerySet to proxy for a model manager in some cases.
    """
    return self._clone()

def filter(self, *args, **kwargs):
    """Returns a new QuerySet instance with the args ANDed to the existing set."""
    return self._filter_or_exclude(False, *args, **kwargs)

def exclude(self, *args, **kwargs):
    """Returns a new QuerySet instance with NOT (args) ANDed to the existing set."""
    return self._filter_or_exclude(True, *args, **kwargs)
def complex_filter(self, filter_obj):
    """
    Returns a new QuerySet instance with filter_obj added to the filters.

    filter_obj can be a Q object (or anything with an add_to_query()
    method) or a dictionary of keyword lookup arguments.

    This exists to support framework features such as 'limit_choices_to',
    and usually it will be more natural to use other methods.
    """
    if isinstance(filter_obj, Q) or hasattr(filter_obj, 'add_to_query'):
        clone = self._clone()
        clone.query.add_q(filter_obj)
        return clone
    else:
        return self._filter_or_exclude(None, **filter_obj)

def select_for_update(self, **kwargs):
    """
    Returns a new QuerySet instance that will select objects with a
    FOR UPDATE lock.
    """
    nowait = kwargs.pop('nowait', False)
    obj = self._clone()
    obj.query.select_for_update = True
    obj.query.select_for_update_nowait = nowait
    return obj
def select_related(self, *fields, **kwargs):
    """
    Returns a new QuerySet instance that will select related objects.

    If fields are specified, they must be ForeignKey fields and only
    those related objects are included in the selection.
    """
    if 'depth' in kwargs:
        warnings.warn('The "depth" keyword argument has been deprecated.\nUse related field names instead.', PendingDeprecationWarning)
    depth = kwargs.pop('depth', 0)
    if kwargs:
        raise TypeError('Unexpected keyword arguments to select_related: %s' % (list(kwargs),))
    obj = self._clone()
    if fields:
        if depth:
            raise TypeError('Cannot pass both "depth" and fields to select_related()')
        obj.query.add_select_related(fields)
    else:
        obj.query.select_related = True
    if depth:
        obj.query.max_depth = depth
    return obj
def prefetch_related(self, *lookups):
    """
    Returns a new QuerySet instance that will prefetch the specified
    Many-To-One and Many-To-Many related objects when the QuerySet is
    evaluated.

    When prefetch_related() is called more than once, the list of lookups
    to prefetch is appended to. If prefetch_related(None) is called, the
    list is cleared.
    """
    clone = self._clone()
    if lookups == (None,):
        clone._prefetch_related_lookups = []
    else:
        clone._prefetch_related_lookups.extend(lookups)
    return clone
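A usage sketch of prefetch_related; the Pizza model and its relations are hypothetical.

# Illustrative sketch -- Pizza with a `toppings` ManyToManyField is hypothetical.
pizzas = Pizza.objects.prefetch_related('toppings')
for pizza in pizzas:  # one extra query loads toppings for every pizza
    names = [t.name for t in pizza.toppings.all()]
pizzas.prefetch_related('restaurants')  # lookups accumulate across calls
pizzas.prefetch_related(None)           # None clears the lookup list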
def dup_select_related(self, other):
    """
    Copies the related selection status from the QuerySet 'other' to the
    current QuerySet.
    """
    self.query.select_related = other.query.select_related

def annotate(self, *args, **kwargs):
    """
    Return a query set in which the returned objects have been annotated
    with data aggregated from related fields.
    """
    for arg in args:
        if arg.default_alias in kwargs:
            raise ValueError("The named annotation '%s' conflicts with the default name for another annotation." % arg.default_alias)
        kwargs[arg.default_alias] = arg
    names = getattr(self, '_fields', None)
    if names is None:
        names = set(self.model._meta.get_all_field_names())
    for aggregate in kwargs:
        if aggregate in names:
            raise ValueError("The annotation '%s' conflicts with a field on the model." % aggregate)
    obj = self._clone()
    obj._setup_aggregate_query(list(kwargs))
    for alias, aggregate_expr in kwargs.items():
        obj.query.add_aggregate(aggregate_expr, self.model, alias, is_summary=False)
    return obj
def order_by(self, *field_names):
    """Returns a new QuerySet instance with the ordering changed."""
    assert self.query.can_filter(), 'Cannot reorder a query once a slice has been taken.'
    obj = self._clone()
    obj.query.clear_ordering()
    obj.query.add_ordering(*field_names)
    return obj

def distinct(self, *field_names):
    """Returns a new QuerySet instance that will select only distinct results."""
    assert self.query.can_filter(), 'Cannot create distinct fields once a slice has been taken.'
    obj = self._clone()
    obj.query.add_distinct_fields(*field_names)
    return obj

def extra(self, select=None, where=None, params=None, tables=None, order_by=None, select_params=None):
    """Adds extra SQL fragments to the query."""
    assert self.query.can_filter(), 'Cannot change a query once a slice has been taken'
    clone = self._clone()
    clone.query.add_extra(select, select_params, where, params, tables, order_by)
    return clone
def reverse(self):
    """Reverses the ordering of the QuerySet."""
    clone = self._clone()
    clone.query.standard_ordering = not clone.query.standard_ordering
    return clone

def defer(self, *fields):
    """
    Defers the loading of data for certain fields until they are
    accessed. The set of fields to defer is added to any existing set of
    deferred fields. The only exception to this is if None is passed in
    as the only parameter, in which case all deferrals are removed (None
    acts as a reset option).
    """
    clone = self._clone()
    if fields == (None,):
        clone.query.clear_deferred_loading()
    else:
        clone.query.add_deferred_loading(fields)
    return clone

def only(self, *fields):
    """
    Essentially, the opposite of defer. Only the fields passed into this
    method and that are not already specified as deferred are loaded
    immediately when the queryset is evaluated.
    """
    if fields == (None,):
        raise TypeError('Cannot pass None as an argument to only().')
    clone = self._clone()
    clone.query.add_immediate_loading(fields)
    return clone
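A usage sketch of defer and only; the Entry model and its fields are hypothetical.

# Illustrative sketch -- Entry is a hypothetical model.
Entry.objects.defer('body')                  # load everything except 'body'
Entry.objects.defer('body').defer('rating')  # deferrals accumulate
Entry.objects.only('headline')               # load only 'headline' (plus the pk)
Entry.objects.defer(None)                    # reset: remove all deferrals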
def using(self, alias):
    """Selects which database this QuerySet should execute its query against."""
    clone = self._clone()
    clone._db = alias
    return clone
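A usage sketch of using(); the 'replica' alias and the Entry model are hypothetical.

# Illustrative sketch -- assumes a 'replica' alias in settings.DATABASES.
Entry.objects.using('replica').count()  # run this query against the replica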