'Returns a two-element list with the lower and upper bound to be used with a BETWEEN operator to query a DateField value using a year lookup. `value` is an int containing the looked-up year. By default, it just calls `self.year_lookup_bounds`. Some backends need this hook because on their databases date fields can\'t be compared to values that include a time part.'
def year_lookup_bounds_for_date_field(self, value):
return self.year_lookup_bounds(value)
'Coerce the value returned by the database backend into a consistent type that is compatible with the field type.'
def convert_values(self, value, field):
if value is None:
    return value
internal_type = field.get_internal_type()
if internal_type == 'FloatField':
    return float(value)
elif internal_type and (internal_type.endswith('IntegerField') or internal_type == 'AutoField'):
    return int(value)
return value
'Check that the backend supports the provided aggregate. This is used on specific backends to rule out aggregates with known faulty implementations. If the named aggregate function has a known problem, the backend should raise NotImplementedError.'
def check_aggregate_support(self, aggregate_func):
pass
'Combine a list of subexpressions into a single expression, using the provided connecting operator. This is required because operators can vary between backends (e.g., Oracle with %% and &) and between subexpression types (e.g., date expressions)'
def combine_expression(self, connector, sub_expressions):
conn = ' %s ' % connector
return conn.join(sub_expressions)
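# Usage sketch (values made up): the default implementation simply joins the
# sub-expressions with the connector, padded by spaces.
sub_expressions = ['"price"', '"tax"']
connector = '+'
print((' %s ' % connector).join(sub_expressions))   # prints: "price" + "tax"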
'Allow modification of insert parameters. Needed for Oracle Spatial backend due to #10888.'
def modify_insert_params(self, placeholders, params):
return params
'Hook for a database backend to use the cursor description to match a Django field type to a database column. For Oracle, the column data_type on its own is insufficient to distinguish between a FloatField and IntegerField, for example.'
def get_field_type(self, data_type, description):
return self.data_types_reverse[data_type]
'Apply a conversion to the name for the purposes of comparison. The default table name converter is for case sensitive comparison.'
def table_name_converter(self, name):
return name
'Returns a list of names of all tables that exist in the database. The returned table list is sorted by Python\'s default sorting. We do NOT use the database\'s ORDER BY here to avoid subtle differences in sorting order between databases.'
def table_names(self, cursor=None):
if cursor is None:
    cursor = self.connection.cursor()
return sorted(self.get_table_list(cursor))
'Returns an unsorted list of names of all tables that exist in the database.'
def get_table_list(self, cursor):
raise NotImplementedError
'Returns a list of all table names that have associated Django models and are in INSTALLED_APPS. If only_existing is True, the resulting list will only include the tables that actually exist in the database.'
def django_table_names(self, only_existing=False):
from django.db import models, router
tables = set()
for app in models.get_apps():
    for model in models.get_models(app):
        if not model._meta.managed:
            continue
        if not router.allow_syncdb(self.connection.alias, model):
            continue
        tables.add(model._meta.db_table)
        tables.update([f.m2m_db_table() for f in model._meta.local_many_to_many])
tables = list(tables)
if only_existing:
    existing_tables = self.table_names()
    tables = [t for t in tables if self.table_name_converter(t) in existing_tables]
return tables
'Returns a set of all models represented by the provided list of table names.'
def installed_models(self, tables):
from django.db import models, router
all_models = []
for app in models.get_apps():
    for model in models.get_models(app):
        if router.allow_syncdb(self.connection.alias, model):
            all_models.append(model)
tables = list(map(self.table_name_converter, tables))
return set([m for m in all_models
            if self.table_name_converter(m._meta.db_table) in tables])
'Returns a list of information about all DB sequences for all models in all apps.'
def sequence_list(self):
from django.db import models, router
apps = models.get_apps()
sequence_list = []
for app in apps:
    for model in models.get_models(app):
        if not model._meta.managed:
            continue
        if model._meta.swapped:
            continue
        if not router.allow_syncdb(self.connection.alias, model):
            continue
        for f in model._meta.local_fields:
            if isinstance(f, models.AutoField):
                sequence_list.append({'table': model._meta.db_table, 'column': f.column})
                break
        for f in model._meta.local_many_to_many:
            if f.rel.through is None:
                sequence_list.append({'table': f.m2m_db_table(), 'column': None})
return sequence_list
'Backends can override this to return a list of (column_name, referenced_table_name, referenced_column_name) for all key columns in given table.'
def get_key_columns(self, cursor, table_name):
raise NotImplementedError
'Returns the name of the primary key column for the given table.'
def get_primary_key_column(self, cursor, table_name):
for column in six.iteritems(self.get_indexes(cursor, table_name)):
    if column[1]['primary_key']:
        return column[0]
return None
'Returns a dictionary of indexed fieldname -> infodict for the given table, where each infodict is in the format: {\'primary_key\': boolean representing whether it\'s the primary key, \'unique\': boolean representing whether it\'s a unique index} Only single-column indexes are introspected.'
def get_indexes(self, cursor, table_name):
raise NotImplementedError
'By default, there is no backend-specific validation'
def validate_field(self, errors, opts, f):
pass
'Confirm support for STDDEV and related stats functions. SQLite supports STDDEV as an extension package, so connection.ops.check_aggregate_support() can\'t unilaterally rule out support for STDDEV. We need to manually check whether the call works.'
@cached_property
def supports_stddev(self):
cursor = self.connection.cursor()
cursor.execute('CREATE TABLE STDDEV_TEST (X INT)')
try:
    cursor.execute('SELECT STDDEV(*) FROM STDDEV_TEST')
    has_support = True
except utils.DatabaseError:
    has_support = False
cursor.execute('DROP TABLE STDDEV_TEST')
return has_support
'SQLite has a compile-time default (SQLITE_LIMIT_VARIABLE_NUMBER) of 999 variables per query. If there is just a single field to insert, then we can hit another limit, SQLITE_MAX_COMPOUND_SELECT, which defaults to 500.'
def bulk_batch_size(self, fields, objs):
limit = 999 if len(fields) > 1 else 500
return (limit // len(fields)) if len(fields) > 0 else len(objs)
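# Hypothetical illustration of the batch-size arithmetic above, outside of any
# backend class: 999 bound variables apply when several fields are inserted,
# 500 compound SELECTs when only one field is.
def sqlite_batch_size(num_fields, num_objs):
    limit = 999 if num_fields > 1 else 500
    return (limit // num_fields) if num_fields > 0 else num_objs

print(sqlite_batch_size(3, 10000))   # 333 rows per INSERT
print(sqlite_batch_size(1, 10000))   # 500 rows per INSERT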
'SQLite returns floats when it should be returning decimals, and gets dates and datetimes wrong. For consistency with other backends, coerce when required.'
def convert_values(self, value, field):
internal_type = field.get_internal_type()
if internal_type == 'DecimalField':
    return util.typecast_decimal(field.format_number(value))
elif (internal_type and internal_type.endswith('IntegerField')) or internal_type == 'AutoField':
    return int(value)
elif internal_type == 'DateField':
    return parse_date(value)
elif internal_type == 'DateTimeField':
    return parse_datetime_with_timezone_support(value)
elif internal_type == 'TimeField':
    return parse_time(value)
return value
'Checks each table name in `table_names` for rows with invalid foreign key references. This method is intended to be used in conjunction with `disable_constraint_checking()` and `enable_constraint_checking()`, to determine if rows with invalid references were entered while constraint checks were off. Raises an IntegrityError on the first invalid foreign key reference encountered (if any) and provides detailed information about the invalid reference in the error message. Backends can override this method if they can more directly apply constraint checking (e.g. via "SET CONSTRAINTS ALL IMMEDIATE")'
def check_constraints(self, table_names=None):
cursor = self.cursor()
if table_names is None:
    table_names = self.introspection.table_names(cursor)
for table_name in table_names:
    primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name)
    if not primary_key_column_name:
        continue
    key_columns = self.introspection.get_key_columns(cursor, table_name)
    for column_name, referenced_table_name, referenced_column_name in key_columns:
        cursor.execute("""
            SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING
            LEFT JOIN `%s` as REFERRED
            ON (REFERRING.`%s` = REFERRED.`%s`)
            WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL"""
            % (primary_key_column_name, column_name, table_name,
               referenced_table_name, column_name, referenced_column_name,
               column_name, referenced_column_name))
        for bad_row in cursor.fetchall():
            raise utils.IntegrityError(
                "The row in table '%s' with primary key '%s' has an invalid "
                "foreign key: %s.%s contains a value '%s' that does not have "
                "a corresponding value in %s.%s."
                % (table_name, bad_row[0], table_name, column_name,
                   bad_row[1], referenced_table_name, referenced_column_name))
'SQLite3 doesn\'t support constraints'
def sql_for_pending_references(self, model, style, pending_references):
return []
'SQLite3 doesn\'t support constraints'
def sql_remove_table_constraints(self, model, references_to_delete, style):
return []
'Returns a tuple that uniquely identifies a test database. This takes into account the special cases of ":memory:" and "" for SQLite since the databases will be distinct despite having the same TEST_NAME. See http://www.sqlite.org/inmemorydb.html'
def test_db_signature(self):
settings_dict = self.connection.settings_dict
test_dbname = self._get_test_db_name()
sig = [self.connection.settings_dict['NAME']]
if test_dbname == ':memory:':
    sig.append(self.connection.alias)
return tuple(sig)
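# Assumed illustration of the resulting signatures: a file-backed database is
# identified by its NAME alone, while an in-memory database also carries the
# connection alias, so two ':memory:' test databases stay distinct.
file_signature = ('/srv/app/db.sqlite3',)        # hypothetical NAME
memory_signature = (':memory:', 'default')       # NAME plus alias
assert file_signature != memory_signature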
'Returns a list of table names in the current database.'
def get_table_list(self, cursor):
cursor.execute("""
    SELECT name FROM sqlite_master
    WHERE type='table' AND NOT name='sqlite_sequence'
    ORDER BY name""")
return [row[0] for row in cursor.fetchall()]
'Returns a description of the table, with the DB-API cursor.description interface.'
def get_table_description(self, cursor, table_name):
return [(info['name'], info['type'], None, info['size'], None, None, info['null_ok']) for info in self._table_info(cursor, table_name)]
'Returns a dictionary of {field_index: (field_index_other_table, other_table)} representing all relationships to the given table. Indexes are 0-based.'
def get_relations(self, cursor, table_name):
relations = {}
cursor.execute('SELECT sql FROM sqlite_master WHERE tbl_name = %s AND type = %s', [table_name, 'table'])
results = cursor.fetchone()[0].strip()
results = results[results.index('(') + 1:results.rindex(')')]
for field_index, field_desc in enumerate(results.split(',')):
    field_desc = field_desc.strip()
    if field_desc.startswith('UNIQUE'):
        continue
    m = re.search('references (.*) \\(["|](.*)["|]\\)', field_desc, re.I)
    if not m:
        continue
    table, column = [s.strip('"') for s in m.groups()]
    cursor.execute('SELECT sql FROM sqlite_master WHERE tbl_name = %s', [table])
    result = cursor.fetchall()[0]
    other_table_results = result[0].strip()
    li, ri = other_table_results.index('('), other_table_results.rindex(')')
    other_table_results = other_table_results[li + 1:ri]
    for other_index, other_desc in enumerate(other_table_results.split(',')):
        other_desc = other_desc.strip()
        if other_desc.startswith('UNIQUE'):
            continue
        name = other_desc.split(' ', 1)[0].strip('"')
        if name == column:
            relations[field_index] = (other_index, table)
            break
return relations
'Returns a list of (column_name, referenced_table_name, referenced_column_name) for all key columns in given table.'
def get_key_columns(self, cursor, table_name):
key_columns = []
cursor.execute('SELECT sql FROM sqlite_master WHERE tbl_name = %s AND type = %s', [table_name, 'table'])
results = cursor.fetchone()[0].strip()
results = results[results.index('(') + 1:results.rindex(')')]
for field_index, field_desc in enumerate(results.split(',')):
    field_desc = field_desc.strip()
    if field_desc.startswith('UNIQUE'):
        continue
    m = re.search('"(.*)".*references (.*) \\(["|](.*)["|]\\)', field_desc, re.I)
    if not m:
        continue
    key_columns.append(tuple([s.strip('"') for s in m.groups()]))
return key_columns
'Get the column name of the primary key for the given table.'
def get_primary_key_column(self, cursor, table_name):
cursor.execute('SELECT sql FROM sqlite_master WHERE tbl_name = %s AND type = %s', [table_name, 'table'])
results = cursor.fetchone()[0].strip()
results = results[results.index('(') + 1:results.rindex(')')]
for field_desc in results.split(','):
    field_desc = field_desc.strip()
    m = re.search('"(.*)".*PRIMARY KEY$', field_desc)
    if m:
        return m.groups()[0]
return None
'Internal method used in Django tests. Don\'t rely on this from your code'
@cached_property
def _mysql_storage_engine(self):
cursor = self.connection.cursor()
cursor.execute('CREATE TABLE INTROSPECT_TEST (X INT)')
cursor.execute("SHOW TABLE STATUS WHERE Name='INTROSPECT_TEST'")
result = cursor.fetchone()
cursor.execute('DROP TABLE INTROSPECT_TEST')
return result[1]
'Confirm support for introspected foreign keys'
@cached_property
def can_introspect_foreign_keys(self):
return (self._mysql_storage_engine != u'MyISAM')
'"ORDER BY NULL" prevents MySQL from implicitly ordering by grouped columns. If no ordering would otherwise be applied, we don\'t want any implicit sorting going on.'
def force_no_ordering(self):
return [u'NULL']
'Disables foreign key checks, primarily for use in adding rows with forward references. Always returns True, to indicate constraint checks need to be re-enabled.'
def disable_constraint_checking(self):
self.cursor().execute('SET foreign_key_checks=0')
return True
'Re-enable foreign key checks after they have been disabled.'
def enable_constraint_checking(self):
self.cursor().execute(u'SET foreign_key_checks=1')
'Checks each table name in `table_names` for rows with invalid foreign key references. This method is intended to be used in conjunction with `disable_constraint_checking()` and `enable_constraint_checking()`, to determine if rows with invalid references were entered while constraint checks were off. Raises an IntegrityError on the first invalid foreign key reference encountered (if any) and provides detailed information about the invalid reference in the error message. Backends can override this method if they can more directly apply constraint checking (e.g. via "SET CONSTRAINTS ALL IMMEDIATE")'
def check_constraints(self, table_names=None):
cursor = self.cursor()
if table_names is None:
    table_names = self.introspection.table_names(cursor)
for table_name in table_names:
    primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name)
    if not primary_key_column_name:
        continue
    key_columns = self.introspection.get_key_columns(cursor, table_name)
    for column_name, referenced_table_name, referenced_column_name in key_columns:
        cursor.execute("""
            SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING
            LEFT JOIN `%s` as REFERRED
            ON (REFERRING.`%s` = REFERRED.`%s`)
            WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL"""
            % (primary_key_column_name, column_name, table_name,
               referenced_table_name, column_name, referenced_column_name,
               column_name, referenced_column_name))
        for bad_row in cursor.fetchall():
            raise utils.IntegrityError(
                "The row in table '%s' with primary key '%s' has an invalid "
                "foreign key: %s.%s contains a value '%s' that does not have "
                "a corresponding value in %s.%s."
                % (table_name, bad_row[0], table_name, column_name,
                   bad_row[1], referenced_table_name, referenced_column_name))
'MySQL has the following field length restriction: No character (varchar) fields can have a length exceeding 255 characters if they have a unique index on them.'
def validate_field(self, errors, opts, f):
from django.db import models
varchar_fields = (models.CharField, models.CommaSeparatedIntegerField, models.SlugField)
if isinstance(f, varchar_fields) and f.max_length > 255 and f.unique:
    msg = '"%(name)s": %(cls)s cannot have a "max_length" greater than 255 when using "unique=True".'
    errors.add(opts, msg % {'name': f.name, 'cls': f.__class__.__name__})
'All inline references are pending under MySQL'
def sql_for_inline_foreign_key_references(self, field, known_models, style):
return ([], True)
'Returns a list of table names in the current database.'
def get_table_list(self, cursor):
cursor.execute('SHOW TABLES')
return [row[0] for row in cursor.fetchall()]
'Returns a description of the table, with the DB-API cursor.description interface.'
def get_table_description(self, cursor, table_name):
cursor.execute("""
    SELECT column_name, character_maximum_length FROM information_schema.columns
    WHERE table_name = %s AND table_schema = DATABASE()
    AND character_maximum_length IS NOT NULL""", [table_name])
length_map = dict(cursor.fetchall())
cursor.execute('SELECT * FROM %s LIMIT 1' % self.connection.ops.quote_name(table_name))
return [line[:3] + (length_map.get(line[0], line[3]),) + line[4:]
        for line in cursor.description]
'Returns a dictionary of {field_name: field_index} for the given table. Indexes are 0-based.'
def _name_to_index(self, cursor, table_name):
return dict([(d[0], i) for (i, d) in enumerate(self.get_table_description(cursor, table_name))])
'Returns a dictionary of {field_index: (field_index_other_table, other_table)} representing all relationships to the given table. Indexes are 0-based.'
def get_relations(self, cursor, table_name):
my_field_dict = self._name_to_index(cursor, table_name)
constraints = self.get_key_columns(cursor, table_name)
relations = {}
for my_fieldname, other_table, other_field in constraints:
    other_field_index = self._name_to_index(cursor, other_table)[other_field]
    my_field_index = my_field_dict[my_fieldname]
    relations[my_field_index] = (other_field_index, other_table)
return relations
'Returns a list of (column_name, referenced_table_name, referenced_column_name) for all key columns in given table.'
def get_key_columns(self, cursor, table_name):
key_columns = []
cursor.execute("""
    SELECT column_name, referenced_table_name, referenced_column_name
    FROM information_schema.key_column_usage
    WHERE table_name = %s
    AND table_schema = DATABASE()
    AND referenced_table_name IS NOT NULL
    AND referenced_column_name IS NOT NULL""", [table_name])
key_columns.extend(cursor.fetchall())
return key_columns
'Generates a 32-bit digest of a set of arguments that can be used to shorten identifying names.'
def _digest(self, *args):
h = hashlib.md5()
for arg in args:
    h.update(force_bytes(arg))
return h.hexdigest()[:8]
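import hashlib

# Minimal sketch of the shortening scheme above (table names made up): hash
# the arguments and keep the first 8 hex digits, i.e. 32 bits of the MD5.
h = hashlib.md5()
for arg in ('blog_comment', 'blog_entry'):
    h.update(arg.encode('utf-8'))    # stands in for force_bytes()
print(h.hexdigest()[:8])             # an 8-character hex digest, e.g. 'a1b2c3d4'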
'Returns the SQL required to create a single model, as a tuple of: (list_of_sql, pending_references_dict)'
def sql_create_model(self, model, style, known_models=set()):
opts = model._meta
if not opts.managed or opts.proxy or opts.swapped:
    return [], {}
final_output = []
table_output = []
pending_references = {}
qn = self.connection.ops.quote_name
for f in opts.local_fields:
    col_type = f.db_type(connection=self.connection)
    tablespace = f.db_tablespace or opts.db_tablespace
    if col_type is None:
        # Skip fields that aren't represented as database columns in this table.
        continue
    # Make the definition (e.g. 'foo VARCHAR(30)') for this field.
    field_output = [style.SQL_FIELD(qn(f.column)), style.SQL_COLTYPE(col_type)]
    null = f.null
    if (f.empty_strings_allowed and not f.primary_key and
            self.connection.features.interprets_empty_strings_as_nulls):
        null = True
    if not null:
        field_output.append(style.SQL_KEYWORD('NOT NULL'))
    if f.primary_key:
        field_output.append(style.SQL_KEYWORD('PRIMARY KEY'))
    elif f.unique:
        field_output.append(style.SQL_KEYWORD('UNIQUE'))
    if tablespace and f.unique:
        tablespace_sql = self.connection.ops.tablespace_sql(tablespace, inline=True)
        if tablespace_sql:
            field_output.append(tablespace_sql)
    if f.rel:
        ref_output, pending = self.sql_for_inline_foreign_key_references(f, known_models, style)
        if pending:
            pending_references.setdefault(f.rel.to, []).append((model, f))
        else:
            field_output.extend(ref_output)
    table_output.append(' '.join(field_output))
for field_constraints in opts.unique_together:
    table_output.append(style.SQL_KEYWORD('UNIQUE') + ' (%s)' %
        ', '.join([style.SQL_FIELD(qn(opts.get_field(f).column)) for f in field_constraints]))

full_statement = [style.SQL_KEYWORD('CREATE TABLE') + ' ' + style.SQL_TABLE(qn(opts.db_table)) + ' (']
for i, line in enumerate(table_output):  # Combine and add commas.
    full_statement.append(' %s%s' % (line, i < len(table_output) - 1 and ',' or ''))
full_statement.append(')')
if opts.db_tablespace:
    tablespace_sql = self.connection.ops.tablespace_sql(opts.db_tablespace)
    if tablespace_sql:
        full_statement.append(tablespace_sql)
full_statement.append(';')
final_output.append('\n'.join(full_statement))

if opts.has_auto_field:
    # Add any extra SQL needed to support auto-incrementing primary keys.
    auto_column = opts.auto_field.db_column or opts.auto_field.name
    autoinc_sql = self.connection.ops.autoinc_sql(opts.db_table, auto_column)
    if autoinc_sql:
        for stmt in autoinc_sql:
            final_output.append(stmt)

return final_output, pending_references
'Return the SQL snippet defining the foreign key reference for a field.'
def sql_for_inline_foreign_key_references(self, field, known_models, style):
qn = self.connection.ops.quote_name
if field.rel.to in known_models:
    output = [style.SQL_KEYWORD('REFERENCES') + ' ' +
        style.SQL_TABLE(qn(field.rel.to._meta.db_table)) + ' (' +
        style.SQL_FIELD(qn(field.rel.to._meta.get_field(field.rel.field_name).column)) + ')' +
        self.connection.ops.deferrable_sql()]
    pending = False
else:
    output = []
    pending = True
return output, pending
'Returns any ALTER TABLE statements to add constraints after the fact.'
def sql_for_pending_references(self, model, style, pending_references):
from django.db.backends.util import truncate_name

opts = model._meta
if not opts.managed or opts.proxy or opts.swapped:
    return []
qn = self.connection.ops.quote_name
final_output = []
if model in pending_references:
    for rel_class, f in pending_references[model]:
        rel_opts = rel_class._meta
        r_table = rel_opts.db_table
        r_col = f.column
        table = opts.db_table
        col = opts.get_field(f.rel.field_name).column
        r_name = '%s_refs_%s_%s' % (r_col, col, self._digest(r_table, table))
        final_output.append(style.SQL_KEYWORD('ALTER TABLE') +
            ' %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' %
            (qn(r_table), qn(truncate_name(r_name, self.connection.ops.max_name_length())),
             qn(r_col), qn(table), qn(col), self.connection.ops.deferrable_sql()))
    del pending_references[model]
return final_output
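# Hypothetical sketch of the generated constraint name for a Comment.entry
# foreign key pointing at Entry.id (digest value made up):
r_col, col, digest = 'entry_id', 'id', 'a1b2c3d4'
r_name = '%s_refs_%s_%s' % (r_col, col, digest)
print(r_name)   # entry_id_refs_id_a1b2c3d4
# which would end up in something like:
#   ALTER TABLE "blog_comment" ADD CONSTRAINT "entry_id_refs_id_a1b2c3d4"
#   FOREIGN KEY ("entry_id") REFERENCES "blog_entry" ("id");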
'Returns the CREATE INDEX SQL statements for a single model.'
def sql_indexes_for_model(self, model, style):
if not model._meta.managed or model._meta.proxy or model._meta.swapped:
    return []
output = []
for f in model._meta.local_fields:
    output.extend(self.sql_indexes_for_field(model, f, style))
for fs in model._meta.index_together:
    fields = [model._meta.get_field_by_name(f)[0] for f in fs]
    output.extend(self.sql_indexes_for_fields(model, fields, style))
return output
'Return the CREATE INDEX SQL statements for a single model field.'
def sql_indexes_for_field(self, model, f, style):
if f.db_index and not f.unique:
    return self.sql_indexes_for_fields(model, [f], style)
else:
    return []
'Return the DROP TABLE and constraint-dropping statements for a single model.'
def sql_destroy_model(self, model, references_to_delete, style):
if not model._meta.managed or model._meta.proxy or model._meta.swapped:
    return []
qn = self.connection.ops.quote_name
output = ['%s %s;' % (style.SQL_KEYWORD('DROP TABLE'),
                      style.SQL_TABLE(qn(model._meta.db_table)))]
if model in references_to_delete:
    output.extend(self.sql_remove_table_constraints(model, references_to_delete, style))
if model._meta.has_auto_field:
    ds = self.connection.ops.drop_sequence_sql(model._meta.db_table)
    if ds:
        output.append(ds)
return output
'Creates a test database, prompting the user for confirmation if the database already exists. Returns the name of the test database created.'
def create_test_db(self, verbosity=1, autoclobber=False):
from django.core.management import call_command

test_database_name = self._get_test_db_name()

if verbosity >= 1:
    test_db_repr = ''
    if verbosity >= 2:
        test_db_repr = " ('%s')" % test_database_name
    print("Creating test database for alias '%s'%s..." % (self.connection.alias, test_db_repr))

self._create_test_db(verbosity, autoclobber)

self.connection.close()
self.connection.settings_dict['NAME'] = test_database_name

# Report syncdb/flush messages at one level lower than that requested.
call_command('syncdb',
    verbosity=max(verbosity - 1, 0),
    interactive=False,
    database=self.connection.alias,
    load_initial_data=False)

call_command('flush',
    verbosity=max(verbosity - 1, 0),
    interactive=False,
    database=self.connection.alias)

from django.core.cache import get_cache
from django.core.cache.backends.db import BaseDatabaseCache
for cache_alias in settings.CACHES:
    cache = get_cache(cache_alias)
    if isinstance(cache, BaseDatabaseCache):
        call_command('createcachetable', cache._table, database=self.connection.alias)

# Get a cursor (even though we don't need one yet); this initializes the test database.
self.connection.cursor()

return test_database_name
'Internal implementation - returns the name of the test DB that will be created. Only useful when called from create_test_db() and _create_test_db() and when no external munging is done with the \'NAME\' or \'TEST_NAME\' settings.'
def _get_test_db_name(self):
if self.connection.settings_dict['TEST_NAME']:
    return self.connection.settings_dict['TEST_NAME']
return TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME']
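# Sketch of the default naming, assuming Django's usual TEST_DATABASE_PREFIX
# of 'test_'; an explicit TEST_NAME wins unchanged.
TEST_DATABASE_PREFIX = 'test_'

def test_db_name(settings_dict):
    # Mirrors _get_test_db_name() above, for illustration only.
    if settings_dict.get('TEST_NAME'):
        return settings_dict['TEST_NAME']
    return TEST_DATABASE_PREFIX + settings_dict['NAME']

print(test_db_name({'NAME': 'myproject', 'TEST_NAME': None}))   # test_myproject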
'Internal implementation - creates the test db tables.'
def _create_test_db(self, verbosity, autoclobber):
suffix = self.sql_table_creation_suffix()
test_database_name = self._get_test_db_name()

qn = self.connection.ops.quote_name

# Create the test database and connect to it.
cursor = self.connection.cursor()
self._prepare_for_test_db_ddl()
try:
    cursor.execute('CREATE DATABASE %s %s' % (qn(test_database_name), suffix))
except Exception as e:
    sys.stderr.write('Got an error creating the test database: %s\n' % e)
    if not autoclobber:
        confirm = input("Type 'yes' if you would like to try deleting the test "
                        "database '%s', or 'no' to cancel: " % test_database_name)
    if autoclobber or confirm == 'yes':
        try:
            if verbosity >= 1:
                print("Destroying old test database '%s'..." % self.connection.alias)
            cursor.execute('DROP DATABASE %s' % qn(test_database_name))
            cursor.execute('CREATE DATABASE %s %s' % (qn(test_database_name), suffix))
        except Exception as e:
            sys.stderr.write('Got an error recreating the test database: %s\n' % e)
            sys.exit(2)
    else:
        print("Tests cancelled.")
        sys.exit(1)

return test_database_name
'Destroy a test database, prompting the user for confirmation if the database already exists.'
def destroy_test_db(self, old_database_name, verbosity=1):
self.connection.close()
test_database_name = self.connection.settings_dict['NAME']
if verbosity >= 1:
    test_db_repr = ''
    if verbosity >= 2:
        test_db_repr = " ('%s')" % test_database_name
    print("Destroying test database for alias '%s'%s..." % (self.connection.alias, test_db_repr))

settings_dict = self.connection.settings_dict.copy()
settings_dict['NAME'] = old_database_name
backend = load_backend(settings_dict['ENGINE'])
new_connection = backend.DatabaseWrapper(
    settings_dict,
    alias='__destroy_test_db__',
    allow_thread_sharing=False)
new_connection.creation._destroy_test_db(test_database_name, verbosity)
'Internal implementation - remove the test db tables.'
def _destroy_test_db(self, test_database_name, verbosity):
cursor = self.connection.cursor()
self._prepare_for_test_db_ddl()
time.sleep(1)  # To avoid "database is being accessed by other users" errors.
cursor.execute('DROP DATABASE %s' % self.connection.ops.quote_name(test_database_name))
self.connection.close()
'Make sure a connection is in autocommit mode. Deprecated: no longer used by Django code, but kept for compatibility with user code that might use it.'
def set_autocommit(self):
pass
'Internal implementation - Hook for tasks that should be performed before the ``CREATE DATABASE``/``DROP DATABASE`` clauses used by testing code to create/destroy test databases. Needed e.g. in PostgreSQL to rollback and close any active transaction.'
def _prepare_for_test_db_ddl(self):
pass
'SQL to append to the end of the test table creation statements.'
def sql_table_creation_suffix(self):
return ''
'Returns a tuple with elements of self.connection.settings_dict (a DATABASES setting value) that uniquely identify a database according to the RDBMS\'s particularities.'
def test_db_signature(self):
settings_dict = self.connection.settings_dict
return (settings_dict['HOST'], settings_dict['PORT'],
        settings_dict['ENGINE'], settings_dict['NAME'])
'Puts the defaults into the settings dictionary for a given connection where no settings are provided.'
def ensure_defaults(self, alias):
try:
    conn = self.databases[alias]
except KeyError:
    raise ConnectionDoesNotExist("The connection %s doesn't exist" % alias)

conn.setdefault('ENGINE', 'django.db.backends.dummy')
if conn['ENGINE'] == 'django.db.backends.' or not conn['ENGINE']:
    conn['ENGINE'] = 'django.db.backends.dummy'
conn.setdefault('OPTIONS', {})
conn.setdefault('TIME_ZONE', 'UTC' if settings.USE_TZ else settings.TIME_ZONE)
for setting in ['NAME', 'USER', 'PASSWORD', 'HOST', 'PORT']:
    conn.setdefault(setting, '')
for setting in ['TEST_CHARSET', 'TEST_COLLATION', 'TEST_NAME', 'TEST_MIRROR']:
    conn.setdefault(setting, None)
'Check for denied User-Agents and rewrite the URL based on settings.APPEND_SLASH and settings.PREPEND_WWW'
def process_request(self, request):
if 'HTTP_USER_AGENT' in request.META:
    for user_agent_regex in settings.DISALLOWED_USER_AGENTS:
        if user_agent_regex.search(request.META['HTTP_USER_AGENT']):
            logger.warning('Forbidden (User agent): %s', request.path,
                extra={'status_code': 403, 'request': request})
            return http.HttpResponseForbidden('<h1>Forbidden</h1>')

host = request.get_host()
old_url = [host, request.path]
new_url = old_url[:]

if settings.PREPEND_WWW and old_url[0] and not old_url[0].startswith('www.'):
    new_url[0] = 'www.' + old_url[0]

if settings.APPEND_SLASH and not old_url[1].endswith('/'):
    urlconf = getattr(request, 'urlconf', None)
    if (not urlresolvers.is_valid_path(request.path_info, urlconf) and
            urlresolvers.is_valid_path('%s/' % request.path_info, urlconf)):
        new_url[1] = new_url[1] + '/'
        if settings.DEBUG and request.method == 'POST':
            raise RuntimeError(("You called this URL via POST, but the URL doesn't end "
                                "in a slash and you have APPEND_SLASH set. Django can't "
                                "redirect to the slash URL while maintaining POST data. "
                                "Change your form to point to %s%s (note the trailing "
                                "slash), or set APPEND_SLASH=False in your Django "
                                "settings.") % (new_url[0], new_url[1]))

if new_url == old_url:
    return
if new_url[0]:
    newurl = '%s://%s%s' % (
        request.is_secure() and 'https' or 'http',
        new_url[0], urlquote(new_url[1]))
else:
    newurl = urlquote(new_url[1])
if request.META.get('QUERY_STRING', ''):
    if six.PY3:
        newurl += '?' + request.META['QUERY_STRING']
    else:
        try:
            newurl += '?' + request.META['QUERY_STRING'].decode()
        except UnicodeDecodeError:
            pass
return http.HttpResponsePermanentRedirect(newurl)
'Send broken link emails and calculate the Etag, if needed.'
def process_response(self, request, response):
if response.status_code == 404:
    if settings.SEND_BROKEN_LINK_EMAILS and not settings.DEBUG:
        domain = request.get_host()
        referer = request.META.get('HTTP_REFERER', None)
        is_internal = _is_internal_request(domain, referer)
        path = request.get_full_path()
        if referer and not _is_ignorable_404(path) and (is_internal or '?' not in referer):
            ua = request.META.get('HTTP_USER_AGENT', '<none>')
            ip = request.META.get('REMOTE_ADDR', '<none>')
            mail_managers('Broken %slink on %s' % ((is_internal and 'INTERNAL ' or ''), domain),
                'Referrer: %s\nRequested URL: %s\nUser agent: %s\nIP address: %s\n'
                    % (referer, request.get_full_path(), ua, ip),
                fail_silently=True)
        return response

if settings.USE_ETAGS:
    if response.has_header('ETag'):
        etag = response['ETag']
    elif response.streaming:
        etag = None
    else:
        etag = '"%s"' % hashlib.md5(response.content).hexdigest()
    if etag is not None:
        if (200 <= response.status_code < 300
                and request.META.get('HTTP_IF_NONE_MATCH') == etag):
            cookies = response.cookies
            response = http.HttpResponseNotModified()
            response.cookies = cookies
        else:
            response['ETag'] = etag

return response
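import hashlib

# Minimal sketch of the ETag computation used above for non-streaming
# responses (content made up); a later request whose If-None-Match header
# matches this value receives a 304 Not Modified instead of the body.
content = b'<html><body>Hello</body></html>'
etag = '"%s"' % hashlib.md5(content).hexdigest()
print(etag)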
'Gets the value to set for the X_FRAME_OPTIONS header. By default this uses the value from the X_FRAME_OPTIONS Django settings. If not found in settings, defaults to \'SAMEORIGIN\'. This method can be overridden if needed, allowing it to vary based on the request or response.'
def get_xframe_options_value(self, request, response):
return getattr(settings, 'X_FRAME_OPTIONS', 'SAMEORIGIN').upper()
'Returns `True` if the `LocaleRegexURLResolver` is used at root level of the urlpatterns, else it returns `False`.'
def is_language_prefix_patterns_used(self):
for url_pattern in get_resolver(None).url_patterns:
    if isinstance(url_pattern, LocaleRegexURLResolver):
        return True
return False
'Enters transaction management'
def process_request(self, request):
transaction.enter_transaction_management()
transaction.managed(True)
'Rolls back the database and leaves transaction management'
def process_exception(self, request, exception):
if transaction.is_dirty():
    transaction.rollback()
transaction.leave_transaction_management()
'Commits and leaves transaction management.'
def process_response(self, request, response):
if transaction.is_managed():
    if transaction.is_dirty():
        try:
            transaction.commit()
        except Exception:
            transaction.rollback()
            transaction.leave_transaction_management()
            raise
    transaction.leave_transaction_management()
return response
'If the request method is HEAD and either the IP is internal or the user is a logged-in staff member, quickly return with an x-header indicating the view function. This is used by the documentation module to look up the view function for an arbitrary page.'
def process_view(self, request, view_func, view_args, view_kwargs):
assert hasattr(request, 'user'), (
    "The XView middleware requires authentication middleware to be "
    "installed. Edit your MIDDLEWARE_CLASSES setting to insert "
    "'django.contrib.auth.middleware.AuthenticationMiddleware'.")
if request.method == 'HEAD' and (request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS or
                                 (request.user.is_active and request.user.is_staff)):
    response = http.HttpResponse()
    response['X-View'] = '%s.%s' % (view_func.__module__, view_func.__name__)
    return response
'Sets the cache, if needed.'
def process_response(self, request, response):
if not self._should_update_cache(request, response):
    return response
if response.streaming or response.status_code != 200:
    return response
timeout = get_max_age(response)
if timeout == None:
    timeout = self.cache_timeout
elif timeout == 0:
    return response
patch_response_headers(response, timeout)
if timeout:
    cache_key = learn_cache_key(request, response, timeout, self.key_prefix, cache=self.cache)
    if hasattr(response, 'render') and callable(response.render):
        response.add_post_render_callback(
            lambda r: self.cache.set(cache_key, r, timeout))
    else:
        self.cache.set(cache_key, response, timeout)
return response
'Checks whether the page is already cached and returns the cached version if available.'
def process_request(self, request):
if request.method not in ('GET', 'HEAD'):
    request._cache_update_cache = False
    return None
cache_key = get_cache_key(request, self.key_prefix, 'GET', cache=self.cache)
if cache_key is None:
    request._cache_update_cache = True
    return None
response = self.cache.get(cache_key, None)
if response is None and request.method == 'HEAD':
    cache_key = get_cache_key(request, self.key_prefix, 'HEAD', cache=self.cache)
    response = self.cache.get(cache_key, None)
if response is None:
    request._cache_update_cache = True
    return None
request._cache_update_cache = False
return response
'Class method to parse prefix node and return a Node.'
@classmethod
def handle_token(cls, parser, token, name):
tokens = token.contents.split()
if len(tokens) > 1 and tokens[1] != 'as':
    raise template.TemplateSyntaxError(
        "First argument in '%s' must be 'as'" % tokens[0])
if len(tokens) > 1:
    varname = tokens[2]
else:
    varname = None
return cls(varname, name)
'Class method to parse prefix node and return a Node.'
@classmethod
def handle_token(cls, parser, token):
bits = token.split_contents()

if len(bits) < 2:
    raise template.TemplateSyntaxError(
        "'%s' takes at least one argument (path to file)" % bits[0])

path = parser.compile_filter(bits[1])

if len(bits) >= 2 and bits[-2] == 'as':
    varname = bits[3]
else:
    varname = None

return cls(varname, path)
'Returns the object the view is displaying. By default this requires `self.queryset` and a `pk` or `slug` argument in the URLconf, but subclasses can override this to return any object.'
def get_object(self, queryset=None):
if queryset is None:
    queryset = self.get_queryset()
pk = self.kwargs.get(self.pk_url_kwarg, None)
slug = self.kwargs.get(self.slug_url_kwarg, None)
if pk is not None:
    queryset = queryset.filter(pk=pk)
elif slug is not None:
    slug_field = self.get_slug_field()
    queryset = queryset.filter(**{slug_field: slug})
else:
    raise AttributeError("Generic detail view %s must be called with "
                         "either an object pk or a slug."
                         % self.__class__.__name__)
try:
    obj = queryset.get()
except ObjectDoesNotExist:
    raise Http404(_("No %(verbose_name)s found matching the query") %
                  {'verbose_name': queryset.model._meta.verbose_name})
return obj
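# Hedged usage sketch, assuming a configured Django 1.5 project with a
# hypothetical 'blog' app and Article model: SingleObjectMixin-based views
# resolve the object from a 'pk' or 'slug' captured in the URLconf.
from django.views.generic import DetailView
from blog.models import Article          # hypothetical model

class ArticleDetail(DetailView):
    model = Article        # get_queryset() falls back to Article._default_manager.all()
    slug_field = 'slug'    # used when the URL captures 'slug' instead of 'pk'

# urls.py would then capture one of the two lookups, e.g.:
#   url(r'^articles/(?P<pk>\d+)/$', ArticleDetail.as_view())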
'Get the queryset to look an object up against. May not be called if `get_object` is overridden.'
def get_queryset(self):
if self.queryset is None:
    if self.model:
        return self.model._default_manager.all()
    else:
        raise ImproperlyConfigured(
            "%(cls)s is missing a queryset. Define "
            "%(cls)s.model, %(cls)s.queryset, or override "
            "%(cls)s.get_queryset()." % {'cls': self.__class__.__name__})
return self.queryset._clone()
'Get the name of a slug field to be used to look up by slug.'
def get_slug_field(self):
return self.slug_field
'Get the name to use for the object.'
def get_context_object_name(self, obj):
if self.context_object_name:
    return self.context_object_name
elif isinstance(obj, models.Model):
    return obj._meta.object_name.lower()
else:
    return None
'Insert the single object into the context dict.'
def get_context_data(self, **kwargs):
context = {}
context_object_name = self.get_context_object_name(self.object)
if context_object_name:
    context[context_object_name] = self.object
context.update(kwargs)
return super(SingleObjectMixin, self).get_context_data(**context)
'Return a list of template names to be used for the request. May not be called if render_to_response is overridden. Returns the following list: * the value of ``template_name`` on the view (if provided) * the contents of the ``template_name_field`` field on the object instance that the view is operating upon (if available) * ``<app_label>/<object_name><template_name_suffix>.html``'
def get_template_names(self):
try:
    names = super(SingleObjectTemplateResponseMixin, self).get_template_names()
except ImproperlyConfigured:
    names = []
if self.object and self.template_name_field:
    name = getattr(self.object, self.template_name_field, None)
    if name:
        names.insert(0, name)
if isinstance(self.object, models.Model):
    names.append('%s/%s%s.html' % (
        self.object._meta.app_label,
        self.object._meta.object_name.lower(),
        self.template_name_suffix))
elif hasattr(self, 'model') and self.model is not None and issubclass(self.model, models.Model):
    names.append('%s/%s%s.html' % (
        self.model._meta.app_label,
        self.model._meta.object_name.lower(),
        self.template_name_suffix))
return names
'Constructor. Called in the URLconf; can contain helpful extra keyword arguments, and other things.'
def __init__(self, **kwargs):
for key, value in six.iteritems(kwargs):
    setattr(self, key, value)
'Main entry point for a request-response process.'
@classonlymethod
def as_view(cls, **initkwargs):
for key in initkwargs:
    if key in cls.http_method_names:
        raise TypeError("You tried to pass in the %s method name as a "
                        "keyword argument to %s(). Don't do that."
                        % (key, cls.__name__))
    if not hasattr(cls, key):
        raise TypeError("%s() received an invalid keyword %r. as_view "
                        "only accepts arguments that are already "
                        "attributes of the class." % (cls.__name__, key))

def view(request, *args, **kwargs):
    self = cls(**initkwargs)
    if hasattr(self, 'get') and not hasattr(self, 'head'):
        self.head = self.get
    self.request = request
    self.args = args
    self.kwargs = kwargs
    return self.dispatch(request, *args, **kwargs)

update_wrapper(view, cls, updated=())
update_wrapper(view, cls.dispatch, assigned=())
return view
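# Usage sketch (Django 1.5-era URLconf): as_view() only accepts keyword
# arguments that are existing class attributes and not HTTP method names,
# so configuring template_name at URLconf time is allowed.
from django.conf.urls import patterns, url
from django.views.generic import TemplateView

urlpatterns = patterns('',
    url(r'^about/$', TemplateView.as_view(template_name='about.html')),
)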
'Handles responding to requests for the OPTIONS HTTP verb.'
def options(self, request, *args, **kwargs):
response = http.HttpResponse()
response['Allow'] = ', '.join(self._allowed_methods())
response['Content-Length'] = '0'
return response
'Returns a response, using the `response_class` for this view, with a template rendered with the given context. If any keyword arguments are provided, they will be passed to the constructor of the response class.'
def render_to_response(self, context, **response_kwargs):
response_kwargs.setdefault('content_type', self.content_type)
return self.response_class(
    request=self.request,
    template=self.get_template_names(),
    context=context,
    **response_kwargs)
'Returns a list of template names to be used for the request. Must return a list. May not be called if render_to_response is overridden.'
def get_template_names(self):
if self.template_name is None:
    raise ImproperlyConfigured(
        "TemplateResponseMixin requires either a definition of "
        "'template_name' or an implementation of 'get_template_names()'")
else:
    return [self.template_name]
'Return the URL redirect to. Keyword arguments from the URL pattern match generating the redirect request are provided as kwargs to this method.'
def get_redirect_url(self, **kwargs):
if self.url:
    url = self.url % kwargs
    args = self.request.META.get('QUERY_STRING', '')
    if args and self.query_string:
        url = '%s?%s' % (url, args)
    return url
else:
    return None
'Get a year format string in strptime syntax to be used to parse the year from url variables.'
def get_year_format(self):
return self.year_format
'Return the year for which this view should display data.'
def get_year(self):
year = self.year
if year is None:
    try:
        year = self.kwargs['year']
    except KeyError:
        try:
            year = self.request.GET['year']
        except KeyError:
            raise Http404(_("No year specified"))
return year
'Get the next valid year.'
def get_next_year(self, date):
return _get_next_prev(self, date, is_previous=False, period=u'year')
'Get the previous valid year.'
def get_previous_year(self, date):
return _get_next_prev(self, date, is_previous=True, period=u'year')
'Return the start date of the next interval. The interval is defined by start date <= item date < next start date.'
def _get_next_year(self, date):
return date.replace(year=(date.year + 1), month=1, day=1)
'Return the start date of the current interval.'
def _get_current_year(self, date):
return date.replace(month=1, day=1)
'Get a month format string in strptime syntax to be used to parse the month from url variables.'
def get_month_format(self):
return self.month_format
'Return the month for which this view should display data.'
def get_month(self):
month = self.month
if month is None:
    try:
        month = self.kwargs['month']
    except KeyError:
        try:
            month = self.request.GET['month']
        except KeyError:
            raise Http404(_("No month specified"))
return month
'Get the next valid month.'
def get_next_month(self, date):
return _get_next_prev(self, date, is_previous=False, period=u'month')
'Get the previous valid month.'
def get_previous_month(self, date):
return _get_next_prev(self, date, is_previous=True, period=u'month')
'Return the start date of the next interval. The interval is defined by start date <= item date < next start date.'
def _get_next_month(self, date):
if date.month == 12:
    return date.replace(year=date.year + 1, month=1, day=1)
else:
    return date.replace(month=date.month + 1, day=1)
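import datetime

# Illustration of the "start date <= item date < next start date" interval:
# December rolls over to January 1 of the next year, any other month rolls
# over to the first of the following month.
print(datetime.date(2012, 12, 15).replace(year=2013, month=1, day=1))   # 2013-01-01
print(datetime.date(2012, 3, 31).replace(month=4, day=1))               # 2012-04-01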
'Return the start date of the previous interval.'
def _get_current_month(self, date):
return date.replace(day=1)
'Get a day format string in strptime syntax to be used to parse the day from url variables.'
def get_day_format(self):
return self.day_format
'Return the day for which this view should display data.'
def get_day(self):
day = self.day
if day is None:
    try:
        day = self.kwargs['day']
    except KeyError:
        try:
            day = self.request.GET['day']
        except KeyError:
            raise Http404(_("No day specified"))
return day
'Get the next valid day.'
def get_next_day(self, date):
return _get_next_prev(self, date, is_previous=False, period=u'day')
'Get the previous valid day.'
def get_previous_day(self, date):
return _get_next_prev(self, date, is_previous=True, period=u'day')
'Return the start date of the next interval. The interval is defined by start date <= item date < next start date.'
def _get_next_day(self, date):
return (date + datetime.timedelta(days=1))
'Return the start date of the current interval.'
def _get_current_day(self, date):
return date