| desc (string, 3–26.7k chars) | decl (string, 11–7.89k chars) | bodies (string, 8–553k chars) |
|---|---|---|
'Delete a variable from the current context'
| def __delitem__(self, key):
| del self.dicts[(-1)][key]
|
def new(self, values=None):
    """Return a copy of this context holding only the given 'values'."""
    duplicate = copy(self)
    duplicate._reset_dicts(values)
    return duplicate
def update(self, other_dict):
    """Push other_dict onto the stack of dictionaries in the Context."""
    # Anything indexable is accepted as a mapping.
    if not hasattr(other_dict, '__getitem__'):
        raise TypeError('other_dict must be a mapping (dictionary-like) object.')
    self.dicts.append(other_dict)
    return other_dict
def tokenize(self):
    """Return a list of tokens from a given template_string."""
    result = []
    upto = 0
    for match in tag_re.finditer(self.template_string):
        start, end = match.span()
        # Emit any literal text sitting between the previous tag and this one.
        if start > upto:
            result.append(self.create_token(self.template_string[upto:start], (upto, start), False))
        # Emit the tag itself.
        result.append(self.create_token(self.template_string[start:end], (start, end), True))
        upto = end
    # Emit trailing literal text after the final tag, if any.
    last_bit = self.template_string[upto:]
    if last_bit:
        result.append(self.create_token(last_bit, (upto, upto + len(last_bit)), False))
    return result
def test_flatpage_admin_form_url_validation(self):
    """The flatpage admin form correctly validates urls."""
    for good_url in (u'/new_flatpage/', u'/some.special~chars/', u'/some.very_special~chars-here/'):
        self.assertTrue(FlatpageForm(data=dict(url=good_url, **self.form_data)).is_valid())
    for bad_url in (u'/a space/', u'/a % char/', u'/a ! char/', u'/a & char/', u'/a ? char/'):
        self.assertFalse(FlatpageForm(data=dict(url=bad_url, **self.form_data)).is_valid())
def test_flatpage_admin_form_url_uniqueness_validation(self):
    """The flatpage admin form correctly enforces url uniqueness among flatpages of the same site."""
    data = dict(url=u'/myflatpage1/', **self.form_data)
    # First save succeeds; a second form with the same url must not validate.
    FlatpageForm(data=data).save()
    duplicate = FlatpageForm(data=data)
    self.assertFalse(duplicate.is_valid())
    self.assertEqual(
        duplicate.errors,
        {u'__all__': [u'Flatpage with url /myflatpage1/ already exists for site example.com']})
def test_flatpage_admin_form_edit(self):
    """Existing flatpages can be edited in the admin form without triggering
    the url-uniqueness validation."""
    existing = FlatPage.objects.create(
        url=u'/myflatpage1/', title=u'Some page', content=u'The content')
    existing.sites.add(settings.SITE_ID)
    form = FlatpageForm(data=dict(url=u'/myflatpage1/', **self.form_data), instance=existing)
    self.assertTrue(form.is_valid(), form.errors)
    self.assertEqual(form.save().title, u'A test page')
def test_view_flatpage(self):
    """A flatpage can be served through a view, even when the middleware is in use."""
    resp = self.client.get('/flatpage_root/flatpage/')
    self.assertEqual(resp.status_code, 200)
    self.assertContains(resp, "<p>Isn't it flat!</p>")
def test_view_non_existent_flatpage(self):
    """A non-existent flatpage raises 404 when served through a view, even when the middleware is in use."""
    self.assertEqual(self.client.get('/flatpage_root/no_such_flatpage/').status_code, 404)
def test_view_authenticated_flatpage(self):
    """A flatpage served through a view can require authentication."""
    # Anonymous access is redirected to login.
    resp = self.client.get('/flatpage_root/sekrit/')
    self.assertRedirects(resp, '/accounts/login/?next=/flatpage_root/sekrit/')
    # After logging in, the page is served.
    User.objects.create_user('testuser', '[email protected]', 's3krit')
    self.client.login(username='testuser', password='s3krit')
    resp = self.client.get('/flatpage_root/sekrit/')
    self.assertEqual(resp.status_code, 200)
    self.assertContains(resp, "<p>Isn't it sekrit!</p>")
def test_fallback_flatpage(self):
    """A flatpage can be served by the fallback middleware."""
    resp = self.client.get('/flatpage/')
    self.assertEqual(resp.status_code, 200)
    self.assertContains(resp, "<p>Isn't it flat!</p>")
def test_fallback_non_existent_flatpage(self):
    """A non-existent flatpage raises a 404 when served by the fallback middleware."""
    self.assertEqual(self.client.get('/no_such_flatpage/').status_code, 404)
def test_post_view_flatpage(self):
    """POSTing to a flatpage served through a view will raise a CSRF error if no token is provided (Refs #14156)."""
    self.assertEqual(self.client.post('/flatpage_root/flatpage/').status_code, 403)
def test_post_fallback_flatpage(self):
    """POSTing to a flatpage served by the middleware will raise a CSRF error if no token is provided (Refs #14156)."""
    self.assertEqual(self.client.post('/flatpage/').status_code, 403)
def test_post_unknown_page(self):
    """POSTing to an unknown page isn't caught as a 403 CSRF error."""
    self.assertEqual(self.client.post('/no_such_page/').status_code, 404)
def test_get_flatpages_tag(self):
    """The flatpage template tag retrieves unregistered prefixed flatpages by default."""
    tpl = Template('{% load flatpages %}{% get_flatpages as flatpages %}{% for page in flatpages %}{{ page.title }},{% endfor %}')
    self.assertEqual(tpl.render(Context()), 'A Flatpage,A Nested Flatpage,')
def test_get_flatpages_tag_for_anon_user(self):
    """The flatpage template tag retrieves unregistered flatpages for an anonymous user."""
    tpl = Template('{% load flatpages %}{% get_flatpages for anonuser as flatpages %}{% for page in flatpages %}{{ page.title }},{% endfor %}')
    rendered = tpl.render(Context({'anonuser': AnonymousUser()}))
    self.assertEqual(rendered, 'A Flatpage,A Nested Flatpage,')
def test_get_flatpages_tag_for_user(self):
    """The flatpage template tag retrieves all flatpages for an authenticated user."""
    tpl = Template('{% load flatpages %}{% get_flatpages for me as flatpages %}{% for page in flatpages %}{{ page.title }},{% endfor %}')
    rendered = tpl.render(Context({'me': self.me}))
    self.assertEqual(rendered, 'A Flatpage,A Nested Flatpage,Sekrit Nested Flatpage,Sekrit Flatpage,')
def test_get_flatpages_with_prefix(self):
    """The flatpage template tag retrieves unregistered prefixed flatpages by default."""
    tpl = Template("{% load flatpages %}{% get_flatpages '/location/' as location_flatpages %}{% for page in location_flatpages %}{{ page.title }},{% endfor %}")
    self.assertEqual(tpl.render(Context()), 'A Nested Flatpage,')
def test_get_flatpages_with_prefix_for_anon_user(self):
    """The flatpage template tag retrieves unregistered prefixed flatpages for an anonymous user."""
    tpl = Template("{% load flatpages %}{% get_flatpages '/location/' for anonuser as location_flatpages %}{% for page in location_flatpages %}{{ page.title }},{% endfor %}")
    rendered = tpl.render(Context({'anonuser': AnonymousUser()}))
    self.assertEqual(rendered, 'A Nested Flatpage,')
def test_get_flatpages_with_prefix_for_user(self):
    """The flatpage template tag retrieves prefixed flatpages for an authenticated user."""
    tpl = Template("{% load flatpages %}{% get_flatpages '/location/' for me as location_flatpages %}{% for page in location_flatpages %}{{ page.title }},{% endfor %}")
    rendered = tpl.render(Context({'me': self.me}))
    self.assertEqual(rendered, 'A Nested Flatpage,Sekrit Nested Flatpage,')
def test_get_flatpages_with_variable_prefix(self):
    """The prefix for the flatpage template tag can be a template variable."""
    tpl = Template('{% load flatpages %}{% get_flatpages location_prefix as location_flatpages %}{% for page in location_flatpages %}{{ page.title }},{% endfor %}')
    rendered = tpl.render(Context({'location_prefix': '/location/'}))
    self.assertEqual(rendered, 'A Nested Flatpage,')
def test_parsing_errors(self):
    """There are various ways that the flatpages template tag won't parse."""
    def render(t):
        return Template(t).render(Context())
    bad_tags = (
        '{% get_flatpages %}',
        '{% get_flatpages as %}',
        '{% get_flatpages cheesecake flatpages %}',
        '{% get_flatpages as flatpages asdf%}',
        '{% get_flatpages cheesecake user as flatpages %}',
        '{% get_flatpages for user as flatpages asdf%}',
        '{% get_flatpages prefix for user as flatpages asdf%}',
    )
    for tag in bad_tags:
        self.assertRaises(TemplateSyntaxError, render, '{% load flatpages %}' + tag)
def test_view_flatpage(self):
    """A flatpage can be served through a view."""
    resp = self.client.get('/flatpage_root/flatpage/')
    self.assertEqual(resp.status_code, 200)
    self.assertContains(resp, "<p>Isn't it flat!</p>")
def test_view_non_existent_flatpage(self):
    """A non-existent flatpage raises 404 when served through a view."""
    self.assertEqual(self.client.get('/flatpage_root/no_such_flatpage/').status_code, 404)
def test_view_authenticated_flatpage(self):
    """A flatpage served through a view can require authentication."""
    resp = self.client.get('/flatpage_root/sekrit/')
    self.assertRedirects(resp, '/accounts/login/?next=/flatpage_root/sekrit/')
    User.objects.create_user('testuser', '[email protected]', 's3krit')
    self.client.login(username='testuser', password='s3krit')
    resp = self.client.get('/flatpage_root/sekrit/')
    self.assertEqual(resp.status_code, 200)
    self.assertContains(resp, "<p>Isn't it sekrit!</p>")
def test_fallback_flatpage(self):
    """A fallback flatpage won't be served if the middleware is disabled."""
    self.assertEqual(self.client.get('/flatpage/').status_code, 404)
def test_fallback_non_existent_flatpage(self):
    """A non-existent flatpage won't be served if the fallback middleware is disabled."""
    self.assertEqual(self.client.get('/no_such_flatpage/').status_code, 404)
def test_view_flatpage_special_chars(self):
    """A flatpage with special chars in the URL can be served through a view."""
    page = FlatPage.objects.create(
        url='/some.very_special~chars-here/',
        title='A very special page',
        content="Isn't it special!",
        enable_comments=False,
        registration_required=False,
    )
    page.sites.add(settings.SITE_ID)
    resp = self.client.get('/flatpage_root/some.very_special~chars-here/')
    self.assertEqual(resp.status_code, 200)
    self.assertContains(resp, "<p>Isn't it special!</p>")
def test_redirect_view_flatpage(self):
    """A flatpage can be served through a view and should add a slash."""
    resp = self.client.get('/flatpage_root/flatpage')
    self.assertRedirects(resp, '/flatpage_root/flatpage/', status_code=301)
def test_redirect_view_non_existent_flatpage(self):
    """A non-existent flatpage raises 404 when served through a view and should not add a slash."""
    self.assertEqual(self.client.get('/flatpage_root/no_such_flatpage').status_code, 404)
def test_redirect_fallback_flatpage(self):
    """A fallback flatpage won't be served if the middleware is disabled and should not add a slash."""
    self.assertEqual(self.client.get('/flatpage').status_code, 404)
def test_redirect_fallback_non_existent_flatpage(self):
    """A non-existent flatpage won't be served if the fallback middleware is disabled and should not add a slash."""
    self.assertEqual(self.client.get('/no_such_flatpage').status_code, 404)
def test_redirect_view_flatpage_special_chars(self):
    """A flatpage with special chars in the URL can be served through a view and should add a slash."""
    page = FlatPage.objects.create(
        url='/some.very_special~chars-here/',
        title='A very special page',
        content="Isn't it special!",
        enable_comments=False,
        registration_required=False,
    )
    page.sites.add(settings.SITE_ID)
    resp = self.client.get('/flatpage_root/some.very_special~chars-here')
    self.assertRedirects(resp, '/flatpage_root/some.very_special~chars-here/', status_code=301)
def test_view_flatpage(self):
    """A flatpage can be served through a view, even when the middleware is in use."""
    resp = self.client.get('/flatpage_root/flatpage/')
    self.assertEqual(resp.status_code, 200)
    self.assertContains(resp, "<p>Isn't it flat!</p>")
def test_view_non_existent_flatpage(self):
    """A non-existent flatpage raises 404 when served through a view, even when the middleware is in use."""
    self.assertEqual(self.client.get('/flatpage_root/no_such_flatpage/').status_code, 404)
def test_view_authenticated_flatpage(self):
    """A flatpage served through a view can require authentication."""
    resp = self.client.get('/flatpage_root/sekrit/')
    self.assertRedirects(resp, '/accounts/login/?next=/flatpage_root/sekrit/')
    User.objects.create_user('testuser', '[email protected]', 's3krit')
    self.client.login(username='testuser', password='s3krit')
    resp = self.client.get('/flatpage_root/sekrit/')
    self.assertEqual(resp.status_code, 200)
    self.assertContains(resp, "<p>Isn't it sekrit!</p>")
def test_fallback_flatpage(self):
    """A flatpage can be served by the fallback middleware."""
    resp = self.client.get('/flatpage/')
    self.assertEqual(resp.status_code, 200)
    self.assertContains(resp, "<p>Isn't it flat!</p>")
def test_fallback_non_existent_flatpage(self):
    """A non-existent flatpage raises a 404 when served by the fallback middleware."""
    self.assertEqual(self.client.get('/no_such_flatpage/').status_code, 404)
def test_fallback_authenticated_flatpage(self):
    """A flatpage served by the middleware can require authentication."""
    resp = self.client.get('/sekrit/')
    self.assertRedirects(resp, '/accounts/login/?next=/sekrit/')
    User.objects.create_user('testuser', '[email protected]', 's3krit')
    self.client.login(username='testuser', password='s3krit')
    resp = self.client.get('/sekrit/')
    self.assertEqual(resp.status_code, 200)
    self.assertContains(resp, "<p>Isn't it sekrit!</p>")
def test_fallback_flatpage_special_chars(self):
    """A flatpage with special chars in the URL can be served by the fallback middleware."""
    page = FlatPage.objects.create(
        url='/some.very_special~chars-here/',
        title='A very special page',
        content="Isn't it special!",
        enable_comments=False,
        registration_required=False,
    )
    page.sites.add(settings.SITE_ID)
    resp = self.client.get('/some.very_special~chars-here/')
    self.assertEqual(resp.status_code, 200)
    self.assertContains(resp, "<p>Isn't it special!</p>")
def test_redirect_view_flatpage(self):
    """A flatpage can be served through a view and should add a slash."""
    resp = self.client.get('/flatpage_root/flatpage')
    self.assertRedirects(resp, '/flatpage_root/flatpage/', status_code=301)
def test_redirect_view_non_existent_flatpage(self):
    """A non-existent flatpage raises 404 when served through a view and should not add a slash."""
    self.assertEqual(self.client.get('/flatpage_root/no_such_flatpage').status_code, 404)
def test_redirect_fallback_flatpage(self):
    """A flatpage can be served by the fallback middleware and should add a slash."""
    resp = self.client.get('/flatpage')
    self.assertRedirects(resp, '/flatpage/', status_code=301)
def test_redirect_fallback_non_existent_flatpage(self):
    """A non-existent flatpage raises a 404 when served by the fallback middleware and should not add a slash."""
    self.assertEqual(self.client.get('/no_such_flatpage').status_code, 404)
def test_redirect_fallback_flatpage_special_chars(self):
    """A flatpage with special chars in the URL can be served by the fallback middleware and should add a slash."""
    page = FlatPage.objects.create(
        url='/some.very_special~chars-here/',
        title='A very special page',
        content="Isn't it special!",
        enable_comments=False,
        registration_required=False,
    )
    page.sites.add(settings.SITE_ID)
    resp = self.client.get('/some.very_special~chars-here')
    self.assertRedirects(resp, '/some.very_special~chars-here/', status_code=301)
def test_redirect_fallback_flatpage_root(self):
    """A flatpage at / should not cause a redirect loop when APPEND_SLASH is set."""
    page = FlatPage.objects.create(
        url='/', title='Root', content='Root',
        enable_comments=False, registration_required=False)
    page.sites.add(settings.SITE_ID)
    resp = self.client.get('/')
    self.assertEqual(resp.status_code, 200)
    self.assertContains(resp, '<p>Root</p>')
def get_handler(self, *args, **options):
    """Returns the static files serving handler wrapping the default handler,
    if static files should be served. Otherwise just returns the default
    handler.
    """
    handler = super(Command, self).get_handler(*args, **options)
    wants_static = options.get('use_static_handler', True)
    insecure = options.get('insecure_serving', False)
    # Serve static files only when explicitly requested and either DEBUG
    # is on or insecure serving was asked for.
    if wants_static and (settings.DEBUG or insecure):
        return StaticFilesHandler(handler)
    return handler
def set_options(self, **options):
    """Set instance variables based on an options dict.

    Bug fix: the original mutated the caller-supplied ``ignore_patterns``
    list in place via ``+=``, so a shared default (e.g. the command option
    default list) grew across invocations. A copy is taken instead.
    """
    self.interactive = options[u'interactive']
    self.verbosity = int(options.get(u'verbosity', 1))
    self.symlink = options[u'link']
    self.clear = options[u'clear']
    self.dry_run = options[u'dry_run']
    # Copy before extending so the caller's list is never mutated.
    ignore_patterns = list(options[u'ignore_patterns'])
    if options[u'use_default_ignore_patterns']:
        ignore_patterns += [u'CVS', u'.*', u'*~']
    # De-duplicate; order of patterns is not significant.
    self.ignore_patterns = list(set(ignore_patterns))
    self.post_process = options[u'post_process']
def collect(self):
    """Perform the bulk of the work of collectstatic.

    Split off from handle_noargs() to facilitate testing.
    Returns a dict of modified, unmodified and post-processed file lists.
    """
    if self.symlink:
        # Symlinking is only possible on non-Windows local storage.
        if sys.platform == u'win32':
            raise CommandError(u'Symlinking is not supported by this platform (%s).' % sys.platform)
        if not self.local:
            raise CommandError(u"Can't symlink to a remote destination.")
    if self.clear:
        self.clear_dir(u'')
    if self.symlink:
        handler = self.link_file
    else:
        handler = self.copy_file
    found_files = SortedDict()
    for finder in finders.get_finders():
        for path, storage in finder.list(self.ignore_patterns):
            # Prefix the relative path if the source storage declares one.
            if getattr(storage, u'prefix', None):
                prefixed_path = os.path.join(storage.prefix, path)
            else:
                prefixed_path = path
            # First finder wins: later duplicates are ignored.
            if prefixed_path not in found_files:
                found_files[prefixed_path] = (storage, path)
                handler(path, prefixed_path, storage)
    if self.post_process and hasattr(self.storage, u'post_process'):
        processor = self.storage.post_process(found_files, dry_run=self.dry_run)
        for original_path, processed_path, processed in processor:
            if processed:
                # Bug fix: the message string was missing its closing quote
                # ("...as '%s" -> "...as '%s'").
                self.log(u"Post-processed '%s' as '%s'" % (original_path, processed_path), level=1)
                self.post_processed_files.append(original_path)
            else:
                self.log(u"Skipped post-processing '%s'" % original_path)
    return {
        u'modified': self.copied_files + self.symlinked_files,
        u'unmodified': self.unmodified_files,
        u'post_processed': self.post_processed_files,
    }
def log(self, msg, level=2):
    """Small log helper: write msg only when verbosity reaches level."""
    if self.verbosity < level:
        return
    self.stdout.write(msg)
def clear_dir(self, path):
    """Delete the given relative path using the destination storage backend."""
    dirs, files = self.storage.listdir(path)
    for filename in files:
        fpath = os.path.join(path, filename)
        if self.dry_run:
            self.log(u"Pretending to delete '%s'" % smart_text(fpath), level=1)
        else:
            self.log(u"Deleting '%s'" % smart_text(fpath), level=1)
            self.storage.delete(fpath)
    # Recurse into subdirectories.
    for dirname in dirs:
        self.clear_dir(os.path.join(path, dirname))
def delete_file(self, path, prefixed_path, source_storage):
    'Checks if the target file should be deleted if it already exists'
    # Returns False (skip) when the destination is already up to date,
    # True when the caller should (re)write the file.
    if self.storage.exists(prefixed_path):
        try:
            # When was the target file last modified? Backends without
            # timestamp support fall through to unconditional deletion.
            target_last_modified = self.storage.modified_time(prefixed_path)
        except (OSError, NotImplementedError, AttributeError):
            pass
        else:
            try:
                # When was the source file last modified?
                source_last_modified = source_storage.modified_time(path)
            except (OSError, NotImplementedError, AttributeError):
                pass
            else:
                # full_path is only resolvable for local destination storage;
                # it is needed for the symlink-mismatch check below.
                if self.local:
                    full_path = self.storage.path(prefixed_path)
                else:
                    full_path = None
                if (target_last_modified >= source_last_modified):
                    # Target is current. Still rewrite it when the existing
                    # file's link-ness disagrees with the requested mode
                    # (regular file present but --link requested, or a
                    # symlink present in copy mode).
                    if (not ((self.symlink and full_path and (not os.path.islink(full_path))) or ((not self.symlink) and full_path and os.path.islink(full_path)))):
                        if (prefixed_path not in self.unmodified_files):
                            self.unmodified_files.append(prefixed_path)
                        self.log((u"Skipping '%s' (not modified)" % path))
                        return False
        # The existing file is stale or unverifiable: delete it
        # (unless dry-running) so it can be rewritten.
        if self.dry_run:
            self.log((u"Pretending to delete '%s'" % path))
        else:
            self.log((u"Deleting '%s'" % path))
            self.storage.delete(prefixed_path)
    return True
def link_file(self, path, prefixed_path, source_storage):
    """Attempt to link ``path``."""
    # Already handled in this run?
    if prefixed_path in self.symlinked_files:
        return self.log(u"Skipping '%s' (already linked earlier)" % path)
    # delete_file() returning False means the destination is up to date.
    if not self.delete_file(path, prefixed_path, source_storage):
        return
    source_path = source_storage.path(path)
    if self.dry_run:
        self.log(u"Pretending to link '%s'" % source_path, level=1)
    else:
        self.log(u"Linking '%s'" % source_path, level=1)
        full_path = self.storage.path(prefixed_path)
        try:
            os.makedirs(os.path.dirname(full_path))
        except OSError:
            # Parent directory already exists.
            pass
        os.symlink(source_path, full_path)
    if prefixed_path not in self.symlinked_files:
        self.symlinked_files.append(prefixed_path)
def copy_file(self, path, prefixed_path, source_storage):
    """Attempt to copy ``path`` with storage."""
    # Already handled in this run?
    if prefixed_path in self.copied_files:
        return self.log(u"Skipping '%s' (already copied earlier)" % path)
    # delete_file() returning False means the destination is up to date.
    if not self.delete_file(path, prefixed_path, source_storage):
        return
    source_path = source_storage.path(path)
    if self.dry_run:
        self.log(u"Pretending to copy '%s'" % source_path, level=1)
    else:
        self.log(u"Copying '%s'" % source_path, level=1)
        if self.local:
            full_path = self.storage.path(prefixed_path)
            try:
                os.makedirs(os.path.dirname(full_path))
            except OSError:
                # Parent directory already exists.
                pass
        with source_storage.open(path) as source_file:
            self.storage.save(prefixed_path, source_file)
    if prefixed_path not in self.copied_files:
        self.copied_files.append(prefixed_path)
'Checks if the path should be handled. Ignores the path if:
* the host is provided as part of the base_url
* the request\'s path isn\'t under the media path (or equal)'
| def _should_handle(self, path):
| return (path.startswith(self.base_url[2]) and (not self.base_url[1]))
|
def file_path(self, url):
    """Returns the relative path to the media file on disk for the given URL."""
    # Strip the base url prefix, then convert URL separators to OS ones.
    prefix_len = len(self.base_url[2])
    return url2pathname(url[prefix_len:])
def serve(self, request):
    """Actually serves the request path."""
    relative_path = self.file_path(request.path)
    return serve(request, relative_path, insecure=True)
def file_hash(self, name, content=None):
    """Returns a hash of the file with the given name and optional content."""
    if content is None:
        return None
    digest = hashlib.md5()
    for chunk in content.chunks():
        digest.update(chunk)
    # Only the first 12 hex digits are used in hashed filenames.
    return digest.hexdigest()[:12]
def url(self, name, force=False):
    'Returns the real URL in DEBUG mode.'
    # In DEBUG mode (unless forced) serve the original, un-hashed name.
    if (settings.DEBUG and (not force)):
        (hashed_name, fragment) = (name, u'')
    else:
        # Split off any '#fragment' so hashing applies to the file only.
        (clean_name, fragment) = urldefrag(name)
        if urlsplit(clean_name).path.endswith(u'/'):
            # Directory paths are never hashed.
            hashed_name = name
        else:
            # Look up the hashed name in the cache, computing on a miss.
            cache_key = self.cache_key(name)
            hashed_name = self.cache.get(cache_key)
            if (hashed_name is None):
                hashed_name = self.hashed_name(clean_name).replace(u'\\', u'/')
                self.cache.set(cache_key, hashed_name)
    final_url = super(CachedFilesMixin, self).url(hashed_name)
    # Special-casing for names that contain '?#' (e.g. IE workarounds in
    # CSS font URLs): the query-plus-fragment must be restored on the
    # final URL since urldefrag/hashing drops it.
    query_fragment = (u'?#' in name)
    if (fragment or query_fragment):
        urlparts = list(urlsplit(final_url))
        if (fragment and (not urlparts[4])):
            urlparts[4] = fragment
        if (query_fragment and (not urlparts[3])):
            urlparts[2] += u'?'
        final_url = urlunsplit(urlparts)
    return unquote(final_url)
def url_converter(self, name, template=None):
    'Returns the custom URL converter for the given file name.'
    if (template is None):
        template = self.default_template
    def converter(matchobj):
        """Converts the matched URL depending on the parent level (`..`)
        and returns the normalized and hashed URL using the url method
        of the storage.
        """
        (matched, url) = matchobj.groups()
        # Leave absolute/external/data URLs and pure fragments untouched.
        if url.startswith((u'#', u'http:', u'https:', u'data:', u'//')):
            return matched
        name_parts = name.split(os.sep)
        # Normalize and split the referenced url into path components.
        url = posixpath.normpath(url)
        url_parts = url.split(u'/')
        (parent_level, sub_level) = (url.count(u'..'), url.count(u'/'))
        if url.startswith(u'/'):
            sub_level -= 1
            url_parts = url_parts[1:]
        # Compute how many trailing components of `name` to drop (start)
        # and how many leading components of the url to skip (end) when
        # resolving the reference relative to the referencing file.
        if (parent_level or (not url.startswith(u'/'))):
            (start, end) = ((parent_level + 1), parent_level)
        elif sub_level:
            if (sub_level == 1):
                parent_level -= 1
            (start, end) = (parent_level, 1)
        else:
            (start, end) = (1, (sub_level - 1))
        joined_result = u'/'.join((name_parts[:(- start)] + url_parts[end:]))
        # Hash the resolved target, then splice only its (hashed) filename
        # back into the original relative url.
        hashed_url = self.url(unquote(joined_result), force=True)
        file_name = hashed_url.split(u'/')[(-1):]
        relative_url = u'/'.join((url.split(u'/')[:(-1)] + file_name))
        return (template % unquote(relative_url))
    return converter
|
def post_process(self, paths, dry_run=False, **options):
    """Post process the given list of files (called from collectstatic).

    Processing is actually two separate operations:

    1. renaming files to include a hash of their content for cache-busting,
       and copying those files to the target storage.
    2. adjusting files which contain references to other files so they
       refer to the cache-busting filenames.

    If either of these are performed on a file, then that file is considered
    post-processed. Yields (name, hashed_name, processed) tuples.
    """
    # Generator: a plain return ends iteration without yielding anything.
    if dry_run:
        return
    hashed_paths = {}
    # Files matching any url-rewriting pattern need their contents adjusted.
    matches = (lambda path: matches_patterns(path, self._patterns.keys()))
    adjustable_paths = [path for path in paths if matches(path)]
    # Process deepest paths first so referenced files are hashed before
    # the files that refer to them.
    path_level = (lambda name: len(name.split(os.sep)))
    for name in sorted(paths.keys(), key=path_level, reverse=True):
        (storage, path) = paths[name]
        with storage.open(path) as original_file:
            hashed_name = self.hashed_name(name, original_file)
            # hashed_name() may have consumed the file; rewind for re-reading.
            if hasattr(original_file, u'seek'):
                original_file.seek(0)
            hashed_file_exists = self.exists(hashed_name)
            processed = False
            if (name in adjustable_paths):
                # Rewrite intra-file references to their hashed counterparts.
                content = original_file.read().decode(settings.FILE_CHARSET)
                for patterns in self._patterns.values():
                    for (pattern, template) in patterns:
                        converter = self.url_converter(name, template)
                        content = pattern.sub(converter, content)
                # Replace any stale hashed copy with the adjusted content.
                if hashed_file_exists:
                    self.delete(hashed_name)
                content_file = ContentFile(force_bytes(content))
                saved_name = self._save(hashed_name, content_file)
                hashed_name = force_text(saved_name.replace(u'\\', u'/'))
                processed = True
            elif (not hashed_file_exists):
                # Non-adjustable file: only save a hashed copy if missing.
                processed = True
                saved_name = self._save(hashed_name, original_file)
                hashed_name = force_text(saved_name.replace(u'\\', u'/'))
            hashed_paths[self.cache_key(name.replace(u'\\', u'/'))] = hashed_name
            (yield (name, hashed_name, processed))
    # Persist all name -> hashed-name mappings in one cache round trip.
    self.cache.set_many(hashed_paths)
def __init__(self, app, *args, **kwargs):
    """Initialize a static file storage rooted at the given app's static dir."""
    app_module = import_module(app)
    app_root = os.path.dirname(upath(app_module.__file__))
    location = os.path.join(app_root, self.source_dir)
    super(AppStaticStorage, self).__init__(location, *args, **kwargs)
def find(self, path, all=False):
    """Given a relative file path this ought to find an
    absolute file path.

    If the ``all`` parameter is ``False`` (default) only
    the first found file path will be returned; if set
    to ``True`` a list of all found files paths is returned.
    """
    # Subclasses must implement this.
    raise NotImplementedError()
def list(self, ignore_patterns):
    """Given an optional list of paths to ignore, this should return
    a two item iterable consisting of the relative path and storage
    instance.
    """
    # Subclasses must implement this.
    raise NotImplementedError()
def find(self, path, all=False):
    """Looks for files in the extra locations
    as defined in ``STATICFILES_DIRS``.
    """
    matches = []
    for prefix, root in self.locations:
        hit = self.find_location(root, path, prefix)
        if not hit:
            continue
        if not all:
            return hit
        matches.append(hit)
    return matches
def find_location(self, root, path, prefix=None):
    """Finds a requested static file in a location, returning the found
    absolute path (or ``None`` if no match).
    """
    if prefix:
        prefix = '%s%s' % (prefix, os.sep)
        # A prefixed location only serves paths under that prefix.
        if not path.startswith(prefix):
            return None
        path = path[len(prefix):]
    candidate = safe_join(root, path)
    if os.path.exists(candidate):
        return candidate
def list(self, ignore_patterns):
    """List all files in all locations."""
    for prefix, root in self.locations:
        storage = self.storages[root]
        for relpath in utils.get_files(storage, ignore_patterns):
            yield relpath, storage
def list(self, ignore_patterns):
    """List all files in all app storages."""
    for storage in six.itervalues(self.storages):
        # Only include apps whose static dir actually exists.
        if not storage.exists(''):
            continue
        for relpath in utils.get_files(storage, ignore_patterns):
            yield relpath, storage
def find(self, path, all=False):
    """Looks for files in the app directories."""
    matches = []
    for app in self.apps:
        hit = self.find_in_app(app, path)
        if not hit:
            continue
        if not all:
            return hit
        matches.append(hit)
    return matches
def find_in_app(self, app, path):
    """Find a requested static file in an app's static locations."""
    storage = self.storages.get(app, None)
    if not storage:
        return None
    if storage.prefix:
        prefix = '%s%s' % (storage.prefix, os.sep)
        # Prefixed app storage only serves paths under its prefix.
        if not path.startswith(prefix):
            return None
        path = path[len(prefix):]
    if storage.exists(path):
        matched_path = storage.path(path)
        if matched_path:
            return matched_path
def find(self, path, all=False):
    """Looks for files in the default file storage, if it's local."""
    try:
        # path('') raises NotImplementedError for non-local storages.
        self.storage.path('')
    except NotImplementedError:
        pass
    else:
        if self.storage.exists(path):
            match = self.storage.path(path)
            return [match] if all else match
    return []
def list(self, ignore_patterns):
    """List all files of the storage."""
    for relpath in utils.get_files(self.storage, ignore_patterns):
        yield relpath, self.storage
def feed_extra_kwargs(self, obj):
    """Returns an extra keyword arguments dictionary that is used when
    initializing the feed generator. Hook for subclasses; empty by default.
    """
    return {}
def item_extra_kwargs(self, item):
    """Returns an extra keyword arguments dictionary that is used with
    the `add_item` call of the feed generator. Hook for subclasses; empty by default.
    """
    return {}
def get_feed(self, obj, request):
    """Returns a feedgenerator.DefaultFeed object, fully populated, for
    this feed. Raises FeedDoesNotExist for invalid parameters.
    """
    current_site = get_current_site(request)
    # Feed-level attributes may each be a callable or a plain value;
    # __get_dynamic_attr resolves either form.
    link = self.__get_dynamic_attr(u'link', obj)
    link = add_domain(current_site.domain, link, request.is_secure())
    feed = self.feed_type(
        title=self.__get_dynamic_attr(u'title', obj),
        subtitle=self.__get_dynamic_attr(u'subtitle', obj),
        link=link,
        description=self.__get_dynamic_attr(u'description', obj),
        language=settings.LANGUAGE_CODE,
        feed_url=add_domain(current_site.domain, (self.__get_dynamic_attr(u'feed_url', obj) or request.path), request.is_secure()),
        author_name=self.__get_dynamic_attr(u'author_name', obj),
        author_link=self.__get_dynamic_attr(u'author_link', obj),
        author_email=self.__get_dynamic_attr(u'author_email', obj),
        categories=self.__get_dynamic_attr(u'categories', obj),
        feed_copyright=self.__get_dynamic_attr(u'feed_copyright', obj),
        feed_guid=self.__get_dynamic_attr(u'feed_guid', obj),
        ttl=self.__get_dynamic_attr(u'ttl', obj),
        **self.feed_extra_kwargs(obj))
    # Optional templates for per-item title/description; a missing
    # template silently falls back to item_title/item_description.
    title_tmp = None
    if (self.title_template is not None):
        try:
            title_tmp = loader.get_template(self.title_template)
        except TemplateDoesNotExist:
            pass
    description_tmp = None
    if (self.description_template is not None):
        try:
            description_tmp = loader.get_template(self.description_template)
        except TemplateDoesNotExist:
            pass
    for item in self.__get_dynamic_attr(u'items', obj):
        if (title_tmp is not None):
            title = title_tmp.render(RequestContext(request, {u'obj': item, u'site': current_site}))
        else:
            title = self.__get_dynamic_attr(u'item_title', item)
        if (description_tmp is not None):
            description = description_tmp.render(RequestContext(request, {u'obj': item, u'site': current_site}))
        else:
            description = self.__get_dynamic_attr(u'item_description', item)
        link = add_domain(current_site.domain, self.__get_dynamic_attr(u'item_link', item), request.is_secure())
        # Optional enclosure (e.g. podcast media file).
        enc = None
        enc_url = self.__get_dynamic_attr(u'item_enclosure_url', item)
        if enc_url:
            enc = feedgenerator.Enclosure(
                url=smart_text(enc_url),
                length=smart_text(self.__get_dynamic_attr(u'item_enclosure_length', item)),
                mime_type=smart_text(self.__get_dynamic_attr(u'item_enclosure_mime_type', item)))
        # Author email/link are only looked up when a name is given.
        author_name = self.__get_dynamic_attr(u'item_author_name', item)
        if (author_name is not None):
            author_email = self.__get_dynamic_attr(u'item_author_email', item)
            author_link = self.__get_dynamic_attr(u'item_author_link', item)
        else:
            author_email = author_link = None
        # Naive publication dates are interpreted in the local timezone.
        pubdate = self.__get_dynamic_attr(u'item_pubdate', item)
        if (pubdate and is_naive(pubdate)):
            ltz = tzinfo.LocalTimezone(pubdate)
            pubdate = pubdate.replace(tzinfo=ltz)
        feed.add_item(
            title=title,
            link=link,
            description=description,
            unique_id=self.__get_dynamic_attr(u'item_guid', item, link),
            enclosure=enc,
            pubdate=pubdate,
            author_name=author_name,
            author_email=author_email,
            author_link=author_link,
            categories=self.__get_dynamic_attr(u'item_categories', item),
            item_copyright=self.__get_dynamic_attr(u'item_copyright', item),
            **self.item_extra_kwargs(item))
    return feed
@classmethod
def handle_token(cls, parser, token):
    """Class method to parse get_comment_list/count/form and return a Node."""
    bits = token.contents.split()
    if bits[1] != 'for':
        raise template.TemplateSyntaxError("Second argument in %r tag must be 'for'" % bits[0])
    # {% get_whatever for obj as varname %}
    if len(bits) == 5:
        if bits[3] != 'as':
            raise template.TemplateSyntaxError("Third argument in %r must be 'as'" % bits[0])
        return cls(object_expr=parser.compile_filter(bits[2]), as_varname=bits[4])
    # {% get_whatever for app.model pk as varname %}
    elif len(bits) == 6:
        if bits[4] != 'as':
            raise template.TemplateSyntaxError("Fourth argument in %r must be 'as'" % bits[0])
        return cls(
            ctype=BaseCommentNode.lookup_content_type(bits[2], bits[0]),
            object_pk_expr=parser.compile_filter(bits[3]),
            as_varname=bits[5])
    else:
        raise template.TemplateSyntaxError('%r tag requires 4 or 5 arguments' % bits[0])
def get_context_value_from_queryset(self, context, qs):
    """Hook for subclasses: turn the comment queryset into the context value."""
    raise NotImplementedError
|
@classmethod
def handle_token(cls, parser, token):
    """Parse a {% render_comment_form %} tag and return a Node.

    Two syntaxes are accepted:
        {% render_comment_form for [object] %}              (3 tokens)
        {% render_comment_form for [app].[model] [pk] %}    (4 tokens)

    Raises template.TemplateSyntaxError for any other argument count.
    """
    tokens = token.contents.split()
    if tokens[1] != 'for':
        raise template.TemplateSyntaxError("Second argument in %r tag must be 'for'" % tokens[0])
    if len(tokens) == 3:
        # {% render_comment_form for obj %}
        return cls(object_expr=parser.compile_filter(tokens[2]))
    elif len(tokens) == 4:
        # {% render_comment_form for app.model pk %}
        return cls(
            ctype=BaseCommentNode.lookup_content_type(tokens[2], tokens[0]),
            object_pk_expr=parser.compile_filter(tokens[3]),
        )
    else:
        # Bug fix: previously fell through and returned None for a malformed
        # tag, which crashed later inside the template engine with a
        # confusing error instead of a clear syntax message.
        raise template.TemplateSyntaxError('%r tag requires 2 or 3 arguments' % tokens[0])
|
@classmethod
def handle_token(cls, parser, token):
    """Parse a {% render_comment_list %} tag and return a Node.

    Two syntaxes are accepted:
        {% render_comment_list for [object] %}              (3 tokens)
        {% render_comment_list for [app].[model] [pk] %}    (4 tokens)

    Raises template.TemplateSyntaxError for any other argument count.
    """
    tokens = token.contents.split()
    if tokens[1] != 'for':
        raise template.TemplateSyntaxError("Second argument in %r tag must be 'for'" % tokens[0])
    if len(tokens) == 3:
        # {% render_comment_list for obj %}
        return cls(object_expr=parser.compile_filter(tokens[2]))
    elif len(tokens) == 4:
        # {% render_comment_list for app.model pk %}
        return cls(
            ctype=BaseCommentNode.lookup_content_type(tokens[2], tokens[0]),
            object_pk_expr=parser.compile_filter(tokens[3]),
        )
    else:
        # Bug fix: previously fell through and returned None for a malformed
        # tag, which crashed later inside the template engine with a
        # confusing error instead of a clear syntax message.
        raise template.TemplateSyntaxError('%r tag requires 2 or 3 arguments' % tokens[0])
|
def get_content_object_url(self):
    """Get a URL suitable for redirecting to the content object."""
    redirect_args = (self.content_type_id, self.object_pk)
    return urlresolvers.reverse('comments-url-redirect', args=redirect_args)
|
def _get_userinfo(self):
    """Build (and cache on the instance) a dict describing the poster.

    Works for both authenticated and anonymous comments; the result always
    has ``name``, ``email`` and ``url`` keys. For authenticated users the
    user record's email/full name take precedence over the free-text fields.
    """
    if not hasattr(self, '_userinfo'):
        info = {
            'name': self.user_name,
            'email': self.user_email,
            'url': self.user_url,
        }
        if self.user_id:
            user = self.user
            if user.email:
                info['email'] = user.email
            full_name = user.get_full_name()
            if full_name:
                info['name'] = full_name
            elif not self.user_name:
                # No full name and no free-text name: fall back to username.
                info['name'] = user.get_username()
        # Cached even for anonymous comments so repeated access is cheap.
        self._userinfo = info
    return self._userinfo
|
def get_as_text(self):
    """Return this comment as plain text. Useful for emails."""
    context = {
        'user': self.user or self.name,
        'date': self.submit_date,
        'comment': self.comment,
        'domain': self.site.domain,
        'url': self.get_absolute_url(),
    }
    return _('Posted by %(user)s at %(date)s\n\n%(comment)s\n\nhttp://%(domain)s%(url)s') % context
|
def in_moderation(self):
    """QuerySet of comments awaiting moderation (hidden but not removed)."""
    queryset = self.get_query_set()
    return queryset.filter(is_public=False, is_removed=False)
|
def for_model(self, model):
    """QuerySet of comments for a model — either a class (all instances)
    or a single instance (that object only)."""
    content_type = ContentType.objects.get_for_model(model)
    comments = self.get_query_set().filter(content_type=content_type)
    if isinstance(model, models.Model):
        # An instance was passed: narrow to that object's primary key.
        comments = comments.filter(object_pk=force_text(model._get_pk_val()))
    return comments
|
def _bulk_flag(self, request, queryset, action, done_message):
    """Flag, approve, or remove comments from an admin action.

    The heavy lifting is delegated to ``action(request, comment)``;
    afterwards a pluralized status message is shown to the admin user.
    """
    count = 0
    for comment in queryset:
        action(request, comment)
        count += 1
    msg = ungettext(
        u'1 comment was successfully %(action)s.',
        u'%(count)s comments were successfully %(action)s.',
        count,
    )
    self.message_user(request, msg % {u'count': count, u'action': done_message(count)})
|
def security_errors(self):
    """Return just those errors associated with security fields."""
    security_fields = ('honeypot', 'timestamp', 'security_hash')
    errors = ErrorDict()
    for field in security_fields:
        if field in self.errors:
            errors[field] = self.errors[field]
    return errors
|
def clean_security_hash(self):
    """Check the security hash against one recomputed from the raw data."""
    expected_hash = self.generate_security_hash(
        content_type=self.data.get('content_type', ''),
        object_pk=self.data.get('object_pk', ''),
        timestamp=self.data.get('timestamp', ''),
    )
    actual_hash = self.cleaned_data['security_hash']
    # constant_time_compare avoids leaking hash prefixes via timing.
    if not constant_time_compare(expected_hash, actual_hash):
        raise forms.ValidationError('Security hash check failed.')
    return actual_hash
|
def clean_timestamp(self):
    """Make sure the timestamp isn't too far (> 2 hours) in the past."""
    max_age = 2 * 60 * 60  # two hours, in seconds
    ts = self.cleaned_data['timestamp']
    if time.time() - ts > max_age:
        raise forms.ValidationError('Timestamp check failed')
    return ts
|
def generate_security_data(self):
    """Generate a dict of security data for "initial" form data."""
    timestamp = int(time.time())
    return {
        'content_type': str(self.target_object._meta),
        'object_pk': str(self.target_object._get_pk_val()),
        'timestamp': str(timestamp),
        'security_hash': self.initial_security_hash(timestamp),
    }
|
def initial_security_hash(self, timestamp):
    """Generate the initial security hash from self.target_object and a
    (unix) timestamp."""
    return self.generate_security_hash(
        content_type=str(self.target_object._meta),
        object_pk=str(self.target_object._get_pk_val()),
        timestamp=str(timestamp),
    )
|
def generate_security_hash(self, content_type, object_pk, timestamp):
    """Generate a HMAC security hash from the provided info."""
    # NOTE(review): the salt says "forms" rather than "comments.forms";
    # kept byte-identical because changing it would invalidate every
    # security hash already embedded in rendered forms.
    key_salt = 'django.contrib.forms.CommentSecurityForm'
    value = '-'.join((content_type, object_pk, timestamp))
    return salted_hmac(key_salt, value).hexdigest()
|
def get_comment_object(self):
    """Return a new (unsaved) comment object built from this form's data.

    Assumes the form has already been validated; raises ValueError if not.
    Fields that come from the request (``user``, ``ip_address``) are left
    unset. If an identical comment already exists, that one is returned
    instead (see check_for_duplicate_comment).
    """
    if not self.is_valid():
        raise ValueError('get_comment_object may only be called on valid forms')
    comment_class = self.get_comment_model()
    comment = comment_class(**self.get_comment_create_data())
    return self.check_for_duplicate_comment(comment)
|
def get_comment_model(self):
    """Return the comment model this form creates.

    Custom comment apps should override this (along with
    get_comment_create_data and possibly check_for_duplicate_comment)
    to supply their own model.
    """
    return Comment
|
def get_comment_create_data(self):
    """Return the kwargs used to instantiate a new comment.

    Custom comment apps that override get_comment_model can extend this
    to populate extra fields on their model.
    """
    return {
        'content_type': ContentType.objects.get_for_model(self.target_object),
        'object_pk': force_text(self.target_object._get_pk_val()),
        'user_name': self.cleaned_data['name'],
        'user_email': self.cleaned_data['email'],
        'user_url': self.cleaned_data['url'],
        'comment': self.cleaned_data['comment'],
        'submit_date': timezone.now(),
        'site_id': settings.SITE_ID,
        'is_public': True,
        'is_removed': False,
    }
|
def check_for_duplicate_comment(self, new):
    """Return a previously-saved identical comment if one exists, else ``new``.

    Guards against double-posting: a duplicate is a comment on the same
    object, by the same poster, with the same text, on the same day.
    """
    manager = self.get_comment_model()._default_manager
    candidates = manager.using(self.target_object._state.db).filter(
        content_type=new.content_type,
        object_pk=new.object_pk,
        user_name=new.user_name,
        user_email=new.user_email,
        user_url=new.user_url,
    )
    for old in candidates:
        same_day = old.submit_date.date() == new.submit_date.date()
        if same_day and old.comment == new.comment:
            return old
    return new
|
def clean_comment(self):
    """If COMMENTS_ALLOW_PROFANITIES is False, reject comments containing
    any word from settings.PROFANITIES_LIST.

    Matched words are echoed back partially masked (first and last letter
    kept, middle replaced with dashes).
    """
    comment = self.cleaned_data['comment']
    # Idiom fix: `== False` replaced with `not`; also treats an unset/None
    # setting as "profanities not allowed", the safe default.
    if not settings.COMMENTS_ALLOW_PROFANITIES:
        lowered = comment.lower()
        bad_words = [w for w in settings.PROFANITIES_LIST if w in lowered]
        if bad_words:
            # e.g. "word" -> "w--d"
            masked = ['"%s%s%s"' % (w[0], '-' * (len(w) - 2), w[-1]) for w in bad_words]
            raise forms.ValidationError(
                ungettext(
                    'Watch your mouth! The word %s is not allowed here.',
                    'Watch your mouth! The words %s are not allowed here.',
                    len(bad_words),
                ) % get_text_list(masked, ugettext('and'))
            )
    return comment
|
def clean_honeypot(self):
    """Check that nothing's been entered into the honeypot field.

    Any non-empty value means a bot filled in the hidden field.
    """
    value = self.cleaned_data['honeypot']
    if value:
        raise forms.ValidationError(self.fields['honeypot'].label)
    return value
|
'Internal helper which will return a ``datetime.timedelta``
representing the time between ``now`` and ``then``. Assumes
``now`` is a ``datetime.date`` or ``datetime.datetime`` later
than ``then``.
If ``now`` and ``then`` are not of the same type due to one of
them being a ``datetime.date`` and the other being a
``datetime.datetime``, both will be coerced to
``datetime.date`` before calculating the delta.'
| def _get_delta(self, now, then):
| if (now.__class__ is not then.__class__):
now = datetime.date(now.year, now.month, now.day)
then = datetime.date(then.year, then.month, then.day)
if (now < then):
raise ValueError('Cannot determine moderation rules because date field is set to a value in the future')
return (now - then)
|
def allow(self, comment, content_object, request):
    """Determine whether a comment may be posted on ``content_object``.

    Returns False when the object's enable field is off, or when the
    auto-close window (``close_after`` days past the auto-close date)
    has elapsed; True otherwise.
    """
    if self.enable_field and not getattr(content_object, self.enable_field):
        return False
    if self.auto_close_field and self.close_after is not None:
        close_after_date = getattr(content_object, self.auto_close_field)
        if close_after_date is not None:
            age_days = self._get_delta(timezone.now(), close_after_date).days
            if age_days >= self.close_after:
                return False
    return True
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.