code (string, 12-2.05k chars) | label_name (string, 6-8 chars) | label (int64, 0-95) |
---|---|---|
def publish(request, topic_id, pk=None):
initial = None
if pk: # todo: move to form
comment = get_object_or_404(
Comment.objects.for_access(user=request.user), pk=pk)
quote = markdown.quotify(comment.comment, comment.user.st.nickname)
initial = {'comment': quote}
user = request.user
topic = get_object_or_404(
Topic.objects.opened().for_access(user),
pk=topic_id)
form = CommentForm(
user=user,
topic=topic,
data=post_data(request),
initial=initial)
if is_post(request) and not request.is_limited() and form.is_valid():
if not user.st.update_post_hash(form.get_comment_hash()):
# Hashed comment may not have been saved yet
return redirect(
request.POST.get('next', None) or
Comment
.get_last_for_topic(topic_id)
.get_absolute_url())
comment = form.save()
comment_posted(comment=comment, mentions=form.mentions)
return redirect(request.POST.get('next', comment.get_absolute_url()))
return render(
request=request,
template_name='spirit/comment/publish.html',
context={
'form': form,
'topic': topic}) | CWE-601 | 11 |
def create_class_from_xml_string(target_class, xml_string):
"""Creates an instance of the target class from a string.
:param target_class: The class which will be instantiated and populated
with the contents of the XML. This class must have a c_tag and a
c_namespace class variable.
:param xml_string: A string which contains valid XML. The root element
of the XML string should match the tag and namespace of the desired
class.
:return: An instance of the target class with members assigned according to
the contents of the XML - or None if the root XML tag and namespace did
not match those of the target class.
"""
if not isinstance(xml_string, six.binary_type):
xml_string = xml_string.encode('utf-8')
tree = ElementTree.fromstring(xml_string)
return create_class_from_element_tree(target_class, tree) | CWE-611 | 13 |
def xsrf_token(self):
"""The XSRF-prevention token for the current user/session.
To prevent cross-site request forgery, we set an '_xsrf' cookie
and include the same '_xsrf' value as an argument with all POST
requests. If the two do not match, we reject the form submission
as a potential forgery.
See http://en.wikipedia.org/wiki/Cross-site_request_forgery
"""
if not hasattr(self, "_xsrf_token"):
token = self.get_cookie("_xsrf")
if not token:
token = binascii.b2a_hex(os.urandom(16))
expires_days = 30 if self.current_user else None
self.set_cookie("_xsrf", token, expires_days=expires_days)
self._xsrf_token = token
return self._xsrf_token | CWE-203 | 38 |
def test_logout_post(self):
login_code = LoginCode.objects.create(user=self.user, code='foobar')
self.client.login(username=self.user.username, code=login_code.code)
response = self.client.post('/accounts/logout/?next=/accounts/login/')
self.assertEqual(response.status_code, 302)
self.assertEqual(response['Location'], '/accounts/login/')
self.assertTrue(response.wsgi_request.user.is_anonymous) | CWE-312 | 71 |
def help_page(page_slug: str) -> str:
"""Fava's included documentation."""
if page_slug not in HELP_PAGES:
abort(404)
html = markdown2.markdown_path(
(resource_path("help") / (page_slug + ".md")),
extras=["fenced-code-blocks", "tables", "header-ids"],
)
return render_template(
"_layout.html",
active_page="help",
page_slug=page_slug,
help_html=render_template_string(
html,
beancount_version=beancount_version,
fava_version=fava_version,
),
HELP_PAGES=HELP_PAGES,
) | CWE-79 | 1 |
async def handle_404(request, response):
if 'json' not in response.headers['Content-Type']:
if request.path.endswith('/'):
return web.HTTPFound(request.path.rstrip('/'))
return web.json_response({
"status": 404,
"message": "Page '{}' not found".format(request.path)
}, status=404)
return response | CWE-601 | 11 |
def get_pos_tagger(self):
from nltk.corpus import brown
regexp_tagger = RegexpTagger(
[
(r"^-?[0-9]+(.[0-9]+)?$", "CD"), # cardinal numbers
(r"(The|the|A|a|An|an)$", "AT"), # articles
(r".*able$", "JJ"), # adjectives
(r".*ness$", "NN"), # nouns formed from adjectives
(r".*ly$", "RB"), # adverbs
(r".*s$", "NNS"), # plural nouns
(r".*ing$", "VBG"), # gerunds
(r".*ed$", "VBD"), # past tense verbs
(r".*", "NN"), # nouns (default)
]
)
brown_train = brown.tagged_sents(categories="news")
unigram_tagger = UnigramTagger(brown_train, backoff=regexp_tagger)
bigram_tagger = BigramTagger(brown_train, backoff=unigram_tagger)
trigram_tagger = TrigramTagger(brown_train, backoff=bigram_tagger)
# Override particular words
main_tagger = RegexpTagger(
[(r"(A|a|An|an)$", "ex_quant"), (r"(Every|every|All|all)$", "univ_quant")],
backoff=trigram_tagger,
)
return main_tagger | CWE-1333 | 73 |
def get_valid_filename(value, replace_whitespace=True, chars=128):
"""
Returns the given string converted to a string that can be used for a clean
filename. Limits the number of characters to a maximum of 128.
"""
if value[-1:] == u'.':
value = value[:-1]+u'_'
value = value.replace("/", "_").replace(":", "_").strip('\0')
if use_unidecode:
if config.config_unicode_filename:
value = (unidecode.unidecode(value))
else:
value = value.replace(u'§', u'SS')
value = value.replace(u'ß', u'ss')
value = unicodedata.normalize('NFKD', value)
re_slugify = re.compile(r'[\W\s-]', re.UNICODE)
value = re_slugify.sub('', value)
if replace_whitespace:
# *+:\"/<>? are replaced by _
value = re.sub(r'[*+:\\\"/<>?]+', u'_', value, flags=re.U)
# pipe has to be replaced with comma
value = re.sub(r'[|]+', u',', value, flags=re.U)
value = value[:chars].strip()
if not value:
raise ValueError("Filename cannot be empty")
return value | CWE-918 | 16 |
def sentences_stats(self, type, vId = None):
return self.sql_execute(self.prop_sentences_stats(type, vId)) | CWE-89 | 0 |
def feed_hot():
off = request.args.get("offset") or 0
all_books = ub.session.query(ub.Downloads, func.count(ub.Downloads.book_id)).order_by(
func.count(ub.Downloads.book_id).desc()).group_by(ub.Downloads.book_id)
hot_books = all_books.offset(off).limit(config.config_books_per_page)
entries = list()
for book in hot_books:
downloadBook = calibre_db.get_book(book.Downloads.book_id)
if downloadBook:
entries.append(
calibre_db.get_filtered_book(book.Downloads.book_id)
)
else:
ub.delete_download(book.Downloads.book_id)
numBooks = entries.__len__()
pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1),
config.config_books_per_page, numBooks)
return render_xml_template('feed.xml', entries=entries, pagination=pagination) | CWE-918 | 16 |
def _copy_file(self, in_path, out_path):
if not os.path.exists(in_path):
raise errors.AnsibleFileNotFound("file or module does not exist: %s" % in_path)
try:
shutil.copyfile(in_path, out_path)
except shutil.Error:
traceback.print_exc()
raise errors.AnsibleError("failed to copy: %s and %s are the same" % (in_path, out_path))
except IOError:
traceback.print_exc()
raise errors.AnsibleError("failed to transfer file to %s" % out_path) | CWE-59 | 36 |
def check_read_formats(entry):
EXTENSIONS_READER = {'TXT', 'PDF', 'EPUB', 'CBZ', 'CBT', 'CBR', 'DJVU'}
bookformats = list()
if len(entry.data):
for ele in iter(entry.data):
if ele.format.upper() in EXTENSIONS_READER:
bookformats.append(ele.format.lower())
return bookformats | CWE-918 | 16 |
def ends_with(field: Term, value: str) -> Criterion:
return field.like(f"%{value}") | CWE-89 | 0 |
def author_list():
if current_user.check_visibility(constants.SIDEBAR_AUTHOR):
if current_user.get_view_property('author', 'dir') == 'desc':
order = db.Authors.sort.desc()
order_no = 0
else:
order = db.Authors.sort.asc()
order_no = 1
entries = calibre_db.session.query(db.Authors, func.count('books_authors_link.book').label('count')) \
.join(db.books_authors_link).join(db.Books).filter(calibre_db.common_filters()) \
.group_by(text('books_authors_link.author')).order_by(order).all()
charlist = calibre_db.session.query(func.upper(func.substr(db.Authors.sort, 1, 1)).label('char')) \
.join(db.books_authors_link).join(db.Books).filter(calibre_db.common_filters()) \
.group_by(func.upper(func.substr(db.Authors.sort, 1, 1))).all()
# If no copy is created, read-only databases cannot display author names containing "|", because changing
# the name starts a change session
autor_copy = copy.deepcopy(entries)
for entry in autor_copy:
entry.Authors.name = entry.Authors.name.replace('|', ',')
return render_title_template('list.html', entries=autor_copy, folder='web.books_list', charlist=charlist,
title=u"Authors", page="authorlist", data='author', order=order_no)
else:
abort(404) | CWE-918 | 16 |
def test_login_post(self):
login_code = LoginCode.objects.create(user=self.user, code='foobar', next='/private/')
response = self.client.post('/accounts/login/code/', {
'code': login_code.code,
})
self.assertEqual(response.status_code, 302)
self.assertEqual(response['Location'], '/private/')
self.assertEqual(response.wsgi_request.user, self.user)
self.assertFalse(LoginCode.objects.filter(pk=login_code.pk).exists()) | CWE-312 | 71 |
def render_archived_books(page, sort_param):
order = sort_param[0] or []
archived_books = (
ub.session.query(ub.ArchivedBook)
.filter(ub.ArchivedBook.user_id == int(current_user.id))
.filter(ub.ArchivedBook.is_archived == True)
.all()
)
archived_book_ids = [archived_book.book_id for archived_book in archived_books]
archived_filter = db.Books.id.in_(archived_book_ids)
entries, random, pagination = calibre_db.fill_indexpage_with_archived_books(page, db.Books,
0,
archived_filter,
order,
True,
False, 0)
name = _(u'Archived Books') + ' (' + str(len(archived_book_ids)) + ')'
pagename = "archived"
return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
title=name, page=pagename, order=sort_param[1]) | CWE-918 | 16 |
def download(url, location):
"""
Download files to the specified location.
:param url: The file URL.
:type url: str
:param location: The absolute path to where the downloaded
file is to be stored.
:type location: str
"""
request = urllib2.urlopen(url)
try:
content = request.read()
fp = open(location, 'w+')
try:
fp.write(content)
finally:
fp.close()
finally:
request.close() | CWE-295 | 52 |
def check_username(username):
username = username.strip()
if ub.session.query(ub.User).filter(func.lower(ub.User.name) == username.lower()).scalar():
log.error(u"This username is already taken")
raise Exception (_(u"This username is already taken"))
return username | CWE-918 | 16 |
def get_cc_columns(filter_config_custom_read=False):
tmpcc = calibre_db.session.query(db.Custom_Columns)\
.filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
cc = []
r = None
if config.config_columns_to_ignore:
r = re.compile(config.config_columns_to_ignore)
for col in tmpcc:
if filter_config_custom_read and config.config_read_column and config.config_read_column == col.id:
continue
if r and r.match(col.name):
continue
cc.append(col)
return cc | CWE-918 | 16 |
def runserverobj(method, docs=None, dt=None, dn=None, arg=None, args=None):
frappe.desk.form.run_method.runserverobj(method, docs=docs, dt=dt, dn=dn, arg=arg, args=args) | CWE-79 | 1 |
def make_homeserver(self, reactor, clock):
self.mock_federation = Mock()
self.mock_registry = Mock()
self.query_handlers = {}
def register_query_handler(query_type, handler):
self.query_handlers[query_type] = handler
self.mock_registry.register_query_handler = register_query_handler
hs = self.setup_test_homeserver(
http_client=None,
resource_for_federation=Mock(),
federation_client=self.mock_federation,
federation_registry=self.mock_registry,
)
self.handler = hs.get_directory_handler()
self.store = hs.get_datastore()
self.my_room = RoomAlias.from_string("#my-room:test")
self.your_room = RoomAlias.from_string("#your-room:test")
self.remote_room = RoomAlias.from_string("#another:remote")
return hs | CWE-601 | 11 |
def adv_search_serie(q, include_series_inputs, exclude_series_inputs):
for serie in include_series_inputs:
q = q.filter(db.Books.series.any(db.Series.id == serie))
for serie in exclude_series_inputs:
q = q.filter(not_(db.Books.series.any(db.Series.id == serie)))
return q | CWE-918 | 16 |
def _get_element_ptr_tuplelike(parent, key):
typ = parent.typ
assert isinstance(typ, TupleLike)
if isinstance(typ, StructType):
assert isinstance(key, str)
subtype = typ.members[key]
attrs = list(typ.tuple_keys())
index = attrs.index(key)
annotation = key
else:
assert isinstance(key, int)
subtype = typ.members[key]
attrs = list(range(len(typ.members)))
index = key
annotation = None
# generated by empty() + make_setter
if parent.value == "~empty":
return IRnode.from_list("~empty", typ=subtype)
if parent.value == "multi":
assert parent.encoding != Encoding.ABI, "no abi-encoded literals"
return parent.args[index]
ofst = 0 # offset from parent start
if parent.encoding in (Encoding.ABI, Encoding.JSON_ABI):
if parent.location == STORAGE:
raise CompilerPanic("storage variables should not be abi encoded") # pragma: notest
member_t = typ.members[attrs[index]]
for i in range(index):
member_abi_t = typ.members[attrs[i]].abi_type
ofst += member_abi_t.embedded_static_size()
return _getelemptr_abi_helper(parent, member_t, ofst)
if parent.location.word_addressable:
for i in range(index):
ofst += typ.members[attrs[i]].storage_size_in_words
elif parent.location.byte_addressable:
for i in range(index):
ofst += typ.members[attrs[i]].memory_bytes_required
else:
raise CompilerPanic(f"bad location {parent.location}") # pragma: notest
return IRnode.from_list(
add_ofst(parent, ofst),
typ=subtype,
location=parent.location,
encoding=parent.encoding,
annotation=annotation,
) | CWE-190 | 19 |
def atom_timestamp(self):
return (self.timestamp.strftime('%Y-%m-%dT%H:%M:%S+00:00') or '') | CWE-918 | 16 |
def __init__(self):
self.reqparse = reqparse.RequestParser()
super(AuthenticatedService, self).__init__()
self.auth_dict = dict()
if current_user.is_authenticated():
roles_marshal = []
for role in current_user.roles:
roles_marshal.append(marshal(role.__dict__, ROLE_FIELDS))
roles_marshal.append({"name": current_user.role})
for role in RBACRole.roles[current_user.role].get_parents():
roles_marshal.append({"name": role.name})
self.auth_dict = {
"authenticated": True,
"user": current_user.email,
"roles": roles_marshal
}
else:
if app.config.get('FRONTED_BY_NGINX'):
url = "https://{}:{}{}".format(app.config.get('FQDN'), app.config.get('NGINX_PORT'), '/login')
else:
url = "http://{}:{}{}".format(app.config.get('FQDN'), app.config.get('API_PORT'), '/login')
self.auth_dict = {
"authenticated": False,
"user": None,
"url": url
} | CWE-601 | 11 |
def edit_all_cc_data(book_id, book, to_save):
cc = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
return edit_cc_data(book_id, book, to_save, cc) | CWE-918 | 16 |
def send_mail(book_id, book_format, convert, kindle_mail, calibrepath, user_id):
"""Send email with attachments"""
book = calibre_db.get_book(book_id)
if convert == 1:
# returns None on success, otherwise an error message
return convert_book_format(book_id, calibrepath, u'epub', book_format.lower(), user_id, kindle_mail)
if convert == 2:
# returns None on success, otherwise an error message
return convert_book_format(book_id, calibrepath, u'azw3', book_format.lower(), user_id, kindle_mail)
for entry in iter(book.data):
if entry.format.upper() == book_format.upper():
converted_file_name = entry.name + '.' + book_format.lower()
link = '<a href="{}">{}</a>'.format(url_for('web.show_book', book_id=book_id), escape(book.title))
EmailText = _(u"%(book)s send to Kindle", book=link)
WorkerThread.add(user_id, TaskEmail(_(u"Send to Kindle"), book.path, converted_file_name,
config.get_mail_settings(), kindle_mail,
EmailText, _(u'This e-mail has been sent via Calibre-Web.')))
return
return _(u"The requested file could not be read. Maybe wrong permissions?") | CWE-918 | 16 |
def test_login_unknown_code(self):
response = self.client.post('/accounts/login/code/', {
'code': 'unknown',
})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['form'].errors, {
'code': ['Login code is invalid. It might have expired.'],
}) | CWE-312 | 71 |
def format_runtime(runtime):
retVal = ""
if runtime.days:
retVal = format_unit(runtime.days, 'duration-day', length="long", locale=get_locale()) + ', '
mins, seconds = divmod(runtime.seconds, 60)
hours, minutes = divmod(mins, 60)
# ToDo: locale.number_symbols._data['timeSeparator'] -> localize time separator ?
if hours:
retVal += '{:d}:{:02d}:{:02d}s'.format(hours, minutes, seconds)
elif minutes:
retVal += '{:2d}:{:02d}s'.format(minutes, seconds)
else:
retVal += '{:2d}s'.format(seconds)
return retVal | CWE-918 | 16 |
def test_login_get(self):
login_code = LoginCode.objects.create(user=self.user, code='foobar')
response = self.client.get('/accounts/login/code/', {
'code': login_code.code,
})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['form'].cleaned_data['code'], login_code)
self.assertTrue(response.wsgi_request.user.is_anonymous)
self.assertTrue(LoginCode.objects.filter(pk=login_code.pk).exists()) | CWE-312 | 71 |
def __init__(self, text, book):
self.text = text
self.book = book | CWE-918 | 16 |
def test_equal_body(self):
# check that the server doesn't close the connection when the body
# length equals the Content-Length header
to_send = tobytes(
"GET /equal_body HTTP/1.0\n"
"Connection: Keep-Alive\n"
"Content-Length: 0\n"
"\n"
)
self.connect()
self.sock.send(to_send)
fp = self.sock.makefile("rb", 0)
line, headers, response_body = read_http(fp)
content_length = int(headers.get("content-length")) or None
self.assertEqual(content_length, 9)
self.assertline(line, "200", "OK", "HTTP/1.0")
self.assertEqual(content_length, len(response_body))
self.assertEqual(response_body, tobytes("abcdefghi"))
# remote does not close connection (keepalive header)
self.sock.send(to_send)
fp = self.sock.makefile("rb", 0)
line, headers, response_body = read_http(fp)
self.assertline(line, "200", "OK", "HTTP/1.0") | CWE-444 | 41 |
def get_imports(self, *, prefix: str) -> Set[str]:
"""
Get a set of import strings that should be included when this property is used somewhere
Args:
prefix: A prefix to put before any relative (local) module names.
"""
imports = super().get_imports(prefix=prefix)
imports.update({"from datetime import datetime", "from typing import cast"})
return imports | CWE-94 | 14 |
def get_cc_columns(filter_config_custom_read=False):
tmpcc = calibre_db.session.query(db.Custom_Columns)\
.filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
cc = []
r = None
if config.config_columns_to_ignore:
r = re.compile(config.config_columns_to_ignore)
for col in tmpcc:
if filter_config_custom_read and config.config_read_column and config.config_read_column == col.id:
continue
if r and r.match(col.name):
continue
cc.append(col)
return cc | CWE-918 | 16 |
def feed_categoryindex():
shift = 0
off = int(request.args.get("offset") or 0)
entries = calibre_db.session.query(func.upper(func.substr(db.Tags.name, 1, 1)).label('id'))\
.join(db.books_tags_link).join(db.Books).filter(calibre_db.common_filters())\
.group_by(func.upper(func.substr(db.Tags.name, 1, 1))).all()
elements = []
if off == 0:
elements.append({'id': "00", 'name':_("All")})
shift = 1
for entry in entries[
off + shift - 1:
int(off + int(config.config_books_per_page) - shift)]:
elements.append({'id': entry.id, 'name': entry.id})
pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1), config.config_books_per_page,
len(entries) + 1)
return render_xml_template('feed.xml',
letterelements=elements,
folder='opds.feed_letter_category',
pagination=pagination) | CWE-918 | 16 |
def MD5(self,data:str):
sha = hashlib.md5(bytes(data.encode()))
hash = str(sha.digest())
return self.__Salt(hash,salt=self.salt) | CWE-916 | 34 |
def field_names(ctx, rd, field):
'''
Get a list of all names for the specified field
Optional: ?library_id=<default library>
'''
db, library_id = get_library_data(ctx, rd)[:2]
return tuple(db.all_field_names(field)) | CWE-502 | 15 |
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile:
raise Impossible()
obj = self.node.as_const(eval_ctx)
# don't evaluate context functions
args = [x.as_const(eval_ctx) for x in self.args]
if isinstance(obj, _context_function_types):
if getattr(obj, 'contextfunction', False):
raise Impossible()
elif getattr(obj, 'evalcontextfunction', False):
args.insert(0, eval_ctx)
elif getattr(obj, 'environmentfunction', False):
args.insert(0, self.environment)
kwargs = dict(x.as_const(eval_ctx) for x in self.kwargs)
if self.dyn_args is not None:
try:
args.extend(self.dyn_args.as_const(eval_ctx))
except Exception:
raise Impossible()
if self.dyn_kwargs is not None:
try:
kwargs.update(self.dyn_kwargs.as_const(eval_ctx))
except Exception:
raise Impossible()
try:
return obj(*args, **kwargs)
except Exception:
raise Impossible() | CWE-134 | 54 |
def execute_cmd(cmd, from_async=False):
"""execute a request as python module"""
for hook in frappe.get_hooks("override_whitelisted_methods", {}).get(cmd, []):
# override using the first hook
cmd = hook
break
# via server script
if run_server_script_api(cmd):
return None
try:
method = get_attr(cmd)
except Exception as e:
if frappe.local.conf.developer_mode:
raise e
else:
frappe.respond_as_web_page(title='Invalid Method', html='Method not found',
indicator_color='red', http_status_code=404)
return
if from_async:
method = method.queue
is_whitelisted(method)
is_valid_http_method(method)
return frappe.call(method, **frappe.form_dict) | CWE-79 | 1 |
def move_files_on_change(calibre_path, new_authordir, new_titledir, localbook, db_filename, original_filepath, path):
new_path = os.path.join(calibre_path, new_authordir, new_titledir)
new_name = get_valid_filename(localbook.title, chars=96) + ' - ' + new_authordir
try:
if original_filepath:
if not os.path.isdir(new_path):
os.makedirs(new_path)
shutil.move(os.path.normcase(original_filepath), os.path.normcase(os.path.join(new_path, db_filename)))
log.debug("Moving title: %s to %s/%s", original_filepath, new_path, new_name)
else:
# Check whether the new path does not already exist
if not os.path.exists(new_path):
# move original path to new path
log.debug("Moving title: %s to %s", path, new_path)
shutil.move(os.path.normcase(path), os.path.normcase(new_path))
else: # path already exists, so only copy files into the existing location (merge)
log.info("Moving title: %s into existing: %s", path, new_path)
# Take all files and subfolders from old path (strange command)
for dir_name, __, file_list in os.walk(path):
for file in file_list:
shutil.move(os.path.normcase(os.path.join(dir_name, file)),
os.path.normcase(os.path.join(new_path + dir_name[len(path):], file)))
# change location in database to new author/title path
localbook.path = os.path.join(new_authordir, new_titledir).replace('\\','/')
except OSError as ex:
log.error("Rename title from: %s to %s: %s", path, new_path, ex)
log.debug(ex, exc_info=True)
return _("Rename title from: '%(src)s' to '%(dest)s' failed with error: %(error)s",
src=path, dest=new_path, error=str(ex))
return False | CWE-918 | 16 |
def innerfn(fn):
global whitelisted, guest_methods, xss_safe_methods, allowed_http_methods_for_whitelisted_func
whitelisted.append(fn)
allowed_http_methods_for_whitelisted_func[fn] = methods
if allow_guest:
guest_methods.append(fn)
if xss_safe:
xss_safe_methods.append(fn)
return fn | CWE-79 | 1 |
def sanitized_join(path: str, root: pathlib.Path) -> pathlib.Path:
result = (root / path).absolute()
if not str(result).startswith(str(root) + "/"):
raise ValueError("resulting path is outside root")
return result | CWE-22 | 2 |
def read_templates(
self,
filenames: List[str],
custom_template_directory: Optional[str] = None,
autoescape: bool = False, | CWE-79 | 1 |
def parse_configuration_file(config_path):
"""
Read the config file for an experiment to get ParlAI settings.
:param config_path:
path to config
:return:
parsed configuration dictionary
"""
result = {}
result["configs"] = {}
with open(config_path) as f:
cfg = yaml.load(f.read(), Loader=yaml.FullLoader)
# get world path
result["world_path"] = cfg.get("world_module")
if not result["world_path"]:
raise ValueError("Did not specify world module")
result["overworld"] = cfg.get("overworld")
if not result["overworld"]:
raise ValueError("Did not specify overworld")
result["max_workers"] = cfg.get("max_workers")
if not result["max_workers"]:
raise ValueError("Did not specify max_workers")
result["task_name"] = cfg.get("task_name")
if not result["task_name"]:
raise ValueError("Did not specify task name")
task_world = cfg.get("tasks")
if task_world is None or len(task_world) == 0:
raise ValueError("task not in config file")
# get task file
for task_name, configuration in task_world.items():
if "task_world" not in configuration:
raise ValueError("{} does not specify a task".format(task_name))
result["configs"][task_name] = WorldConfig(
world_name=task_name,
onboarding_name=configuration.get("onboard_world"),
task_name=configuration.get("task_world"),
max_time_in_pool=configuration.get("timeout") or 300,
agents_required=configuration.get("agents_required") or 1,
backup_task=configuration.get("backup_task"),
)
# get world options, additional args
result["world_opt"] = cfg.get("opt", {})
result["additional_args"] = cfg.get("additional_args", {})
return result | CWE-502 | 15 |
def testPartialIndexInsert(self):
with ops.Graph().as_default() as G:
with ops.device('/cpu:0'):
x = array_ops.placeholder(dtypes.float32)
f = array_ops.placeholder(dtypes.float32)
v = array_ops.placeholder(dtypes.float32)
pi = array_ops.placeholder(dtypes.int64)
gi = array_ops.placeholder(dtypes.int64)
with ops.device(test.gpu_device_name()):
stager = data_flow_ops.MapStagingArea(
[dtypes.float32, dtypes.float32, dtypes.float32])
stage_xf = stager.put(pi, [x, f], [0, 2])
stage_v = stager.put(pi, [v], [1])
key, ret = stager.get(gi)
size = stager.size()
isize = stager.incomplete_size()
G.finalize()
with self.session(graph=G) as sess:
# 0 complete and incomplete entries
self.assertTrue(sess.run([size, isize]) == [0, 0])
# Stage key 0, x and f tuple entries
sess.run(stage_xf, feed_dict={pi: 0, x: 1, f: 2})
self.assertTrue(sess.run([size, isize]) == [0, 1])
# Stage key 1, x and f tuple entries
sess.run(stage_xf, feed_dict={pi: 1, x: 1, f: 2})
self.assertTrue(sess.run([size, isize]) == [0, 2])
# Now complete key 0 with tuple entry v
sess.run(stage_v, feed_dict={pi: 0, v: 1})
# 1 complete and 1 incomplete entry
self.assertTrue(sess.run([size, isize]) == [1, 1])
# We can now obtain tuple associated with key 0
self.assertTrue(sess.run([key, ret], feed_dict={gi: 0}) == [0, [1, 1, 2]])
# 0 complete and 1 incomplete entry
self.assertTrue(sess.run([size, isize]) == [0, 1])
# Now complete key 1 with tuple entry v
sess.run(stage_v, feed_dict={pi: 1, v: 3})
# We can now obtain tuple associated with key 1
self.assertTrue(sess.run([key, ret], feed_dict={gi: 1}) == [1, [1, 3, 2]]) | CWE-843 | 43 |
def load_djpeg(self):
# ALTERNATIVE: handle JPEGs via the IJG command line utilities
import tempfile, os
file = tempfile.mktemp()
os.system("djpeg %s >%s" % (self.filename, file))
try:
self.im = Image.core.open_ppm(file)
finally:
try: os.unlink(file)
except: pass
self.mode = self.im.mode
self.size = self.im.size
self.tile = [] | CWE-59 | 36 |
def test_http10_list(self):
body = string.ascii_letters
to_send = (
"GET /list HTTP/1.0\n"
"Connection: Keep-Alive\n"
"Content-Length: %d\n\n" % len(body)
)
to_send += body
to_send = tobytes(to_send)
self.connect()
self.sock.send(to_send)
fp = self.sock.makefile("rb", 0)
line, headers, response_body = read_http(fp)
self.assertline(line, "200", "OK", "HTTP/1.0")
self.assertEqual(headers["content-length"], str(len(body)))
self.assertEqual(headers.get("connection"), "Keep-Alive")
self.assertEqual(response_body, tobytes(body))
# remote keeps connection open because it divined the content length
# from a length-1 list
self.sock.send(to_send)
line, headers, response_body = read_http(fp)
self.assertline(line, "200", "OK", "HTTP/1.0") | CWE-444 | 41 |
def _moderate(request, pk, field_name, to_value, action=None, message=None):
topic = get_object_or_404(Topic, pk=pk)
if is_post(request):
count = (
Topic.objects
.filter(pk=pk)
.exclude(**{field_name: to_value})
.update(**{
field_name: to_value,
'reindex_at': timezone.now()}))
if count and action is not None:
Comment.create_moderation_action(
user=request.user,
topic=topic,
action=action)
if message is not None:
messages.info(request, message)
return redirect(request.POST.get(
'next', topic.get_absolute_url()))
return render(
request=request,
template_name='spirit/topic/moderate.html',
context={'topic': topic}) | CWE-601 | 11 |
def testInputPreProcessErrorBadFormat(self):
input_str = 'inputx=file[[v1]v2'
with self.assertRaises(RuntimeError):
saved_model_cli.preprocess_inputs_arg_string(input_str)
input_str = 'inputx:file'
with self.assertRaises(RuntimeError):
saved_model_cli.preprocess_inputs_arg_string(input_str)
input_str = 'inputx:np.zeros((5))'
with self.assertRaises(RuntimeError):
saved_model_cli.preprocess_input_exprs_arg_string(input_str) | CWE-94 | 14 |
def feed_publisher(book_id):
off = request.args.get("offset") or 0
entries, __, pagination = calibre_db.fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1), 0,
db.Books,
db.Books.publishers.any(db.Publishers.id == book_id),
[db.Books.timestamp.desc()])
return render_xml_template('feed.xml', entries=entries, pagination=pagination) | CWE-918 | 16 |
def _rpsl_db_query_to_graphql_out(query: RPSLDatabaseQuery, info: GraphQLResolveInfo):
"""
Given an RPSL database query, execute it and clean up the output
to be suitable to return to GraphQL.
Main changes are:
- Enum handling
- Adding the asn and prefix fields if applicable
- Ensuring the right fields are returned as a list of strings or a string
"""
database_handler = info.context['request'].app.state.database_handler
if info.context.get('sql_trace'):
if 'sql_queries' not in info.context:
info.context['sql_queries'] = [repr(query)]
else:
info.context['sql_queries'].append(repr(query))
for row in database_handler.execute_query(query, refresh_on_error=True):
graphql_result = {snake_to_camel_case(k): v for k, v in row.items() if k != 'parsed_data'}
if 'object_text' in row:
graphql_result['objectText'] = remove_auth_hashes(row['object_text'])
if 'rpki_status' in row:
graphql_result['rpkiStatus'] = row['rpki_status']
if row.get('ip_first') is not None and row.get('prefix_length'):
graphql_result['prefix'] = row['ip_first'] + '/' + str(row['prefix_length'])
if row.get('asn_first') is not None and row.get('asn_first') == row.get('asn_last'):
graphql_result['asn'] = row['asn_first']
object_type = resolve_rpsl_object_type(row)
for key, value in row.get('parsed_data', dict()).items():
if key == 'auth':
value = remove_auth_hashes(value)
graphql_type = schema.graphql_types[object_type][key]
if graphql_type == 'String' and isinstance(value, list):
value = '\n'.join(value)
graphql_result[snake_to_camel_case(key)] = value
yield graphql_result | CWE-212 | 66 |
def _load_from(self, data: bytes) -> None:
if data.strip() == b'':
data = XMP_EMPTY # on some platforms lxml chokes on empty documents
def basic_parser(xml):
return parse(BytesIO(xml))
def strip_illegal_bytes_parser(xml):
return parse(BytesIO(re_xml_illegal_bytes.sub(b'', xml)))
def recovery_parser(xml):
parser = XMLParser(recover=True)
return parse(BytesIO(xml), parser)
def replace_with_empty_xmp(_xml=None):
log.warning("Error occurred parsing XMP, replacing with empty XMP.")
return basic_parser(XMP_EMPTY)
if self.overwrite_invalid_xml:
parsers: Iterable[Callable] = [
basic_parser,
strip_illegal_bytes_parser,
recovery_parser,
replace_with_empty_xmp,
]
else:
parsers = [basic_parser]
for parser in parsers:
try:
self._xmp = parser(data)
except (XMLSyntaxError if self.overwrite_invalid_xml else NeverRaise) as e:
if str(e).startswith("Start tag expected, '<' not found") or str(
e
).startswith("Document is empty"):
self._xmp = replace_with_empty_xmp()
break
else:
break
try:
pis = self._xmp.xpath('/processing-instruction()')
for pi in pis:
etree.strip_tags(self._xmp, pi.tag)
self._get_rdf_root()
except (Exception if self.overwrite_invalid_xml else NeverRaise) as e:
log.warning("Error occurred parsing XMP", exc_info=e)
self._xmp = replace_with_empty_xmp()
return | CWE-611 | 13 |
def test_received_error_from_parser(self):
from waitress.utilities import BadRequest
data = b"""\
GET /foobar HTTP/1.1
Transfer-Encoding: chunked
X-Foo: 1
garbage
"""
# header
result = self.parser.received(data)
# body
result = self.parser.received(data[result:])
self.assertEqual(result, 8)
self.assertTrue(self.parser.completed)
self.assertTrue(isinstance(self.parser.error, BadRequest)) | CWE-444 | 41 |
def __init__(self, pidfile=None):
if not pidfile:
self.pidfile = "/tmp/%s.pid" % self.__class__.__name__.lower()
else:
self.pidfile = pidfile | CWE-59 | 36 |
def feed_authorindex():
shift = 0
off = int(request.args.get("offset") or 0)
entries = calibre_db.session.query(func.upper(func.substr(db.Authors.sort, 1, 1)).label('id'))\
.join(db.books_authors_link).join(db.Books).filter(calibre_db.common_filters())\
.group_by(func.upper(func.substr(db.Authors.sort, 1, 1))).all()
elements = []
if off == 0:
elements.append({'id': "00", 'name':_("All")})
shift = 1
for entry in entries[
off + shift - 1:
int(off + int(config.config_books_per_page) - shift)]:
elements.append({'id': entry.id, 'name': entry.id})
pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1), config.config_books_per_page,
len(entries) + 1)
return render_xml_template('feed.xml',
letterelements=elements,
folder='opds.feed_letter_author',
pagination=pagination) | CWE-918 | 16 |
def test_request_body_too_large_with_wrong_cl_http10(self):
body = "a" * self.toobig
to_send = "GET / HTTP/1.0\n" "Content-Length: 5\n\n"
to_send += body
to_send = tobytes(to_send)
self.connect()
self.sock.send(to_send)
fp = self.sock.makefile("rb")
# first request succeeds (content-length 5)
line, headers, response_body = read_http(fp)
self.assertline(line, "200", "OK", "HTTP/1.0")
cl = int(headers["content-length"])
self.assertEqual(cl, len(response_body))
# server trusts the content-length header; no pipelining,
# so request fulfilled, extra bytes are thrown away
# connection has been closed
self.send_check_error(to_send)
self.assertRaises(ConnectionClosed, read_http, fp) | CWE-444 | 41 |
def toggle_archived(book_id):
is_archived = change_archived_books(book_id, message="Book {} archivebit toggled".format(book_id))
if is_archived:
remove_synced_book(book_id)
return "" | CWE-918 | 16 |
def test_logout_unknown_token(self):
login_code = LoginCode.objects.create(user=self.user, code='foobar')
self.client.login(username=self.user.username, code=login_code.code)
response = self.client.post(
'/accounts-rest/logout/',
HTTP_AUTHORIZATION='Token unknown',
)
self.assertEqual(response.status_code, 200) | CWE-312 | 71 |
def stmt(self, stmt, msg=None):
mod = ast.Module([stmt])
self.mod(mod, msg) | CWE-125 | 47 |
def test_replay_banner_metadata(self, fmod):
""" Test adding metadata in replay banner (both framed and non-frame)
"""
resp = self.get('/test/20140103030321{0}/http://example.com/?example=1', fmod)
assert '<div>Custom Banner Here!</div>' in resp.text
assert '"some":"value"' in resp.text | CWE-79 | 1 |
def run_custom_method(doctype, name, custom_method):
"""cmd=run_custom_method&doctype={doctype}&name={name}&custom_method={custom_method}"""
doc = frappe.get_doc(doctype, name)
if getattr(doc, custom_method, frappe._dict()).is_whitelisted:
frappe.call(getattr(doc, custom_method), **frappe.local.form_dict)
else:
frappe.throw(_("Not permitted"), frappe.PermissionError) | CWE-79 | 1 |
def head_file(path):
try:
data_file, metadata = get_info(
path,
pathlib.Path(app.config["DATA_ROOT"])
)
stat = data_file.stat()
except (OSError, ValueError):
return flask.Response(
"Not Found",
404,
mimetype="text/plain",
)
response = flask.Response()
response.headers["Content-Length"] = str(stat.st_size)
generate_headers(
response.headers,
metadata["headers"],
)
return response | CWE-22 | 2 |
def parse_soap_enveloped_saml(text, body_class, header_class=None):
"""Parses a SOAP enveloped SAML thing and returns header parts and body
:param text: The SOAP object as XML
:return: header parts and body as saml.samlbase instances
"""
envelope = ElementTree.fromstring(text)
assert envelope.tag == '{%s}Envelope' % NAMESPACE
# print(len(envelope))
body = None
header = {}
for part in envelope:
# print(">",part.tag)
if part.tag == '{%s}Body' % NAMESPACE:
for sub in part:
try:
body = saml2.create_class_from_element_tree(body_class, sub)
except Exception:
raise Exception(
"Wrong body type (%s) in SOAP envelope" % sub.tag)
elif part.tag == '{%s}Header' % NAMESPACE:
if not header_class:
raise Exception("Header where I didn't expect one")
# print("--- HEADER ---")
for sub in part:
# print(">>",sub.tag)
for klass in header_class:
# print("?{%s}%s" % (klass.c_namespace,klass.c_tag))
if sub.tag == "{%s}%s" % (klass.c_namespace, klass.c_tag):
header[sub.tag] = \
saml2.create_class_from_element_tree(klass, sub)
break
return body, header | CWE-611 | 13 |
def delete(request, pk):
favorite = get_object_or_404(TopicFavorite, pk=pk, user=request.user)
favorite.delete()
return redirect(request.POST.get('next', favorite.topic.get_absolute_url())) | CWE-601 | 11 |
def sql_insert(self, sentence):
self.cursor.execute(sentence)
self.conn.commit()
return True | CWE-89 | 0 |
def camera_img_return_path(camera_unique_id, img_type, filename):
"""Return an image from stills or time-lapses"""
camera = Camera.query.filter(Camera.unique_id == camera_unique_id).first()
camera_path = assure_path_exists(
os.path.join(PATH_CAMERAS, '{uid}'.format(uid=camera.unique_id)))
if img_type == 'still':
if camera.path_still:
path = camera.path_still
else:
path = os.path.join(camera_path, img_type)
elif img_type == 'timelapse':
if camera.path_timelapse:
path = camera.path_timelapse
else:
path = os.path.join(camera_path, img_type)
else:
return "Unknown Image Type"
if os.path.isdir(path):
files = (files for files in os.listdir(path)
if os.path.isfile(os.path.join(path, files)))
else:
files = []
if filename in files:
path_file = os.path.join(path, filename)
return send_file(path_file, mimetype='image/jpeg')
return "Image not found" | CWE-22 | 2 |
def test_roundtrip_file(self):
f = open(self.filename, 'wb')
self.x.tofile(f)
f.close()
# NB. doesn't work with flush+seek, due to use of C stdio
f = open(self.filename, 'rb')
y = np.fromfile(f, dtype=self.dtype)
f.close()
assert_array_equal(y, self.x.flat)
os.unlink(self.filename) | CWE-59 | 36 |
def check_valid_db(cls, config_calibre_dir, app_db_path, config_calibre_uuid):
if not config_calibre_dir:
return False, False
dbpath = os.path.join(config_calibre_dir, "metadata.db")
if not os.path.exists(dbpath):
return False, False
try:
check_engine = create_engine('sqlite://',
echo=False,
isolation_level="SERIALIZABLE",
connect_args={'check_same_thread': False},
poolclass=StaticPool)
with check_engine.begin() as connection:
connection.execute(text("attach database '{}' as calibre;".format(dbpath)))
connection.execute(text("attach database '{}' as app_settings;".format(app_db_path)))
local_session = scoped_session(sessionmaker())
local_session.configure(bind=connection)
database_uuid = local_session().query(Library_Id).one_or_none()
# local_session.dispose()
check_engine.connect()
db_change = config_calibre_uuid != database_uuid.uuid
except Exception:
return False, False
return True, db_change | CWE-918 | 16 |
def _load_yamlconfig(self, configfile):
yamlconfig = None
try:
if self._recent_pyyaml():
# https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation
# only for 5.1+
yamlconfig = yaml.load(open(configfile), Loader=yaml.FullLoader)
else:
yamlconfig = yaml.load(open(configfile))
except yaml.YAMLError as exc:
logger.error("Error in configuration file {0}:".format(configfile))
if hasattr(exc, 'problem_mark'):
mark = exc.problem_mark
raise PystemonConfigException("error position: (%s:%s)" % (mark.line + 1, mark.column + 1))
for includes in yamlconfig.get("includes", []):
try:
logger.debug("loading include '{0}'".format(includes))
yamlconfig.update(yaml.load(open(includes)))
except Exception as e:
raise PystemonConfigException("failed to load '{0}': {1}".format(includes, e))
return yamlconfig | CWE-502 | 15 |
def get_recipe_from_file(self, file):
recipe_html = file.getvalue().decode("utf-8")
recipe_json, recipe_tree, html_data, images = get_recipe_from_source(recipe_html, 'CookBookApp', self.request)
recipe = Recipe.objects.create(
name=recipe_json['name'].strip(),
created_by=self.request.user, internal=True,
space=self.request.space)
try:
recipe.servings = re.findall('([0-9])+', recipe_json['recipeYield'])[0]
except Exception as e:
pass
try:
recipe.working_time = iso_duration_to_minutes(recipe_json['prepTime'])
recipe.waiting_time = iso_duration_to_minutes(recipe_json['cookTime'])
except Exception:
pass
step = Step.objects.create(instruction=recipe_json['recipeInstructions'], space=self.request.space, )
if 'nutrition' in recipe_json:
step.instruction = step.instruction + '\n\n' + recipe_json['nutrition']
step.save()
recipe.steps.add(step)
ingredient_parser = IngredientParser(self.request, True)
for ingredient in recipe_json['recipeIngredient']:
f = ingredient_parser.get_food(ingredient['ingredient']['text'])
u = ingredient_parser.get_unit(ingredient['unit']['text'])
step.ingredients.add(Ingredient.objects.create(
food=f, unit=u, amount=ingredient['amount'], note=ingredient['note'], space=self.request.space,
))
if len(images) > 0:
try:
response = requests.get(images[0])
self.import_recipe_image(recipe, BytesIO(response.content))
except Exception as e:
print('failed to import image ', str(e))
recipe.save()
return recipe | CWE-918 | 16 |
def test_invalid_sum(self):
pos = dict(lineno=2, col_offset=3)
m = ast.Module([ast.Expr(ast.expr(**pos), **pos)])
with self.assertRaises(TypeError) as cm:
compile(m, "<test>", "exec")
self.assertIn("but got <_ast.expr", str(cm.exception)) | CWE-125 | 47 |
def malt_regex_tagger():
from nltk.tag import RegexpTagger
_tagger = RegexpTagger(
[
(r"\.$", "."),
(r"\,$", ","),
(r"\?$", "?"), # fullstop, comma, Qmark
(r"\($", "("),
(r"\)$", ")"), # round brackets
(r"\[$", "["),
(r"\]$", "]"), # square brackets
(r"^-?[0-9]+(.[0-9]+)?$", "CD"), # cardinal numbers
(r"(The|the|A|a|An|an)$", "DT"), # articles
(r"(He|he|She|she|It|it|I|me|Me|You|you)$", "PRP"), # pronouns
(r"(His|his|Her|her|Its|its)$", "PRP$"), # possessive
(r"(my|Your|your|Yours|yours)$", "PRP$"), # possessive
(r"(on|On|in|In|at|At|since|Since)$", "IN"), # time prepopsitions
(r"(for|For|ago|Ago|before|Before)$", "IN"), # time prepopsitions
(r"(till|Till|until|Until)$", "IN"), # time prepopsitions
(r"(by|By|beside|Beside)$", "IN"), # space prepopsitions
(r"(under|Under|below|Below)$", "IN"), # space prepopsitions
(r"(over|Over|above|Above)$", "IN"), # space prepopsitions
(r"(across|Across|through|Through)$", "IN"), # space prepopsitions
(r"(into|Into|towards|Towards)$", "IN"), # space prepopsitions
(r"(onto|Onto|from|From)$", "IN"), # space prepopsitions
(r".*able$", "JJ"), # adjectives
(r".*ness$", "NN"), # nouns formed from adjectives
(r".*ly$", "RB"), # adverbs
(r".*s$", "NNS"), # plural nouns
(r".*ing$", "VBG"), # gerunds
(r".*ed$", "VBD"), # past tense verbs
(r".*", "NN"), # nouns (default)
]
)
return _tagger.tag | CWE-1333 | 73 |
def expr(self, node, msg=None, *, exc=ValueError):
mod = ast.Module([ast.Expr(node)])
self.mod(mod, msg, exc=exc) | CWE-125 | 47 |
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(
resource_for_federation=Mock(), http_client=None
)
return hs | CWE-601 | 11 |
def join_in(request, topic_id):
# todo: replace by create_access()?
# This is for topic creators who left their own topics and want to join again
topic = get_object_or_404(
Topic,
pk=topic_id,
user=request.user,
category_id=settings.ST_TOPIC_PRIVATE_CATEGORY_PK)
form = TopicPrivateJoinForm(
topic=topic,
user=request.user,
data=post_data(request))
if is_post(request) and form.is_valid():
topic_private = form.save()
notify_access(user=form.get_user(), topic_private=topic_private)
return redirect(request.POST.get('next', topic.get_absolute_url()))
return render(
request=request,
template_name='spirit/topic/private/join.html',
context={
'topic': topic,
'form': form}) | CWE-601 | 11 |
def test_request_body_too_large_with_no_cl_http11(self):
body = "a" * self.toobig
to_send = "GET / HTTP/1.1\n\n"
to_send += body
to_send = tobytes(to_send)
self.connect()
self.sock.send(to_send)
fp = self.sock.makefile("rb")
# server trusts the content-length header (assumed 0)
line, headers, response_body = read_http(fp)
self.assertline(line, "200", "OK", "HTTP/1.1")
cl = int(headers["content-length"])
self.assertEqual(cl, len(response_body))
# server assumes pipelined requests due to http/1.1, and the first
# request was assumed c-l 0 because it had no content-length header,
# so entire body looks like the header of the subsequent request
# second response is an error response
line, headers, response_body = read_http(fp)
self.assertline(line, "431", "Request Header Fields Too Large", "HTTP/1.0")
cl = int(headers["content-length"])
self.assertEqual(cl, len(response_body))
# connection has been closed
self.send_check_error(to_send)
self.assertRaises(ConnectionClosed, read_http, fp) | CWE-444 | 41 |
def clean_id(id_):
'''
Return whether the passed id is clean.
'''
if re.search(r'\.\.\{sep}'.format(sep=os.sep), id_):
return False
return True | CWE-22 | 2 |
def render_delete_book_result(book_format, jsonResponse, warning, book_id):
if book_format:
if jsonResponse:
return json.dumps([warning, {"location": url_for("editbook.edit_book", book_id=book_id),
"type": "success",
"format": book_format,
"message": _('Book Format Successfully Deleted')}])
else:
flash(_('Book Format Successfully Deleted'), category="success")
return redirect(url_for('editbook.edit_book', book_id=book_id))
else:
if jsonResponse:
return json.dumps([warning, {"location": url_for('web.index'),
"type": "success",
"format": book_format,
"message": _('Book Successfully Deleted')}])
else:
flash(_('Book Successfully Deleted'), category="success")
return redirect(url_for('web.index')) | CWE-918 | 16 |
def needs_clamp(t, encoding):
if encoding not in (Encoding.ABI, Encoding.JSON_ABI):
return False
if isinstance(t, (ByteArrayLike, DArrayType)):
if encoding == Encoding.JSON_ABI:
# don't have bytestring size bound from json, don't clamp
return False
return True
if isinstance(t, BaseType) and t.typ not in ("int256", "uint256", "bytes32"):
return True
if isinstance(t, SArrayType):
return needs_clamp(t.subtype, encoding)
if isinstance(t, TupleLike):
return any(needs_clamp(m, encoding) for m in t.tuple_members())
return False | CWE-190 | 19 |
def test_login(self):
login_code = LoginCode.objects.create(user=self.user, code='foobar', next='/private/')
response = self.client.post('/accounts-rest/login/code/', {
'code': login_code.code,
})
self.assertEqual(response.status_code, 200)
self.assertFalse(LoginCode.objects.filter(pk=login_code.pk).exists())
token = Token.objects.filter(user=self.user).first()
self.assertIsNotNone(token)
self.assertEqual(response.data, {
'key': token.key,
'next': '/private/',
}) | CWE-312 | 71 |
def write_data_table(self, report, report_data, has_totals=True):
self.data.append([c["title"] for c in report.schema])
for datum in report_data:
datum = report.read_datum(datum)
self.data.append([format_data(data, format_iso_dates=True) for data in datum])
if has_totals:
for datum in report.get_totals(report_data):
datum = report.read_datum(datum)
self.data.append([format_data(data) for data in datum]) | CWE-1236 | 12 |
def __init__(self, hs):
super().__init__(hs)
self.http_client = SimpleHttpClient(hs)
# We create a blacklisting instance of SimpleHttpClient for contacting identity
# servers specified by clients
self.blacklisting_http_client = SimpleHttpClient(
hs, ip_blacklist=hs.config.federation_ip_range_blacklist
)
self.federation_http_client = hs.get_http_client()
self.hs = hs | CWE-601 | 11 |
def test_request_body_too_large_with_wrong_cl_http11_connclose(self):
body = "a" * self.toobig
to_send = "GET / HTTP/1.1\nContent-Length: 5\nConnection: close\n\n"
to_send += body
to_send = tobytes(to_send)
self.connect()
self.sock.send(to_send)
fp = self.sock.makefile("rb", 0)
line, headers, response_body = read_http(fp)
# server trusts the content-length header (5)
self.assertline(line, "200", "OK", "HTTP/1.1")
cl = int(headers["content-length"])
self.assertEqual(cl, len(response_body))
# connection has been closed
self.send_check_error(to_send)
self.assertRaises(ConnectionClosed, read_http, fp) | CWE-444 | 41 |
def append(self, key, value):
self.dict.setdefault(_hkey(key), []).append(
value if isinstance(value, unicode) else str(value)) | CWE-93 | 33 |
def testInvalidSparseTensor(self):
with test_util.force_cpu():
shape = [2, 2]
val = [0]
dense = constant_op.constant(np.zeros(shape, dtype=np.int32))
for bad_idx in [
[[-1, 0]], # -1 is invalid.
[[1, 3]], # ...so is 3.
]:
sparse = sparse_tensor.SparseTensorValue(bad_idx, val, shape)
s = sparse_ops.sparse_add(sparse, dense)
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
"invalid index"):
self.evaluate(s) | CWE-476 | 46 |
def filter(self, names):
for name in [_hkey(n) for n in names]:
if name in self.dict:
del self.dict[name] | CWE-93 | 33 |
def upload_cover(request, book):
if 'btn-upload-cover' in request.files:
requested_file = request.files['btn-upload-cover']
# check for empty request
if requested_file.filename != '':
if not current_user.role_upload():
abort(403)
ret, message = helper.save_cover(requested_file, book.path)
if ret is True:
return True
else:
flash(message, category="error")
return False
return None | CWE-918 | 16 |
def test_basic_auth_invalid_credentials(self):
with self.assertRaises(InvalidStatusCode) as raised:
self.start_client(user_info=("hello", "ihateyou"))
self.assertEqual(raised.exception.status_code, 401) | CWE-203 | 38 |
def test_render_quoting(self):
w = widgets.AdminURLFieldWidget()
self.assertHTMLEqual(
conditional_escape(w.render('test', 'http://example.com/<sometag>some text</sometag>')),
'<p class="url">Currently:<a href="http://example.com/%3Csometag%3Esome%20text%3C/sometag%3E">http://example.com/<sometag>some text</sometag></a><br />Change:<input class="vURLField" name="test" type="url" value="http://example.com/<sometag>some text</sometag>" /></p>'
)
self.assertHTMLEqual(
conditional_escape(w.render('test', 'http://example-äüö.com/<sometag>some text</sometag>')),
'<p class="url">Currently:<a href="http://xn--example--7za4pnc.com/%3Csometag%3Esome%20text%3C/sometag%3E">http://example-äüö.com/<sometag>some text</sometag></a><br />Change:<input class="vURLField" name="test" type="url" value="http://example-äüö.com/<sometag>some text</sometag>" /></p>'
) | CWE-79 | 1 |
def exec_command(self, cmd, tmp_path, become_user=None, sudoable=False, executable=None, in_data=None):
''' run a command on the zone '''
if sudoable and self.runner.become and self.runner.become_method not in self.become_methods_supported:
raise errors.AnsibleError("Internal Error: this module does not support running commands via %s" % self.runner.become_method)
if in_data:
raise errors.AnsibleError("Internal Error: this module does not support optimized module pipelining")
# We happily ignore privilege escalation
if executable == '/bin/sh':
executable = None
local_cmd = self._generate_cmd(executable, cmd)
vvv("EXEC %s" % (local_cmd), host=self.zone)
p = subprocess.Popen(local_cmd, shell=isinstance(local_cmd, basestring),
cwd=self.runner.basedir,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
return (p.returncode, '', stdout, stderr) | CWE-59 | 36 |
def _re_word_boundary(r: str) -> str:
"""
Adds word boundary characters to the start and end of an
expression to require that the match occur as a whole word,
but do so respecting the fact that strings starting or ending
with non-word characters will change word boundaries.
"""
# we can't use \b as it chokes on unicode. however \W seems to be okay
# as shorthand for [^0-9A-Za-z_].
return r"(^|\W)%s(\W|$)" % (r,) | CWE-331 | 72 |
def test_date_and_server(self):
to_send = "GET / HTTP/1.0\n" "Content-Length: 0\n\n"
to_send = tobytes(to_send)
self.connect()
self.sock.send(to_send)
fp = self.sock.makefile("rb", 0)
line, headers, echo = self._read_echo(fp)
self.assertline(line, "200", "OK", "HTTP/1.0")
self.assertEqual(headers.get("server"), "waitress")
self.assertTrue(headers.get("date")) | CWE-444 | 41 |
def test_multi_file(
self,
driver,
live_server,
freeze,
upload_file,
another_upload_file,
yet_another_upload_file, | CWE-22 | 2 |
def respond_error(self, context, exception):
context.respond_server_error()
stack = traceback.format_exc()
return """
<html>
<body>
<style>
body {
font-family: sans-serif;
color: #888;
text-align: center;
}
body pre {
width: 600px;
text-align: left;
margin: auto;
font-family: monospace;
}
</style>
<img src="/ajenti:static/main/error.jpeg" />
<br/>
<p>
Server error
</p>
<pre>
%s
</pre>
</body>
</html>
""" % stack | CWE-79 | 1 |
def check_send_to_kindle_with_converter(formats):
bookformats = list()
if 'EPUB' in formats and 'MOBI' not in formats:
bookformats.append({'format': 'Mobi',
'convert': 1,
'text': _('Convert %(orig)s to %(format)s and send to Kindle',
orig='Epub',
format='Mobi')})
if 'AZW3' in formats and not 'MOBI' in formats:
bookformats.append({'format': 'Mobi',
'convert': 2,
'text': _('Convert %(orig)s to %(format)s and send to Kindle',
orig='Azw3',
format='Mobi')})
return bookformats | CWE-918 | 16 |
def main():
app = create_app()
init_errorhandler()
app.register_blueprint(web)
app.register_blueprint(opds)
app.register_blueprint(jinjia)
app.register_blueprint(about)
app.register_blueprint(shelf)
app.register_blueprint(admi)
app.register_blueprint(remotelogin)
app.register_blueprint(meta)
app.register_blueprint(gdrive)
app.register_blueprint(editbook)
if kobo_available:
app.register_blueprint(kobo)
app.register_blueprint(kobo_auth)
if oauth_available:
app.register_blueprint(oauth)
success = web_server.start()
sys.exit(0 if success else 1) | CWE-918 | 16 |
def __init__(self, sourceName: str):
self.sourceName = sourceName
self.type = "file"
self.content = None | CWE-22 | 2 |
async def get_user_list(
*, client: Client, an_enum_value: List[AnEnum], some_date: Union[date, datetime], | CWE-94 | 14 |
def test_level_as_none(self):
body = [ast.ImportFrom(module='time',
names=[ast.alias(name='sleep')],
level=None,
lineno=0, col_offset=0)]
mod = ast.Module(body)
code = compile(mod, 'test', 'exec')
ns = {}
exec(code, ns)
self.assertIn('sleep', ns) | CWE-125 | 47 |
def sql_insert(self, sentence):
self.cursor.execute(sentence)
self.conn.commit()
return True | CWE-79 | 1 |