code (string · lengths 12–2.05k) | label_name (string · 5 classes) | label (int64 · 0–4)
---|---|---|
void AveragePool(const float* input_data, const Dims<4>& input_dims,
int stride_width, int stride_height, int pad_width,
int pad_height, int kwidth, int kheight, float* output_data,
const Dims<4>& output_dims) {
float output_activation_min, output_activation_max;
GetActivationMinMax(Ac, &output_activation_min, &output_activation_max);
AveragePool(input_data, input_dims, stride_width, stride_height, pad_width,
pad_height, kwidth, kheight, output_activation_min,
output_activation_max, output_data, output_dims);
} | Base | 1 |
def test_create_catalog(self):
pardir = self.get_test_dir(erase=1)
cat = catalog.get_catalog(pardir,'c')
assert_(cat is not None)
cat.close()
self.remove_dir(pardir) | Class | 2 |
def test_show_student_extensions(self):
self.test_change_due_date()
url = reverse('show_student_extensions',
kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {'student': self.user1.username})
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(json.loads(response.content), {
u'data': [{u'Extended Due Date': u'2013-12-30 00:00',
u'Unit': self.week1.display_name}],
u'header': [u'Unit', u'Extended Due Date'],
u'title': u'Due date extensions for %s (%s)' % (
self.user1.profile.name, self.user1.username)}) | Compound | 4 |
def testDictionary(self):
with ops.Graph().as_default() as G:
with ops.device('/cpu:0'):
x = array_ops.placeholder(dtypes.float32)
pi = array_ops.placeholder(dtypes.int64)
gi = array_ops.placeholder(dtypes.int64)
v = 2. * (array_ops.zeros([128, 128]) + x)
with ops.device(test.gpu_device_name()):
stager = data_flow_ops.MapStagingArea(
[dtypes.float32, dtypes.float32],
shapes=[[], [128, 128]],
names=['x', 'v'])
stage = stager.put(pi, {'x': x, 'v': v})
key, ret = stager.get(gi)
z = ret['x']
y = ret['v']
y = math_ops.reduce_max(z * math_ops.matmul(y, y))
G.finalize()
with self.session(graph=G) as sess:
sess.run(stage, feed_dict={x: -1, pi: 0})
for i in range(10):
_, yval = sess.run([stage, y], feed_dict={x: i, pi: i + 1, gi: i})
self.assertAllClose(
4 * (i - 1) * (i - 1) * (i - 1) * 128, yval, rtol=1e-4) | Base | 1 |
def CreateAuthenticator():
"""Create a packet autenticator. All RADIUS packets contain a sixteen
byte authenticator which is used to authenticate replies from the
RADIUS server and in the password hiding algorithm. This function
returns a suitable random string that can be used as an authenticator.
:return: valid packet authenticator
:rtype: binary string
"""
data = []
for i in range(16):
data.append(random.randrange(0, 256))
if six.PY3:
return bytes(data)
else:
return ''.join(chr(b) for b in data) | Class | 2 |
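The snippet above draws its authenticator from Python's `random` module, which is seedable and predictable rather than cryptographically secure. A minimal stdlib-only sketch of a hardened equivalent (names are illustrative):

import os

def create_authenticator() -> bytes:
    # os.urandom reads from the OS CSPRNG, so the sixteen-byte value
    # cannot be predicted from previously observed authenticators.
    return os.urandom(16)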
def post(self, request, *args, **kwargs):
serializer = self.serializer_class(data=request.data, context={'request': request})
serializer.is_valid(raise_exception=True)
user = serializer.validated_data['user']
if user is None:
return FormattedResponse(status=HTTP_401_UNAUTHORIZED, d={'reason': 'login_failed'}, m='login_failed')
if not user.has_2fa():
return FormattedResponse(status=HTTP_401_UNAUTHORIZED, d={'reason': '2fa_not_enabled'}, m='2fa_not_enabled')
token = serializer.data['tfa']
if len(token) == 6:
if user.totp_device is not None and user.totp_device.validate_token(token):
return self.issue_token(user)
elif len(token) == 8:
for code in user.backup_codes:
if token == code.code:
code.delete()
return self.issue_token(user)
return self.issue_token(user) | Class | 2 |
def insensitive_contains(field: Term, value: str) -> Criterion:
return Upper(field).like(Upper(f"%{value}%")) | Base | 1 |
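The helper above splices the raw value into a LIKE pattern, so `%` and `_` in user input act as wildcards. A sketch of neutralizing them first, independent of the query builder (depending on the database an explicit `ESCAPE` clause may also be needed):

def escape_like(value: str, escape_char: str = "\\") -> str:
    # escape the escape character itself first, then the LIKE metacharacters
    return (value.replace(escape_char, escape_char * 2)
                 .replace("%", escape_char + "%")
                 .replace("_", escape_char + "_"))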
def test_property_from_data_array(self, mocker):
name = mocker.MagicMock()
required = mocker.MagicMock()
data = oai.Schema(type="array", items={"type": "number", "default": "0.0"},)
ListProperty = mocker.patch(f"{MODULE_NAME}.ListProperty")
FloatProperty = mocker.patch(f"{MODULE_NAME}.FloatProperty")
from openapi_python_client.parser.properties import property_from_data
p = property_from_data(name=name, required=required, data=data)
FloatProperty.assert_called_once_with(name=f"{name}_item", required=True, default="0.0")
ListProperty.assert_called_once_with(
name=name, required=required, default=None, inner_property=FloatProperty.return_value
)
assert p == ListProperty.return_value | Base | 1 |
def register(request, registration_form=RegistrationForm):
if request.user.is_authenticated:
return redirect(request.GET.get('next', reverse('spirit:user:update')))
form = registration_form(data=post_data(request))
if (is_post(request) and
not request.is_limited() and
form.is_valid()):
user = form.save()
send_activation_email(request, user)
messages.info(
request, _(
"We have sent you an email to %(email)s "
"so you can activate your account!") % {'email': form.get_email()})
# TODO: email-less activation
# if not settings.REGISTER_EMAIL_ACTIVATION_REQUIRED:
# login(request, user)
# return redirect(request.GET.get('next', reverse('spirit:user:update')))
return redirect(reverse(settings.LOGIN_URL))
return render(
request=request,
template_name='spirit/user/auth/register.html',
context={'form': form}) | Base | 1 |
def func_begin(self, name):
ctype = get_c_type(name)
self.emit("PyObject*", 0)
self.emit("ast2obj_%s(void* _o)" % (name), 0)
self.emit("{", 0)
self.emit("%s o = (%s)_o;" % (ctype, ctype), 1)
self.emit("PyObject *result = NULL, *value = NULL;", 1)
self.emit('if (!o) {', 1)
self.emit("Py_INCREF(Py_None);", 2)
self.emit('return Py_None;', 2)
self.emit("}", 1)
self.emit('', 0) | Base | 1 |
def test_broken_chunked_encoding(self):
control_line = "20;\r\n" # 20 hex = 32 dec
s = "This string has 32 characters.\r\n"
to_send = "GET / HTTP/1.1\nTransfer-Encoding: chunked\n\n"
to_send += control_line + s
# garbage in input
to_send += "GET / HTTP/1.1\nTransfer-Encoding: chunked\n\n"
to_send += control_line + s
to_send = tobytes(to_send)
self.connect()
self.sock.send(to_send)
fp = self.sock.makefile("rb", 0)
line, headers, response_body = read_http(fp)
# receiver caught garbage and turned it into a 400
self.assertline(line, "400", "Bad Request", "HTTP/1.1")
cl = int(headers["content-length"])
self.assertEqual(cl, len(response_body))
self.assertEqual(
sorted(headers.keys()), ["connection", "content-length", "content-type", "date", "server"]
)
self.assertEqual(headers["content-type"], "text/plain")
# connection has been closed
self.send_check_error(to_send)
self.assertRaises(ConnectionClosed, read_http, fp) | Base | 1 |
def test_notfilelike_iobase_http11(self):
to_send = "GET /notfilelike_iobase HTTP/1.1\n\n"
to_send = tobytes(to_send)
self.connect()
for t in range(0, 2):
self.sock.send(to_send)
fp = self.sock.makefile("rb", 0)
line, headers, response_body = read_http(fp)
self.assertline(line, "200", "OK", "HTTP/1.1")
cl = int(headers["content-length"])
self.assertEqual(cl, len(response_body))
ct = headers["content-type"]
self.assertEqual(ct, "image/jpeg")
self.assertTrue(b"\377\330\377" in response_body) | Base | 1 |
def update_server_key(conf):
"""
Download the server's RSA key and store in the location
specified in the configuration.
:param conf: The consumer configuration object.
:type conf: dict
"""
host = conf['server']['host']
location = conf['server']['rsa_pub']
url = 'https://%s/pulp/static/rsa_pub.key' % host
try:
os.makedirs(os.path.dirname(location))
except OSError, e:
if e.errno != errno.EEXIST:
raise
download(url, location) | Base | 1 |
def test_show_unit_extensions(self):
self.test_change_due_date()
url = reverse('show_unit_extensions',
kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {'url': self.week1.location.to_deprecated_string()})
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(json.loads(response.content), {
u'data': [{u'Extended Due Date': u'2013-12-30 00:00',
u'Full Name': self.user1.profile.name,
u'Username': self.user1.username}],
u'header': [u'Username', u'Full Name', u'Extended Due Date'],
u'title': u'Users with due date extensions for %s' %
self.week1.display_name}) | Compound | 4 |
def get_share_link(recipe):
url = recipe.storage.url + '/ocs/v2.php/apps/files_sharing/api/v1/shares?format=json&path=' + recipe.file_path # noqa: E501
headers = {
"OCS-APIRequest": "true",
"Content-Type": "application/json"
}
r = requests.get(
url,
headers=headers,
auth=HTTPBasicAuth(
recipe.storage.username, recipe.storage.password
)
)
response_json = r.json()
for element in response_json['ocs']['data']:
if element['share_type'] == '3':
return element['url']
return Nextcloud.create_share_link(recipe) | Base | 1 |
def test_parse_header_gardenpath(self):
data = b"""\
GET /foobar HTTP/8.4
foo: bar"""
self.parser.parse_header(data)
self.assertEqual(self.parser.first_line, b"GET /foobar HTTP/8.4")
self.assertEqual(self.parser.headers["FOO"], "bar") | Base | 1 |
async def on_exchange_third_party_invite_request(
self, room_id: str, event_dict: JsonDict | Class | 2 |
def lcase(s):
try:
return unidecode.unidecode(s.lower())
except Exception as ex:
log = logger.create()
log.error_or_exception(ex)
return s.lower() | Base | 1 |
def org_login(org_slug):
session["org_slug"] = current_org.slug
return redirect(url_for(".authorize", next=request.args.get("next", None))) | Base | 1 |
def authenticate(self, request, username=None, code=None, **kwargs):
if username is None:
username = kwargs.get(get_user_model().USERNAME_FIELD)
if not username or not code:
return
try:
user = get_user_model()._default_manager.get_by_natural_key(username)
if not self.user_can_authenticate(user):
return
timeout = getattr(settings, 'NOPASSWORD_LOGIN_CODE_TIMEOUT', 900)
timestamp = timezone.now() - timedelta(seconds=timeout)
# We don't delete the login code when authenticating,
# as that is done during validation of the login form
# and validation should not have any side effects.
# It is the responsibility of the view/form to delete the token
# as soon as the login was successful.
user.login_code = LoginCode.objects.get(user=user, code=code, timestamp__gt=timestamp)
return user
except (get_user_model().DoesNotExist, LoginCode.DoesNotExist):
return | Base | 1 |
def _register_function_args(context: Context, sig: FunctionSignature) -> List[IRnode]:
ret = []
# the type of the calldata
base_args_t = TupleType([arg.typ for arg in sig.base_args])
# tuple with the abi_encoded args
if sig.is_init_func:
base_args_ofst = IRnode(0, location=DATA, typ=base_args_t, encoding=Encoding.ABI)
else:
base_args_ofst = IRnode(4, location=CALLDATA, typ=base_args_t, encoding=Encoding.ABI)
for i, arg in enumerate(sig.base_args):
arg_ir = get_element_ptr(base_args_ofst, i)
if _should_decode(arg.typ):
# allocate a memory slot for it and copy
p = context.new_variable(arg.name, arg.typ, is_mutable=False)
dst = IRnode(p, typ=arg.typ, location=MEMORY)
copy_arg = make_setter(dst, arg_ir)
copy_arg.source_pos = getpos(arg.ast_source)
ret.append(copy_arg)
else:
# leave it in place
context.vars[arg.name] = VariableRecord(
name=arg.name,
pos=arg_ir,
typ=arg.typ,
mutable=False,
location=arg_ir.location,
encoding=Encoding.ABI,
)
return ret | Class | 2 |
def to_simple(self, data, options):
"""
For a piece of data, attempts to recognize it and provide a simplified
form of something complex.
This brings complex Python data structures down to native types of the
serialization format(s).
"""
if isinstance(data, (list, tuple)):
return [self.to_simple(item, options) for item in data]
if isinstance(data, dict):
return dict((key, self.to_simple(val, options)) for (key, val) in data.iteritems())
elif isinstance(data, Bundle):
return dict((key, self.to_simple(val, options)) for (key, val) in data.data.iteritems())
elif hasattr(data, 'dehydrated_type'):
if getattr(data, 'dehydrated_type', None) == 'related' and data.is_m2m == False:
if data.full:
return self.to_simple(data.fk_resource, options)
else:
return self.to_simple(data.value, options)
elif getattr(data, 'dehydrated_type', None) == 'related' and data.is_m2m == True:
if data.full:
return [self.to_simple(bundle, options) for bundle in data.m2m_bundles]
else:
return [self.to_simple(val, options) for val in data.value]
else:
return self.to_simple(data.value, options)
elif isinstance(data, datetime.datetime):
return self.format_datetime(data)
elif isinstance(data, datetime.date):
return self.format_date(data)
elif isinstance(data, datetime.time):
return self.format_time(data)
elif isinstance(data, bool):
return data
elif type(data) in (long, int, float):
return data
elif data is None:
return None
else:
return force_unicode(data) | Class | 2 |
def stream_exists_backend(request, user_profile, stream_id, autosubscribe):
# type: (HttpRequest, UserProfile, int, bool) -> HttpResponse
try:
stream = get_and_validate_stream_by_id(stream_id, user_profile.realm)
except JsonableError:
stream = None
result = {"exists": bool(stream)}
if stream is not None:
recipient = get_recipient(Recipient.STREAM, stream.id)
if autosubscribe:
bulk_add_subscriptions([stream], [user_profile])
result["subscribed"] = is_active_subscriber(
user_profile=user_profile,
recipient=recipient)
return json_success(result) # results are ignored for HEAD requests
return json_response(data=result, status=404) | Class | 2 |
def do_download_file(book, book_format, client, data, headers):
if config.config_use_google_drive:
#startTime = time.time()
df = gd.getFileFromEbooksFolder(book.path, data.name + "." + book_format)
#log.debug('%s', time.time() - startTime)
if df:
return gd.do_gdrive_download(df, headers)
else:
abort(404)
else:
filename = os.path.join(config.config_calibre_dir, book.path)
if not os.path.isfile(os.path.join(filename, data.name + "." + book_format)):
# ToDo: improve error handling
log.error('File not found: %s', os.path.join(filename, data.name + "." + book_format))
if client == "kobo" and book_format == "kepub":
headers["Content-Disposition"] = headers["Content-Disposition"].replace(".kepub", ".kepub.epub")
response = make_response(send_from_directory(filename, data.name + "." + book_format))
# ToDo Check headers parameter
for element in headers:
response.headers[element[0]] = element[1]
log.info('Downloading file: {}'.format(os.path.join(filename, data.name + "." + book_format)))
return response | Base | 1 |
def test_chunking_request_without_content(self):
header = tobytes("GET / HTTP/1.1\n" "Transfer-Encoding: chunked\n\n")
self.connect()
self.sock.send(header)
self.sock.send(b"0\r\n\r\n")
fp = self.sock.makefile("rb", 0)
line, headers, echo = self._read_echo(fp)
self.assertline(line, "200", "OK", "HTTP/1.1")
self.assertEqual(echo.body, b"")
self.assertEqual(echo.content_length, "0")
self.assertFalse("transfer-encoding" in headers) | Base | 1 |
def _parse_a_camel_date_time(data: Dict[str, Any]) -> Union[datetime, date]:
a_camel_date_time: Union[datetime, date]
try:
a_camel_date_time = datetime.fromisoformat(data["aCamelDateTime"])
return a_camel_date_time
except:
pass
a_camel_date_time = date.fromisoformat(data["aCamelDateTime"])
return a_camel_date_time | Base | 1 |
def adv_search_shelf(q, include_shelf_inputs, exclude_shelf_inputs):
q = q.outerjoin(ub.BookShelf, db.Books.id == ub.BookShelf.book_id)\
.filter(or_(ub.BookShelf.shelf == None, ub.BookShelf.shelf.notin_(exclude_shelf_inputs)))
if len(include_shelf_inputs) > 0:
q = q.filter(ub.BookShelf.shelf.in_(include_shelf_inputs))
return q | Base | 1 |
def _expand(self, key_material):
output = [b""]
counter = 1
while (self._algorithm.digest_size // 8) * len(output) < self._length:
h = hmac.HMAC(key_material, self._algorithm, backend=self._backend)
h.update(output[-1])
h.update(self._info)
h.update(six.int2byte(counter))
output.append(h.finalize())
counter += 1
return b"".join(output)[:self._length] | Class | 2 |
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(http_client=None)
self.handler = hs.get_federation_handler()
self.store = hs.get_datastore()
return hs | Base | 1 |
def xsrf_token(self):
"""The XSRF-prevention token for the current user/session.
To prevent cross-site request forgery, we set an '_xsrf' cookie
and include the same '_xsrf' value as an argument with all POST
requests. If the two do not match, we reject the form submission
as a potential forgery.
See http://en.wikipedia.org/wiki/Cross-site_request_forgery
"""
if not hasattr(self, "_xsrf_token"):
token = self.get_cookie("_xsrf")
if not token:
token = binascii.b2a_hex(os.urandom(16))
expires_days = 30 if self.current_user else None
self.set_cookie("_xsrf", token, expires_days=expires_days)
self._xsrf_token = token
return self._xsrf_token | Base | 1 |
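When the `_xsrf` cookie described in the docstring is later compared against the form value, a plain `==` leaks timing information about where the strings first differ; the stdlib offers a constant-time check. A minimal sketch (argument names are hypothetical):

import hmac

def tokens_match(cookie_token: str, form_token: str) -> bool:
    # compare_digest runs in time independent of the position of the mismatch
    return hmac.compare_digest(cookie_token, form_token)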
def test_list_instructor_tasks_problem(self, act):
""" Test list task history for problem. """
act.return_value = self.tasks
url = reverse('list_instructor_tasks', kwargs={'course_id': self.course.id.to_deprecated_string()})
mock_factory = MockCompletionInfo()
with patch('instructor.views.instructor_task_helpers.get_task_completion_info') as mock_completion_info:
mock_completion_info.side_effect = mock_factory.mock_get_task_completion_info
response = self.client.get(url, {
'problem_location_str': self.problem_urlname,
})
self.assertEqual(response.status_code, 200)
# check response
self.assertTrue(act.called)
expected_tasks = [ftask.to_dict() for ftask in self.tasks]
actual_tasks = json.loads(response.content)['tasks']
for exp_task, act_task in zip(expected_tasks, actual_tasks):
self.assertDictEqual(exp_task, act_task)
self.assertEqual(actual_tasks, expected_tasks) | Compound | 4 |
def CreateID(self):
"""Create a packet ID. All RADIUS requests have a ID which is used to
identify a request. This is used to detect retries and replay attacks.
This function returns a suitable random number that can be used as ID.
:return: ID number
:rtype: integer
"""
return random.randrange(0, 256) | Class | 2 |
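Like the authenticator, this ID comes from the non-cryptographic `random` module; since Python 3.6 the stdlib `secrets` module is the drop-in for security-sensitive draws. Sketch:

import secrets

def create_id() -> int:
    # CSPRNG-backed integer in [0, 256), unlike random.randrange
    return secrets.randbelow(256)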
def test_list_course_role_members_bad_rolename(self):
""" Test with an invalid rolename parameter. """
url = reverse('list_course_role_members', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'rolename': 'robot-not-a-rolename',
})
self.assertEqual(response.status_code, 400) | Compound | 4 |
def test_nonexistent_catalog_is_none(self):
pardir = self.get_test_dir(erase=1)
cat = catalog.get_catalog(pardir,'r')
self.remove_dir(pardir)
assert_(cat is None) | Class | 2 |
def _access_endpoint(self, endpoint, args, status_code, msg):
"""
Asserts that accessing the given `endpoint` gets a response of `status_code`.
endpoint: string, endpoint for instructor dash API
args: dict, kwargs for `reverse` call
status_code: expected HTTP status code response
msg: message to display if assertion fails.
"""
url = reverse(endpoint, kwargs={'course_id': self.course.id.to_deprecated_string()})
if endpoint in ['send_email', 'students_update_enrollment', 'bulk_beta_modify_access']:
response = self.client.post(url, args)
else:
response = self.client.get(url, args)
self.assertEqual(
response.status_code,
status_code,
msg=msg
) | Compound | 4 |
def test_get_student_progress_url(self):
""" Test that progress_url is in the successful response. """
url = reverse('get_student_progress_url', kwargs={'course_id': self.course.id.to_deprecated_string()})
url += "?unique_student_identifier={}".format(
quote(self.students[0].email.encode("utf-8"))
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
res_json = json.loads(response.content)
self.assertIn('progress_url', res_json) | Compound | 4 |
def test_get_anon_ids(self):
"""
Test the CSV output for the anonymized user ids.
"""
url = reverse('get_anon_ids', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {})
self.assertEqual(response['Content-Type'], 'text/csv')
body = response.content.replace('\r', '')
self.assertTrue(body.startswith(
'"User ID","Anonymized User ID","Course Specific Anonymized User ID"'
'\n"{user_id}","41","42"\n'.format(user_id=self.students[0].id)
))
self.assertTrue(
body.endswith('"{user_id}","41","42"\n'.format(user_id=self.students[-1].id))
) | Compound | 4 |
def test_digest_object_auth_info():
credentials = ("joe", "password")
host = None
request_uri = "/digest/nextnonce/"
headers = {}
response = httplib2.Response({})
response["www-authenticate"] = (
'Digest realm="myrealm", nonce="barney", '
'algorithm=MD5, qop="auth", stale=true'
)
response["authentication-info"] = 'nextnonce="fred"'
content = b""
d = httplib2.DigestAuthentication(
credentials, host, request_uri, headers, response, content, None
)
# Returns true to force a retry
assert not d.response(response, content)
assert d.challenge["nonce"] == "fred"
assert d.challenge["nc"] == 1 | Class | 2 |
def test_get_sale_records_features_csv(self):
"""
Test that the response from get_sale_records is in csv format.
"""
for i in range(2):
course_registration_code = CourseRegistrationCode(
code='sale_invoice{}'.format(i),
course_id=self.course.id.to_deprecated_string(),
created_by=self.instructor,
invoice=self.sale_invoice_1,
invoice_item=self.invoice_item,
mode_slug='honor'
)
course_registration_code.save()
url = reverse(
'get_sale_records',
kwargs={'course_id': self.course.id.to_deprecated_string()}
)
response = self.client.get(url + '/csv', {})
self.assertEqual(response['Content-Type'], 'text/csv') | Compound | 4 |
def format_time(self, data):
"""
A hook to control how times are formatted.
Can be overridden at the ``Serializer`` level (``datetime_formatting``)
or globally (via ``settings.TASTYPIE_DATETIME_FORMATTING``).
Default is ``iso-8601``, which looks like "03:02:14".
"""
if self.datetime_formatting == 'rfc-2822':
return format_time(data)
return data.isoformat() | Class | 2 |
def decompile(self):
self.writeln("** Decompiling APK...", clr.OKBLUE)
with ZipFile(self.file) as zipped:
try:
dex = self.tempdir + "/" + self.apk.package + ".dex"
with open(dex, "wb") as classes:
classes.write(zipped.read("classes.dex"))
except Exception as e:
sys.exit(self.writeln(str(e), clr.WARNING))
dec = "%s %s -d %s --deobf" % (self.jadx, dex, self.tempdir)
os.system(dec)
return self.tempdir | Base | 1 |
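The `os.system` call above interpolates file paths into a shell command line, so characters in a crafted APK name can inject shell syntax. A sketch of the usual remedy, passing argv as a list through `subprocess` (the parameter names are illustrative):

import subprocess

def run_jadx(jadx_path: str, dex_path: str, out_dir: str) -> None:
    # no shell is involved, so path contents are passed verbatim as arguments
    subprocess.run([jadx_path, dex_path, "-d", out_dir, "--deobf"], check=True)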
def replace(self, key, value):
self.dict[_hkey(key)] = [value if isinstance(value, unicode) else
str(value)] | Base | 1 |
def load(self, stream):
'''read vault stream and return python object'''
return yaml.load(self.vault.decrypt(stream)) | Base | 1 |
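`yaml.load` without an explicit safe loader can construct arbitrary Python objects from the decrypted stream. A minimal sketch of the safe equivalent:

import yaml

def load_vault(decrypted_text):
    # safe_load builds only plain YAML types (str, int, float, list, dict, ...)
    return yaml.safe_load(decrypted_text)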
def get(self, id, project=None):
if not project:
project = g.project
return (
Person.query.filter(Person.id == id)
.filter(Project.id == project.id)
.one()
) | Class | 2 |
def read_requirements(name):
project_root = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(project_root, name), 'rb') as f:
# remove whitespace and comments
g = (line.decode('utf-8').lstrip().split('#', 1)[0].rstrip() for line in f)
return [l for l in g if l] | Class | 2 |
def mark_all_as_read(request):
(TopicNotification.objects
.for_access(request.user)
.filter(is_read=False)
.update(is_read=True))
return redirect(request.POST.get(
'next', reverse('spirit:topic:notification:index'))) | Base | 1 |
async def ignore_global(self, ctx, command: str.lower):
"""
Globally ignore or unignore the specified action.
The bot will no longer respond to these actions.
"""
try:
await self.config.get_raw("custom", command)
except KeyError:
await self.config.set_raw("custom", command, value=None)
else:
await self.config.clear_raw("custom", command)
await ctx.tick()
| Base | 1 |
def save(self):
self['__meta__'] = {
'httpie': __version__
}
if self.helpurl:
self['__meta__']['help'] = self.helpurl
if self.about:
self['__meta__']['about'] = self.about
self.ensure_directory()
json_string = json.dumps(
obj=self,
indent=4,
sort_keys=True,
ensure_ascii=True,
)
self.path.write_text(json_string + '\n', encoding=UTF8) | Class | 2 |
def test_list(self):
self.user.session_set.create(session_key='ABC123', ip='127.0.0.1',
expire_date=datetime.now() + timedelta(days=1),
user_agent='Firefox')
response = self.client.get(reverse('user_sessions:session_list'))
self.assertContains(response, 'Active Sessions')
self.assertContains(response, 'End Session', 3)
self.assertContains(response, 'Firefox') | Class | 2 |
def test_invalid_sum(self):
pos = dict(lineno=2, col_offset=3)
m = ast.Module([ast.Expr(ast.expr(**pos), **pos)])
with self.assertRaises(TypeError) as cm:
compile(m, "<test>", "exec")
self.assertIn("but got <_ast.expr", str(cm.exception)) | Base | 1 |
def _register_function_args(context: Context, sig: FunctionSignature) -> List[IRnode]:
ret = []
# the type of the calldata
base_args_t = TupleType([arg.typ for arg in sig.base_args])
# tuple with the abi_encoded args
if sig.is_init_func:
base_args_ofst = IRnode(0, location=DATA, typ=base_args_t, encoding=Encoding.ABI)
else:
base_args_ofst = IRnode(4, location=CALLDATA, typ=base_args_t, encoding=Encoding.ABI)
for i, arg in enumerate(sig.base_args):
arg_ir = get_element_ptr(base_args_ofst, i)
if _should_decode(arg.typ):
# allocate a memory slot for it and copy
p = context.new_variable(arg.name, arg.typ, is_mutable=False)
dst = IRnode(p, typ=arg.typ, location=MEMORY)
copy_arg = make_setter(dst, arg_ir)
copy_arg.source_pos = getpos(arg.ast_source)
ret.append(copy_arg)
else:
# leave it in place
context.vars[arg.name] = VariableRecord(
name=arg.name,
pos=arg_ir,
typ=arg.typ,
mutable=False,
location=arg_ir.location,
encoding=Encoding.ABI,
)
return ret | Base | 1 |
def test_send_empty_body(self):
to_send = "GET / HTTP/1.0\n" "Content-Length: 0\n\n"
to_send = tobytes(to_send)
self.connect()
self.sock.send(to_send)
fp = self.sock.makefile("rb", 0)
line, headers, echo = self._read_echo(fp)
self.assertline(line, "200", "OK", "HTTP/1.0")
self.assertEqual(echo.content_length, "0")
self.assertEqual(echo.body, b"") | Base | 1 |
def test_received_preq_completed_connection_close(self):
inst, sock, map = self._makeOneWithMap()
inst.server = DummyServer()
preq = DummyParser()
inst.request = preq
preq.completed = True
preq.empty = True
preq.connection_close = True
inst.received(b"GET / HTTP/1.1\n\n" + b"a" * 50000)
self.assertEqual(inst.request, None)
self.assertEqual(inst.server.tasks, []) | Base | 1 |
def test_reset_entrance_exam_all_student_attempts(self, act):
""" Test reset all student attempts for entrance exam. """
url = reverse('reset_student_attempts_for_entrance_exam',
kwargs={'course_id': unicode(self.course.id)})
response = self.client.get(url, {
'all_students': True,
})
self.assertEqual(response.status_code, 200)
self.assertTrue(act.called) | Compound | 4 |
def test_confirmation_obj_not_exist_error(self) -> None:
"""Since the key is a param input by the user to the registration endpoint,
if it inserts an invalid value, the confirmation object won't be found. This
tests if, in that scenario, we handle the exception by redirecting the user to
the confirmation_link_expired_error page.
"""
email = self.nonreg_email("alice")
password = "password"
realm = get_realm("zulip")
inviter = self.example_user("iago")
prereg_user = PreregistrationUser.objects.create(
email=email, referred_by=inviter, realm=realm
)
confirmation_link = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
registration_key = "invalid_confirmation_key"
url = "/accounts/register/"
response = self.client_post(
url, {"key": registration_key, "from_confirmation": 1, "full_nme": "alice"}
)
self.assertEqual(response.status_code, 404)
self.assert_in_response("The registration link has expired or is not valid.", response)
registration_key = confirmation_link.split("/")[-1]
response = self.client_post(
url, {"key": registration_key, "from_confirmation": 1, "full_nme": "alice"}
)
self.assert_in_success_response(["We just need you to do one last thing."], response)
response = self.submit_reg_form_for_user(email, password, key=registration_key)
self.assertEqual(response.status_code, 302) | Base | 1 |
def test_open_with_filename(self):
tmpname = mktemp('', 'mmap')
fp = memmap(tmpname, dtype=self.dtype, mode='w+',
shape=self.shape)
fp[:] = self.data[:]
del fp
os.unlink(tmpname) | Base | 1 |
def expr(self, node, msg=None, *, exc=ValueError):
mod = ast.Module([ast.Expr(node)])
self.mod(mod, msg, exc=exc) | Base | 1 |
def publish(self, id_, identity, uow=None):
"""Publish a draft.
Idea:
- Get the draft from the data layer (draft is not passed in)
- Validate it more strictly than when it was originally saved
(drafts can be incomplete but only complete drafts can be turned
into records)
- Create or update associated (published) record with data
"""
self.require_permission(identity, "publish")
# Get the draft
draft = self.draft_cls.pid.resolve(id_, registered_only=False)
# Validate the draft strictly - since a draft can be saved with errors
# we do a strict validation here to make sure only valid drafts can be
# published.
self._validate_draft(identity, draft)
# Create the record from the draft
latest_id = draft.versions.latest_id
record = self.record_cls.publish(draft)
# Run components
self.run_components(
'publish', identity, draft=draft, record=record, uow=uow)
# Commit and index
uow.register(RecordCommitOp(record, indexer=self.indexer))
uow.register(RecordDeleteOp(draft, force=False, indexer=self.indexer))
if latest_id:
self._reindex_latest(latest_id, uow=uow)
return self.result_item(
self, identity, record, links_tpl=self.links_item_tpl) | Class | 2 |
def verify_cert_against_ca(self, filename, entry):
"""
check that a certificate validates against the ca cert,
and that it has not expired.
"""
chaincert = self.CAs[self.cert_specs[entry.get('name')]['ca']].get('chaincert')
cert = self.data + filename
cmd = "openssl verify -CAfile %s %s" % (chaincert, cert)
res = Popen(cmd, shell=True, stdout=PIPE, stderr=STDOUT).stdout.read()
if res == cert + ": OK\n":
return True
return False | Class | 2 |
def class_instances_from_soap_enveloped_saml_thingies(text, modules):
"""Parses a SOAP enveloped header and body SAML thing and returns the
thing as a dictionary class instance.
:param text: The SOAP object as XML
:param modules: modules representing xsd schemas
:return: The body and headers as class instances
"""
try:
envelope = ElementTree.fromstring(text)
except Exception as exc:
raise XmlParseError("%s" % exc)
assert envelope.tag == '{%s}Envelope' % soapenv.NAMESPACE
assert len(envelope) >= 1
env = {"header": [], "body": None}
for part in envelope:
if part.tag == '{%s}Body' % soapenv.NAMESPACE:
assert len(part) == 1
env["body"] = instanciate_class(part[0], modules)
elif part.tag == "{%s}Header" % soapenv.NAMESPACE:
for item in part:
env["header"].append(instanciate_class(item, modules))
return env | Base | 1 |
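`xml.etree.ElementTree.fromstring` on untrusted SOAP input is exposed to entity-expansion tricks; the third-party `defusedxml` package ships a guarded drop-in parser. A sketch, assuming `defusedxml` is installed:

import defusedxml.ElementTree as SafeElementTree

def parse_envelope(text):
    # raises on malicious entity definitions instead of expanding them
    return SafeElementTree.fromstring(text)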
def generate_config_section(self, config_dir_path, server_name, **kwargs):
return """\
## Federation ##
# Restrict federation to the following whitelist of domains.
# N.B. we recommend also firewalling your federation listener to limit
# inbound federation traffic as early as possible, rather than relying
# purely on this application-layer restriction. If not specified, the
# default is to whitelist everything.
#
#federation_domain_whitelist:
# - lon.example.com
# - nyc.example.com
# - syd.example.com
# Prevent federation requests from being sent to the following
# blacklist IP address CIDR ranges. If this option is not specified, or
# specified with an empty list, no ip range blacklist will be enforced.
#
# As of Synapse v1.4.0 this option also affects any outbound requests to identity
# servers provided by user input.
#
# (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly
# listed here, since they correspond to unroutable addresses.)
#
federation_ip_range_blacklist:
- '127.0.0.0/8'
- '10.0.0.0/8'
- '172.16.0.0/12'
- '192.168.0.0/16'
- '100.64.0.0/10'
- '169.254.0.0/16'
- '::1/128'
- 'fe80::/64'
- 'fc00::/7'
# Report prometheus metrics on the age of PDUs being sent to and received from
# the following domains. This can be used to give an idea of "delay" on inbound
# and outbound federation, though be aware that any delay can be due to problems
# at either end or with the intermediate network.
#
# By default, no domains are monitored in this way.
#
#federation_metrics_domains:
# - matrix.org
# - example.com
""" | Base | 1 |
def _get_object_element(dataset, seq_no, rel_path, download_link):
"""If rel_path and download_link are not None, we are called from scope.
Otherwise we are called from ID and need to run SQL query to fetch these attrs."""
if rel_path is None:
query = "SELECT rel_path, download_link FROM " + \
dataset + \
" WHERE sequence_no = %s"
cnx = mysql.connector.connect(user=DB_USER,
password=DB_PASSWORD,
host=DB_HOST,
database=DB_DBNAME,
port=DB_PORT)
cursor = cnx.cursor()
cursor.execute(query, (seq_no,))
row = cursor.fetchone()
if not row:
return None
rel_path, download_link = row[0], row[1]
if LOCAL_OBJ_URI:
src_uri = 'file://' + os.path.join(DATAROOT, dataset, rel_path)
else:
src_uri = url_for('.get_object_src_http', dataset=dataset, rel_path=rel_path)
return '<object id={} src={} hyperfind.external-link={} />' \
.format(
quoteattr(url_for('.get_object_id', dataset=dataset, seq_no=seq_no)),
quoteattr(src_uri),
quoteattr(download_link)) | Base | 1 |
def save_cover(img, book_path):
content_type = img.headers.get('content-type')
if use_IM:
if content_type not in ('image/jpeg', 'image/png', 'image/webp', 'image/bmp'):
log.error("Only jpg/jpeg/png/webp/bmp files are supported as coverfile")
return False, _("Only jpg/jpeg/png/webp/bmp files are supported as coverfile")
# convert to jpg because calibre only supports jpg
if content_type != 'image/jpg':
try:
if hasattr(img, 'stream'):
imgc = Image(blob=img.stream)
else:
imgc = Image(blob=io.BytesIO(img.content))
imgc.format = 'jpeg'
imgc.transform_colorspace("rgb")
img = imgc
except (BlobError, MissingDelegateError):
log.error("Invalid cover file content")
return False, _("Invalid cover file content")
else:
if content_type not in 'image/jpeg':
log.error("Only jpg/jpeg files are supported as coverfile")
return False, _("Only jpg/jpeg files are supported as coverfile")
if config.config_use_google_drive:
tmp_dir = os.path.join(gettempdir(), 'calibre_web')
if not os.path.isdir(tmp_dir):
os.mkdir(tmp_dir)
ret, message = save_cover_from_filestorage(tmp_dir, "uploaded_cover.jpg", img)
if ret is True:
gd.uploadFileToEbooksFolder(os.path.join(book_path, 'cover.jpg').replace("\\","/"),
os.path.join(tmp_dir, "uploaded_cover.jpg"))
log.info("Cover is saved on Google Drive")
return True, None
else:
return False, message
else:
return save_cover_from_filestorage(os.path.join(config.config_calibre_dir, book_path), "cover.jpg", img) | Base | 1 |
def makeTrustRoot(self):
# If this option is specified, use a specific root CA cert. This is useful for testing when it's not
# practical to get the client cert signed by a real root CA but should never be used on a production server.
caCertFilename = self.sydent.cfg.get('http', 'replication.https.cacert')
if len(caCertFilename) > 0:
try:
fp = open(caCertFilename)
caCert = twisted.internet.ssl.Certificate.loadPEM(fp.read())
fp.close()
except:
logger.warn("Failed to open CA cert file %s", caCertFilename)
raise
logger.warn("Using custom CA cert file: %s", caCertFilename)
return twisted.internet._sslverify.OpenSSLCertificateAuthorities([caCert.original])
else:
return twisted.internet.ssl.OpenSSLDefaultPaths() | Base | 1 |
def _rpsl_db_query_to_graphql_out(query: RPSLDatabaseQuery, info: GraphQLResolveInfo):
"""
Given an RPSL database query, execute it and clean up the output
to be suitable to return to GraphQL.
Main changes are:
- Enum handling
- Adding the asn and prefix fields if applicable
- Ensuring the right fields are returned as a list of strings or a string
"""
database_handler = info.context['request'].app.state.database_handler
if info.context.get('sql_trace'):
if 'sql_queries' not in info.context:
info.context['sql_queries'] = [repr(query)]
else:
info.context['sql_queries'].append(repr(query))
for row in database_handler.execute_query(query, refresh_on_error=True):
graphql_result = {snake_to_camel_case(k): v for k, v in row.items() if k != 'parsed_data'}
if 'object_text' in row:
graphql_result['objectText'] = remove_auth_hashes(row['object_text'])
if 'rpki_status' in row:
graphql_result['rpkiStatus'] = row['rpki_status']
if row.get('ip_first') is not None and row.get('prefix_length'):
graphql_result['prefix'] = row['ip_first'] + '/' + str(row['prefix_length'])
if row.get('asn_first') is not None and row.get('asn_first') == row.get('asn_last'):
graphql_result['asn'] = row['asn_first']
object_type = resolve_rpsl_object_type(row)
for key, value in row.get('parsed_data', dict()).items():
if key == 'auth':
value = remove_auth_hashes(value)
graphql_type = schema.graphql_types[object_type][key]
if graphql_type == 'String' and isinstance(value, list):
value = '\n'.join(value)
graphql_result[snake_to_camel_case(key)] = value
yield graphql_result | Base | 1 |
def test_modify_access_with_inactive_user(self):
self.other_user.is_active = False
self.other_user.save() # pylint: disable=no-member
url = reverse('modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'unique_student_identifier': self.other_user.username,
'rolename': 'beta',
'action': 'allow',
})
self.assertEqual(response.status_code, 200)
expected = {
'unique_student_identifier': self.other_user.username,
'inactiveUser': True,
}
res_json = json.loads(response.content)
self.assertEqual(res_json, expected) | Compound | 4 |
def test_exchange_revoked_invite(self):
user_id = self.register_user("kermit", "test")
tok = self.login("kermit", "test")
room_id = self.helper.create_room_as(room_creator=user_id, tok=tok)
# Send a 3PID invite event with an empty body so it's considered as a revoked one.
invite_token = "sometoken"
self.helper.send_state(
room_id=room_id,
event_type=EventTypes.ThirdPartyInvite,
state_key=invite_token,
body={},
tok=tok,
)
d = self.handler.on_exchange_third_party_invite_request(
room_id=room_id,
event_dict={
"type": EventTypes.Member,
"room_id": room_id,
"sender": user_id,
"state_key": "@someone:example.org",
"content": {
"membership": "invite",
"third_party_invite": {
"display_name": "alice",
"signed": {
"mxid": "@alice:localhost",
"token": invite_token,
"signatures": {
"magic.forest": {
"ed25519:3": "fQpGIW1Snz+pwLZu6sTy2aHy/DYWWTspTJRPyNp0PKkymfIsNffysMl6ObMMFdIJhk6g6pwlIqZ54rxo8SLmAg"
}
},
},
},
},
},
)
failure = self.get_failure(d, AuthError).value
self.assertEqual(failure.code, 403, failure)
self.assertEqual(failure.errcode, Codes.FORBIDDEN, failure)
self.assertEqual(failure.msg, "You are not invited to this room.") | Class | 2 |
def table_xchange_author_title():
vals = request.get_json().get('xchange')
if vals:
for val in vals:
modif_date = False
edited_books_id = None
book = calibre_db.get_book(val)
authors = book.title
book.authors = calibre_db.order_authors([book])
author_names = []
for authr in book.authors:
author_names.append(authr.name.replace('|', ','))
title_change = handle_title_on_edit(book, " ".join(author_names))
input_authors, authorchange, renamed = handle_author_on_edit(book, authors)
if authorchange or title_change:
edited_books_id = book.id
modif_date = True
if config.config_use_google_drive:
gdriveutils.updateGdriveCalibreFromLocal()
if edited_books_id:
helper.update_dir_structure(edited_books_id, config.config_calibre_dir, input_authors[0],
renamed_author=renamed)
if modif_date:
book.last_modified = datetime.utcnow()
try:
calibre_db.session.commit()
except (OperationalError, IntegrityError) as e:
calibre_db.session.rollback()
log.error_or_exception("Database error: %s", e)
return json.dumps({'success': False})
if config.config_use_google_drive:
gdriveutils.updateGdriveCalibreFromLocal()
return json.dumps({'success': True})
return "" | Base | 1 |
def test_security_check(self, password='password'):
login_url = reverse('login')
# Those URLs should not pass the security check
for bad_url in ('http://example.com',
'https://example.com',
'ftp://exampel.com',
'//example.com'):
nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
'url': login_url,
'next': REDIRECT_FIELD_NAME,
'bad_url': urlquote(bad_url),
}
response = self.client.post(nasty_url, {
'username': 'testclient',
'password': password,
})
self.assertEqual(response.status_code, 302)
self.assertFalse(bad_url in response.url,
"%s should be blocked" % bad_url)
# These URLs *should* still pass the security check
for good_url in ('/view/?param=http://example.com',
'/view/?param=https://example.com',
'/view?param=ftp://exampel.com',
'view/?param=//example.com',
'https:///',
'//testserver/',
'/url%20with%20spaces/'): # see ticket #12534
safe_url = '%(url)s?%(next)s=%(good_url)s' % {
'url': login_url,
'next': REDIRECT_FIELD_NAME,
'good_url': urlquote(good_url),
}
response = self.client.post(safe_url, {
'username': 'testclient',
'password': password,
})
self.assertEqual(response.status_code, 302)
self.assertTrue(good_url in response.url,
"%s should be allowed" % good_url) | Base | 1 |
def test_send_event_single_sender(self):
"""Test that using a single federation sender worker correctly sends a
new event.
"""
mock_client = Mock(spec=["put_json"])
mock_client.put_json.return_value = make_awaitable({})
self.make_worker_hs(
"synapse.app.federation_sender",
{"send_federation": True},
http_client=mock_client,
)
user = self.register_user("user", "pass")
token = self.login("user", "pass")
room = self.create_room_with_remote_server(user, token)
mock_client.put_json.reset_mock()
self.create_and_send_event(room, UserID.from_string(user))
self.replicate()
# Assert that the event was sent out over federation.
mock_client.put_json.assert_called()
self.assertEqual(mock_client.put_json.call_args[0][0], "other_server")
self.assertTrue(mock_client.put_json.call_args[1]["data"].get("pdus")) | Base | 1 |
def test_credits_view_json(self):
response = self.get_credits("json")
self.assertEqual(response.status_code, 200)
self.assertJSONEqual(
response.content.decode(),
[{"Czech": [["[email protected]", "Weblate Test", 1]]}],
) | Base | 1 |
def new_type_to_old_type(typ: new.BasePrimitive) -> old.NodeType:
if isinstance(typ, new.BoolDefinition):
return old.BaseType("bool")
if isinstance(typ, new.AddressDefinition):
return old.BaseType("address")
if isinstance(typ, new.InterfaceDefinition):
return old.InterfaceType(typ._id)
if isinstance(typ, new.BytesMDefinition):
m = typ._length # type: ignore
return old.BaseType(f"bytes{m}")
if isinstance(typ, new.BytesArrayDefinition):
return old.ByteArrayType(typ.length)
if isinstance(typ, new.StringDefinition):
return old.StringType(typ.length)
if isinstance(typ, new.DecimalDefinition):
return old.BaseType("decimal")
if isinstance(typ, new.SignedIntegerAbstractType):
bits = typ._bits # type: ignore
return old.BaseType("int" + str(bits))
if isinstance(typ, new.UnsignedIntegerAbstractType):
bits = typ._bits # type: ignore
return old.BaseType("uint" + str(bits))
if isinstance(typ, new.ArrayDefinition):
return old.SArrayType(new_type_to_old_type(typ.value_type), typ.length)
if isinstance(typ, new.DynamicArrayDefinition):
return old.DArrayType(new_type_to_old_type(typ.value_type), typ.length)
if isinstance(typ, new.TupleDefinition):
return old.TupleType(typ.value_type)
if isinstance(typ, new.StructDefinition):
return old.StructType(
{n: new_type_to_old_type(t) for (n, t) in typ.members.items()}, typ._id
)
raise InvalidType(f"unknown type {typ}") | Base | 1 |
def log_request(handler):
"""log a bit more information about each request than tornado's default
- move static file get success to debug-level (reduces noise)
- get proxied IP instead of proxy IP
- log referer for redirect and failed requests
- log user-agent for failed requests
"""
status = handler.get_status()
request = handler.request
try:
logger = handler.log
except AttributeError:
logger = access_log
if status < 300 or status == 304:
# Successes (or 304 FOUND) are debug-level
log_method = logger.debug
elif status < 400:
log_method = logger.info
elif status < 500:
log_method = logger.warning
else:
log_method = logger.error
request_time = 1000.0 * handler.request.request_time()
ns = dict(
status=status,
method=request.method,
ip=request.remote_ip,
uri=request.uri,
request_time=request_time,
)
msg = "{status} {method} {uri} ({ip}) {request_time:.2f}ms"
if status >= 400:
# log bad referers
ns["referer"] = request.headers.get("Referer", "None")
msg = msg + " referer={referer}"
if status >= 500 and status != 502:
# log all headers if it caused an error
log_method(json.dumps(dict(request.headers), indent=2))
log_method(msg.format(**ns))
prometheus_log_method(handler) | Base | 1 |
def test_modify_access_revoke(self):
url = reverse('modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'unique_student_identifier': self.other_staff.email,
'rolename': 'staff',
'action': 'revoke',
})
self.assertEqual(response.status_code, 200) | Compound | 4 |
def test_logout_unknown_token(self):
login_code = LoginCode.objects.create(user=self.user, code='foobar')
self.client.login(username=self.user.username, code=login_code.code)
response = self.client.post(
'/accounts-rest/logout/',
HTTP_AUTHORIZATION='Token unknown',
)
self.assertEqual(response.status_code, 200) | Base | 1 |
def delete_scans(request):
context = {}
if request.method == "POST":
list_of_scan_id = []
for key, value in request.POST.items():
if key != "scan_history_table_length" and key != "csrfmiddlewaretoken":
ScanHistory.objects.filter(id=value).delete()
messages.add_message(
request,
messages.INFO,
'All Scans deleted!')
return HttpResponseRedirect(reverse('scan_history')) | Class | 2 |
def test_qute_settings_persistence(short_tmpdir, request, quteproc_new):
"""Make sure settings from qute://settings are persistent."""
args = _base_args(request.config) + ['--basedir', str(short_tmpdir)]
quteproc_new.start(args)
quteproc_new.open_path(
'qute://settings/set?option=search.ignore_case&value=always')
assert quteproc_new.get_setting('search.ignore_case') == 'always'
quteproc_new.send_cmd(':quit')
quteproc_new.wait_for_quit()
quteproc_new.start(args)
assert quteproc_new.get_setting('search.ignore_case') == 'always' | Compound | 4 |
def mysql_insensitive_contains(field: Field, value: str) -> Criterion:
return functions.Upper(functions.Cast(field, SqlTypes.CHAR)).like(functions.Upper(f"%{value}%")) | Base | 1 |
def update(self, **kwargs):
consumer_id = load_consumer_id(self.context)
if not consumer_id:
self.prompt.render_failure_message("This consumer is not registered to the Pulp server.")
return
delta = dict([(k, v) for k, v in kwargs.items() if v is not None])
if 'note' in delta.keys():
if delta['note']:
delta['notes'] = args_to_notes_dict(kwargs['note'], include_none=False)
delta.pop('note')
# convert display-name to display_name
key = 'display-name'
if key in delta:
v = delta.pop(key)
key = key.replace('-', '_')
delta[key] = v
if kwargs.get(OPTION_EXCHANGE_KEYS.keyword):
path = self.context.config['authentication']['rsa_pub']
fp = open(path)
try:
delta['rsa_pub'] = fp.read()
finally:
fp.close()
try:
self.context.server.consumer.update(consumer_id, delta)
self.prompt.render_success_message('Consumer [%s] successfully updated' % consumer_id)
if not kwargs.get(OPTION_EXCHANGE_KEYS.keyword):
return
try:
update_server_key(self.context.config)
except Exception, e:
msg = _('Download server RSA key failed [%(e)s]' % {'e': e})
self.prompt.render_failure_message(msg)
except NotFoundException:
self.prompt.write('Consumer [%s] does not exist on the server' % consumer_id, tag='not-found') | Base | 1 |
def run(self, stat_name, criticity, commands, repeat, mustache_dict=None):
"""Run the commands (in background).
- stats_name: plugin_name (+ header)
- criticity: criticity of the trigger
- commands: a list of command line with optional {{mustache}}
- If True, then repeat the action
- mustache_dict: Plugin stats (can be use within {{mustache}})
Return True if the commands have been ran.
"""
if (self.get(stat_name) == criticity and not repeat) or \
not self.start_timer.finished():
# Action already executed => Exit
return False
logger.debug("{} action {} for {} ({}) with stats {}".format(
"Repeat" if repeat else "Run",
commands, stat_name, criticity, mustache_dict))
# Run all actions in background
for cmd in commands:
# Replace {{arg}} with the matching value from the dict (thanks to {{mustache}})
if chevron_tag:
cmd_full = chevron.render(cmd, mustache_dict)
else:
cmd_full = cmd
# Execute the action
logger.info("Action triggered for {} ({}): {}".format(stat_name,
criticity,
cmd_full))
logger.debug("Action will be executed with the following command: \
subprocess.Popen({}, shell=False)".format(cmd_full.split(' ')))
try:
Popen(cmd_full.split(' '), shell=False)
except OSError as e:
logger.error("Can't execute the action ({})".format(e))
self.set(stat_name, criticity)
return True | Base | 1 |
def validate(self, data):
user = providers.get_provider('login').login_user(**data, context=self.context)
if user is not None:
data['user'] = user
return data | Class | 2 |
def _decompress(compressed_path: Text, target_path: Text) -> None:
with tarfile.open(compressed_path, "r:gz") as tar:
tar.extractall(target_path) # target dir will be created if it not exists | Base | 1 |
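`TarFile.extractall` writes member names verbatim, so an archive entry such as `../../x` escapes `target_path` (path traversal). A minimal sketch that validates members first; on Python 3.12+ passing `filter="data"` to `extractall` achieves the same effect:

import os
import tarfile

def safe_decompress(compressed_path: str, target_path: str) -> None:
    root = os.path.realpath(target_path)
    with tarfile.open(compressed_path, "r:gz") as tar:
        for member in tar.getmembers():
            dest = os.path.realpath(os.path.join(target_path, member.name))
            # refuse any member whose resolved path leaves the target directory
            if dest != root and not dest.startswith(root + os.sep):
                raise ValueError("unsafe path in archive: %s" % member.name)
        tar.extractall(target_path)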
def del_project(request, client_id, project):
if request.method == 'GET':
client = Client.objects.get(id=client_id)
try:
scrapyd = get_scrapyd(client)
result = scrapyd.delete_project(project=project)
return JsonResponse(result)
except ConnectionError:
return JsonResponse({'message': 'Connect Error'}) | Base | 1 |
def test_tofile_sep(self):
x = np.array([1.51, 2, 3.51, 4], dtype=float)
f = open(self.filename, 'w')
x.tofile(f, sep=',')
f.close()
f = open(self.filename, 'r')
s = f.read()
f.close()
assert_equal(s, '1.51,2.0,3.51,4.0')
os.unlink(self.filename) | Class | 2 |
def check_valid_db(cls, config_calibre_dir, app_db_path, config_calibre_uuid):
if not config_calibre_dir:
return False, False
dbpath = os.path.join(config_calibre_dir, "metadata.db")
if not os.path.exists(dbpath):
return False, False
try:
check_engine = create_engine('sqlite://',
echo=False,
isolation_level="SERIALIZABLE",
connect_args={'check_same_thread': False},
poolclass=StaticPool)
with check_engine.begin() as connection:
connection.execute(text("attach database '{}' as calibre;".format(dbpath)))
connection.execute(text("attach database '{}' as app_settings;".format(app_db_path)))
local_session = scoped_session(sessionmaker())
local_session.configure(bind=connection)
database_uuid = local_session().query(Library_Id).one_or_none()
# local_session.dispose()
check_engine.connect()
db_change = config_calibre_uuid != database_uuid.uuid
except Exception:
return False, False
return True, db_change | Base | 1 |
def update_dir_structure_gdrive(book_id, first_author, renamed_author):
book = calibre_db.get_book(book_id)
authordir = book.path.split('/')[0]
titledir = book.path.split('/')[1]
new_authordir = rename_all_authors(first_author, renamed_author, gdrive=True)
new_titledir = get_valid_filename(book.title, chars=96) + u" (" + str(book_id) + u")"
if titledir != new_titledir:
gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path), titledir)
if gFile:
gd.moveGdriveFileRemote(gFile, new_titledir)
book.path = book.path.split('/')[0] + u'/' + new_titledir
gd.updateDatabaseOnEdit(gFile['id'], book.path) # only child folder affected
else:
return _(u'File %(file)s not found on Google Drive', file=book.path) # file not found
if authordir != new_authordir and authordir not in renamed_author:
gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path), new_titledir)
if gFile:
gd.moveGdriveFolderRemote(gFile, new_authordir)
book.path = new_authordir + u'/' + book.path.split('/')[1]
gd.updateDatabaseOnEdit(gFile['id'], book.path)
else:
return _(u'File %(file)s not found on Google Drive', file=authordir) # file not found
# change location in database to new author/title path
book.path = os.path.join(new_authordir, new_titledir).replace('\\', '/')
return rename_files_on_change(first_author, renamed_author, book, gdrive=True) | Base | 1 |
def test_received_preq_not_completed(self):
inst, sock, map = self._makeOneWithMap()
inst.server = DummyServer()
preq = DummyParser()
inst.request = preq
preq.completed = False
preq.empty = True
inst.received(b"GET / HTTP/1.1\n\n")
self.assertEqual(inst.requests, ())
self.assertEqual(inst.server.tasks, []) | Base | 1 |
def create(request, topic_id):
topic = get_object_or_404(
Topic.objects.for_access(request.user),
pk=topic_id)
form = NotificationCreationForm(
user=request.user,
topic=topic,
data=request.POST)
if form.is_valid():
form.save()
else:
messages.error(request, utils.render_form_errors(form))
return redirect(request.POST.get('next', topic.get_absolute_url())) | Base | 1 |
def test_digest_auth_stale():
# Test that we can handle a nonce becoming stale
http = httplib2.Http()
password = tests.gen_password()
grenew_nonce = [None]
requests = []
handler = tests.http_reflect_with_auth(
allow_scheme="digest",
allow_credentials=(("joe", password),),
out_renew_nonce=grenew_nonce,
out_requests=requests,
)
with tests.server_request(handler, request_count=4) as uri:
http.add_credentials("joe", password)
response, _ = http.request(uri, "GET")
assert response.status == 200
info = httplib2._parse_www_authenticate(
requests[0][1].headers, "www-authenticate"
)
grenew_nonce[0]()
response, _ = http.request(uri, "GET")
assert response.status == 200
assert not response.fromcache
assert getattr(response, "_stale_digest", False)
info2 = httplib2._parse_www_authenticate(
requests[2][1].headers, "www-authenticate"
)
nonce1 = info.get("digest", {}).get("nonce", "")
nonce2 = info2.get("digest", {}).get("nonce", "")
assert nonce1 != ""
assert nonce2 != ""
assert nonce1 != nonce2, (nonce1, nonce2) | Class | 2 |
def move_files_on_change(calibre_path, new_authordir, new_titledir, localbook, db_filename, original_filepath, path):
new_path = os.path.join(calibre_path, new_authordir, new_titledir)
new_name = get_valid_filename(localbook.title, chars=96) + ' - ' + new_authordir
try:
if original_filepath:
if not os.path.isdir(new_path):
os.makedirs(new_path)
shutil.move(os.path.normcase(original_filepath), os.path.normcase(os.path.join(new_path, db_filename)))
log.debug("Moving title: %s to %s/%s", original_filepath, new_path, new_name)
else:
# Check new path is not valid path
if not os.path.exists(new_path):
# move original path to new path
log.debug("Moving title: %s to %s", path, new_path)
shutil.move(os.path.normcase(path), os.path.normcase(new_path))
else: # path is valid copy only files to new location (merge)
log.info("Moving title: %s into existing: %s", path, new_path)
# Take all files and subfolder from old path (strange command)
for dir_name, __, file_list in os.walk(path):
for file in file_list:
shutil.move(os.path.normcase(os.path.join(dir_name, file)),
os.path.normcase(os.path.join(new_path + dir_name[len(path):], file)))
# change location in database to new author/title path
localbook.path = os.path.join(new_authordir, new_titledir).replace('\\','/')
except OSError as ex:
log.error("Rename title from: %s to %s: %s", path, new_path, ex)
log.debug(ex, exc_info=True)
return _("Rename title from: '%(src)s' to '%(dest)s' failed with error: %(error)s",
src=path, dest=new_path, error=str(ex))
return False | Base | 1 |
async def on_exchange_third_party_invite_request(
self, room_id: str, event_dict: Dict | Class | 2 |
def create(request, comment_id):
comment = get_object_or_404(Comment, pk=comment_id)
form = FlagForm(
user=request.user,
comment=comment,
data=post_data(request))
if is_post(request) and form.is_valid():
form.save()
return redirect(request.POST.get('next', comment.get_absolute_url()))
return render(
request=request,
template_name='spirit/comment/flag/create.html',
context={
'form': form,
'comment': comment}) | Base | 1 |
def prepare_context(self, request, context, *args, **kwargs):
""" Hook for adding additional data to the context dict """
pass | Class | 2 |
def _not_here_yet(request, *args, **kwargs):
return HttpResponse("Not here yet: %s (%r, %r)" % (request.path, args, kwargs), status=410) | Base | 1 |
def prop_sentences_stats(self, type, vId = None):
return {
'get_data' : "SELECT victims.*, geo.*, victims.ip AS ip_local, COUNT(clicks.id) FROM victims INNER JOIN geo ON victims.id = geo.id LEFT JOIN clicks ON clicks.id = victims.id GROUP BY victims.id ORDER BY victims.time DESC",
'all_networks' : "SELECT networks.* FROM networks ORDER BY id",
'get_preview' : "SELECT victims.*, geo.*, victims.ip AS ip_local FROM victims INNER JOIN geo ON victims.id = geo.id WHERE victims.id = '%s'" % (vId),
'id_networks' : "SELECT networks.* FROM networks WHERE id = '%s'" % (vId),
'get_requests' : "SELECT requests.*, geo.ip FROM requests INNER JOIN geo on geo.id = requests.user_id ORDER BY requests.date DESC, requests.id ",
'get_sessions' : "SELECT COUNT(*) AS Total FROM networks",
'get_clicks' : "SELECT COUNT(*) AS Total FROM clicks",
'get_online' : "SELECT COUNT(*) AS Total FROM victims WHERE status = '%s'" % ('online')
}.get(type, False) | Base | 1 |
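The query strings above splice `vId` straight into the SQL text; DB-API drivers accept a parameter tuple instead. A sketch with the stdlib `sqlite3` driver (placeholder syntax varies: `?` for sqlite3, `%s` for MySQL drivers):

import sqlite3

def get_preview(conn: sqlite3.Connection, victim_id: str):
    sql = ("SELECT victims.*, geo.*, victims.ip AS ip_local "
           "FROM victims INNER JOIN geo ON victims.id = geo.id "
           "WHERE victims.id = ?")
    # the driver binds victim_id as data, never as SQL syntax
    return conn.execute(sql, (victim_id,)).fetchall()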
def test_process_request(self, rf):
uploaded_file = SimpleUploadedFile("uploaded_file.txt", b"uploaded")
request = rf.post("/", data={"file": uploaded_file})
S3FileMiddleware(lambda x: None)(request)
assert request.FILES.getlist("file")
assert request.FILES.get("file").read() == b"uploaded"
storage.save("tmp/s3file/s3_file.txt", ContentFile(b"s3file"))
request = rf.post(
"/",
data={
"file": "custom/location/tmp/s3file/s3_file.txt",
"s3file": "file",
},
)
S3FileMiddleware(lambda x: None)(request)
assert request.FILES.getlist("file")
assert request.FILES.get("file").read() == b"s3file" | Base | 1 |
def _auth_from_challenge(self, host, request_uri, headers, response, content):
"""A generator that creates Authorization objects
that can be applied to requests.
"""
challenges = _parse_www_authenticate(response, "www-authenticate")
for cred in self.credentials.iter(host):
for scheme in AUTH_SCHEME_ORDER:
if scheme in challenges:
yield AUTH_SCHEME_CLASSES[scheme](
cred, host, request_uri, headers, response, content, self
) | Class | 2 |
def setUp(self):
self.mock_federation_resource = MockHttpResource()
self.mock_http_client = Mock(spec=[])
self.mock_http_client.put_json = DeferredMockCallable()
hs = yield setup_test_homeserver(
self.addCleanup, http_client=self.mock_http_client, keyring=Mock(),
)
self.filtering = hs.get_filtering()
self.datastore = hs.get_datastore() | Base | 1 |
def config_basic(request):
form = BasicConfigForm(data=post_data(request))
if is_post(request) and form.is_valid():
form.save()
messages.info(request, _("Settings updated!"))
return redirect(request.GET.get("next", request.get_full_path()))
return render(
request=request,
template_name='spirit/admin/config_basic.html',
context={'form': form}) | Base | 1 |
def event_from_pdu_json(pdu_json, outlier=False):
"""Construct a FrozenEvent from an event json received over federation
Args:
pdu_json (object): pdu as received over federation
outlier (bool): True to mark this event as an outlier
Returns:
FrozenEvent
Raises:
SynapseError: if the pdu is missing required fields
"""
# we could probably enforce a bunch of other fields here (room_id, sender,
# origin, etc etc)
assert_params_in_request(pdu_json, ('event_id', 'type'))
event = FrozenEvent(
pdu_json
)
event.internal_metadata.outlier = outlier
return event | Class | 2 |