code
stringlengths
12
2.05k
label_name
stringclasses
5 values
label
int64
0
4
def new_type_to_old_type(typ: new.BasePrimitive) -> old.NodeType:
    """Translate a new-typesystem type definition into the legacy old.NodeType
    representation, dispatching on the concrete definition class.

    Raises InvalidType for any definition class not handled below.
    """
    if isinstance(typ, new.BoolDefinition):
        return old.BaseType("bool")
    if isinstance(typ, new.AddressDefinition):
        return old.BaseType("address")
    if isinstance(typ, new.InterfaceDefinition):
        return old.InterfaceType(typ._id)
    if isinstance(typ, new.BytesMDefinition):
        m = typ._length  # type: ignore
        return old.BaseType(f"bytes{m}")
    if isinstance(typ, new.BytesArrayDefinition):
        return old.ByteArrayType(typ.length)
    if isinstance(typ, new.StringDefinition):
        return old.StringType(typ.length)
    if isinstance(typ, new.DecimalDefinition):
        return old.BaseType("decimal")
    if isinstance(typ, new.SignedIntegerAbstractType):
        bits = typ._bits  # type: ignore
        return old.BaseType("int" + str(bits))
    if isinstance(typ, new.UnsignedIntegerAbstractType):
        bits = typ._bits  # type: ignore
        return old.BaseType("uint" + str(bits))
    if isinstance(typ, new.ArrayDefinition):
        # Fixed-size arrays convert their element type recursively.
        return old.SArrayType(new_type_to_old_type(typ.value_type), typ.length)
    if isinstance(typ, new.DynamicArrayDefinition):
        return old.DArrayType(new_type_to_old_type(typ.value_type), typ.length)
    if isinstance(typ, new.TupleDefinition):
        # NOTE(review): unlike the array/struct branches, tuple members are
        # passed through without recursive conversion — confirm old.TupleType
        # actually expects new-style member types here.
        return old.TupleType(typ.value_type)
    if isinstance(typ, new.StructDefinition):
        return old.StructType(
            {n: new_type_to_old_type(t) for (n, t) in typ.members.items()}, typ._id
        )
    raise InvalidType(f"unknown type {typ}")
Class
2
def get(cls, uuid):
    """Return a `Resource` instance of this class identified by the given
    code or UUID.

    Only `Resource` classes with specified `member_path` attributes can be
    directly requested with this method.
    """
    member_url = urljoin(recurly.base_uri(), cls.member_path % (uuid,))
    response, element = cls.element_for_url(member_url)
    return cls.from_element(element)
Base
1
def testSizeAndClear(self):
    """size() reflects the number of staged entries; clear() empties the area."""
    with ops.Graph().as_default() as G:
        with ops.device('/cpu:0'):
            x = array_ops.placeholder(dtypes.float32, name='x')
            pi = array_ops.placeholder(dtypes.int64)
            gi = array_ops.placeholder(dtypes.int64)
            v = 2. * (array_ops.zeros([128, 128]) + x)
        with ops.device(test.gpu_device_name()):
            # Two-component staging area keyed by int64 index `pi`.
            stager = data_flow_ops.MapStagingArea(
                [dtypes.float32, dtypes.float32],
                shapes=[[], [128, 128]],
                names=['x', 'v'])
            stage = stager.put(pi, {'x': x, 'v': v})
            size = stager.size()
            clear = stager.clear()
        G.finalize()
    with self.session(graph=G) as sess:
        sess.run(stage, feed_dict={x: -1, pi: 3})
        self.assertEqual(sess.run(size), 1)
        sess.run(stage, feed_dict={x: -1, pi: 1})
        self.assertEqual(sess.run(size), 2)
        sess.run(clear)
        self.assertEqual(sess.run(size), 0)
Base
1
def make_tarfile(output_filename, source_dir, archive_name, custom_filter=None):
    """Create a reproducible .tar.gz of `source_dir` at `output_filename`.

    All member mtimes are zeroed (and the gzip header carries no filename or
    timestamp) so identical inputs produce byte-identical archives.

    :param output_filename: path of the .tar.gz to write
    :param source_dir: directory to archive
    :param archive_name: name the directory gets inside the archive
    :param custom_filter: optional tarfile filter applied after mtime zeroing
    """
    # Helper for filtering out modification timestamps
    def _filter_timestamps(tar_info):
        tar_info.mtime = 0
        return tar_info if custom_filter is None else custom_filter(tar_info)

    # mkstemp instead of the insecure/deprecated mktemp: the file is created
    # atomically, so no other process can race us for the path.
    fd, unzipped_filename = tempfile.mkstemp()
    os.close(fd)
    try:
        with tarfile.open(unzipped_filename, "w") as tar:
            tar.add(source_dir, arcname=archive_name, filter=_filter_timestamps)
        # When gzipping the tar, don't include the tar's filename or modification
        # time in the zipped archive (see
        # https://docs.python.org/3/library/gzip.html#gzip.GzipFile).
        # The output file is opened via `with` so it is always closed —
        # GzipFile does not close a fileobj passed to it.
        with open(output_filename, "wb") as out_file, gzip.GzipFile(
            filename="", fileobj=out_file, mode="wb", mtime=0
        ) as gzipped_tar, open(unzipped_filename, "rb") as tar:
            gzipped_tar.write(tar.read())
    finally:
        os.remove(unzipped_filename)
Class
2
def test_entrance_exam_delete_state_with_staff(self):
    """ Test entrance exam delete state failure with staff access. """
    # Re-login as course staff (not instructor): staff must NOT be allowed to
    # reset entrance-exam attempts, so the endpoint should answer 403.
    self.client.logout()
    staff_user = StaffFactory(course_key=self.course.id)
    self.client.login(username=staff_user.username, password='test')
    url = reverse('reset_student_attempts_for_entrance_exam',
                  kwargs={'course_id': unicode(self.course.id)})
    response = self.client.get(url, {
        'unique_student_identifier': self.student.email,
        'delete_module': True,
    })
    self.assertEqual(response.status_code, 403)
Compound
4
def _get_obj_absolute_path(obj_path):
    """Resolve `obj_path` (relative to the data root) to an absolute path."""
    root = DATAROOT
    return os.path.join(root, obj_path)
Base
1
def MD5(self, data: str):
    """MD5-hash `data` and pass the result through the instance's salting step."""
    sha = hashlib.md5(bytes(data.encode()))
    # NOTE(review): str() of the raw digest produces a "b'...'" repr string,
    # not a hex digest — presumably kept for compatibility with previously
    # stored hashes; confirm before switching to hexdigest().
    hash = str(sha.digest())
    return self.__Salt(hash, salt=self.salt)
Base
1
def MD5(self, data: str):
    """MD5-hash `data` and pass the result through the instance's salting step."""
    sha = hashlib.md5(bytes(data.encode()))
    # NOTE(review): str() of the raw digest produces a "b'...'" repr string,
    # not a hex digest — presumably kept for compatibility with previously
    # stored hashes; confirm before switching to hexdigest().
    hash = str(sha.digest())
    return self.__Salt(hash, salt=self.salt)
Class
2
def logout():
    """Log the current user out, dropping the session and any OAuth state,
    then redirect to the login page."""
    if current_user is not None and current_user.is_authenticated:
        ub.delete_user_session(current_user.id, flask_session.get('_id',""))
        logout_user()
        # login_type 2 and 3 are the OAuth-backed login modes; their provider
        # tokens must be cleared as well.
        if feature_support['oauth'] and (config.config_login_type == 2 or config.config_login_type == 3):
            logout_oauth_user()
    log.debug(u"User logged out")
    return redirect(url_for('web.login'))
Base
1
def test_get_students_who_may_enroll(self):
    """
    Test whether get_students_who_may_enroll returns an appropriate status
    message when users request a CSV file of students who may enroll in a
    course.
    """
    url = reverse(
        'get_students_who_may_enroll',
        kwargs={'course_id': unicode(self.course.id)}
    )
    # Successful case:
    response = self.client.get(url, {})
    res_json = json.loads(response.content)
    self.assertIn('status', res_json)
    self.assertNotIn('currently being created', res_json['status'])
    # CSV generation already in progress:
    with patch('instructor_task.api.submit_calculate_may_enroll_csv') as submit_task_function:
        error = AlreadyRunningError()
        submit_task_function.side_effect = error
        response = self.client.get(url, {})
        res_json = json.loads(response.content)
        self.assertIn('status', res_json)
        self.assertIn('currently being created', res_json['status'])
Compound
4
def __init__(self, conn=None, host=None, result=None, comm_ok=True, diff=None):
    """Capture the result of running a module against one host.

    :param conn: optional connection object; its `host` (or `delegate`)
        identifies the target
    :param host: host name, used when no connection is given
    :param result: dict result (a JSON string is parsed automatically)
    :param comm_ok: whether communication with the host succeeded
    :param diff: optional per-file diff data for --diff output
    :raises Exception: if no host can be determined or result is not a dict
    """
    # which host is this ReturnData about?
    if conn is not None:
        self.host = conn.host
        delegate = getattr(conn, 'delegate', None)
        if delegate is not None:
            self.host = delegate
    else:
        self.host = host

    self.result = result
    self.comm_ok = comm_ok

    # if these values are set and used with --diff we can show
    # changes made to particular files
    # `diff=None` default instead of a mutable `dict()`: a shared default
    # dict would be aliased across every ReturnData instance.
    self.diff = {} if diff is None else diff

    if type(self.result) in [ str, unicode ]:
        self.result = utils.parse_json(self.result)

    if self.host is None:
        raise Exception("host not set")

    if type(self.result) != dict:
        raise Exception("dictionary result expected")
Class
2
def list_zones(self): pipe = subprocess.Popen([self.zoneadm_cmd, 'list', '-ip'], cwd=self.runner.basedir, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) #stdout, stderr = p.communicate() zones = [] for l in pipe.stdout.readlines(): # 1:work:running:/zones/work:3126dc59-9a07-4829-cde9-a816e4c5040e:native:shared s = l.split(':') if s[1] != 'global': zones.append(s[1]) return zones
Base
1
def make_homeserver(self, reactor, clock):
    """Build a test homeserver whose federation HTTP client records every
    push attempt in self.push_attempts instead of hitting the network."""
    self.push_attempts = []

    m = Mock()

    def post_json_get_json(url, body):
        # Hand back an unresolved Deferred so each push attempt can be
        # resolved (or failed) explicitly by the test.
        d = Deferred()
        self.push_attempts.append((d, url, body))
        return make_deferred_yieldable(d)

    m.post_json_get_json = post_json_get_json

    config = self.default_config()
    config["start_pushers"] = True

    hs = self.setup_test_homeserver(config=config, proxied_http_client=m)
    return hs
Base
1
def __init__(self, crawler, splash_base_url, slot_policy, log_400):
    """Store middleware configuration and subscribe to spider_opened."""
    self.splash_base_url = splash_base_url
    self.slot_policy = slot_policy
    self.log_400 = log_400
    self.crawler = crawler
    crawler.signals.connect(self.spider_opened, signals.spider_opened)
Class
2
def test_get_conditions(self, freeze):
    """The generated S3 POST policy contains the bucket, success status,
    key prefix and Content-Type conditions."""
    conditions = ClearableFileInput().get_conditions(None)
    assert all(
        condition in conditions
        for condition in [
            {"bucket": "test-bucket"},
            {"success_action_status": "201"},
            ["starts-with", "$key", "custom/location/tmp"],
            ["starts-with", "$Content-Type", ""],
        ]
    ), conditions
Base
1
def get_current_phase(self, requested_phase_identifier):
    """Walk the checkout phases in order and return the current one.

    Valid phases encountered along the way are processed.  Once the
    requested phase (or any phase, when no identifier is given) is reached,
    the first non-skippable phase is returned; an invalid past phase is
    returned immediately as the current phase.

    :raises Http404: when no phase matches the requested identifier.
    """
    found = False
    for phase in self.phases:
        if phase.is_valid():
            phase.process()
        if found or not requested_phase_identifier or requested_phase_identifier == phase.identifier:
            found = True  # We're at or past the requested phase
            if not phase.should_skip():
                return phase
        if not phase.should_skip() and not phase.is_valid():
            # A past phase is not valid, that's the current one
            return phase
    raise Http404("Error! Phase with identifier `%s` not found." % requested_phase_identifier)  # pragma: no cover
Base
1
def get_http_client(self) -> MatrixFederationHttpClient:
    """Build a federation HTTP client using this server's TLS policy."""
    policy_factory = context_factory.FederationPolicyForHTTPS(self.config)
    return MatrixFederationHttpClient(self, policy_factory)
Base
1
def __init__(self, text, book): self.text = text self.book = book
Base
1
def test_modify_access_allow(self):
    """Granting the 'staff' role to another user by email succeeds (HTTP 200)."""
    url = reverse('modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
    response = self.client.get(url, {
        'unique_student_identifier': self.other_user.email,
        'rolename': 'staff',
        'action': 'allow',
    })
    self.assertEqual(response.status_code, 200)
Compound
4
def test_get_student_progress_url_nostudent(self):
    """
    Test that the endpoint 400's when requesting an unknown email.
    """
    url = reverse('get_student_progress_url', kwargs={'course_id': self.course.id.to_deprecated_string()})
    # No 'unique_student_identifier' parameter at all -> bad request.
    response = self.client.get(url)
    self.assertEqual(response.status_code, 400)
Compound
4
def save_cover_from_url(url, book_path):
    """Download a cover image from `url` and store it for the book at
    `book_path`, returning (success, message)."""
    try:
        if not cli.allow_localhost:
            # 127.0.x.x, localhost, [::1], [::ffff:7f00:1]
            # NOTE(review): only the FIRST resolved address is inspected and
            # only these loopback patterns — confirm this is sufficient
            # against SSRF (other internal ranges, DNS rebinding).
            ip = socket.getaddrinfo(urlparse(url).hostname, 0)[0][4][0]
            if ip.startswith("127.") or ip.startswith('::ffff:7f') or ip == "::1":
                log.error("Localhost was accessed for cover upload")
                return False, _("You are not allowed to access localhost for cover uploads")
        img = requests.get(url, timeout=(10, 200))  # ToDo: Error Handling
        img.raise_for_status()
        return save_cover(img, book_path)
    except (socket.gaierror,
            requests.exceptions.HTTPError,
            requests.exceptions.ConnectionError,
            requests.exceptions.Timeout) as ex:
        log.info(u'Cover Download Error %s', ex)
        return False, _("Error Downloading Cover")
    except MissingDelegateError as ex:
        # Raised by the image library when the format cannot be handled.
        log.info(u'File Format Error %s', ex)
        return False, _("Cover Format Error")
Base
1
def visit_Call(self, node):
    """
    A couple function calls are supported: bson's ObjectId() and
    datetime().

    The value is built by calling the constructor directly instead of
    eval()-ing a assembled string: interpolating the argument text into an
    eval'd expression would let a crafted input escape the quoting and
    execute arbitrary code.
    """
    if isinstance(node.func, ast.Name):
        if node.func.id == 'ObjectId':
            # Single string argument, e.g. ObjectId("...").
            self.current_value = ObjectId(node.args[0].s)
        elif node.func.id == 'datetime':
            # Positional numeric arguments, e.g. datetime(2020, 1, 2).
            self.current_value = datetime(*(arg.n for arg in node.args))
Base
1
def prop_sentences_stats(self, type, vId = None):
    """Return the SQL statement for the given stats query `type`, or False
    when the type is unknown.

    NOTE(review): vId is interpolated straight into the SQL text — if it can
    ever originate from user input this is injectable; parameterized queries
    would be safer.
    """
    return {
        'get_data' : "SELECT victims.*, geo.*, victims.ip AS ip_local, COUNT(clicks.id) FROM victims INNER JOIN geo ON victims.id = geo.id LEFT JOIN clicks ON clicks.id = victims.id GROUP BY victims.id ORDER BY victims.time DESC",
        'all_networks' : "SELECT networks.* FROM networks ORDER BY id",
        'get_preview' : "SELECT victims.*, geo.*, victims.ip AS ip_local FROM victims INNER JOIN geo ON victims.id = geo.id WHERE victims.id = '%s'" % (vId),
        'id_networks' : "SELECT networks.* FROM networks WHERE id = '%s'" % (vId),
        'get_requests' : "SELECT requests.*, geo.ip FROM requests INNER JOIN geo on geo.id = requests.user_id ORDER BY requests.date DESC, requests.id ",
        'get_sessions' : "SELECT COUNT(*) AS Total FROM networks",
        'get_clicks' : "SELECT COUNT(*) AS Total FROM clicks",
        'get_online' : "SELECT COUNT(*) AS Total FROM victims WHERE status = '%s'" % ('online')
    }.get(type, False)
Base
1
def test_modify_access_allow_with_uname(self):
    """Granting the 'staff' role identified by username (not email) succeeds."""
    url = reverse('modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
    response = self.client.get(url, {
        'unique_student_identifier': self.other_instructor.username,
        'rolename': 'staff',
        'action': 'allow',
    })
    self.assertEqual(response.status_code, 200)
Compound
4
def camera_img_return_path(camera_unique_id, img_type, filename):
    """Return an image from stills or time-lapses"""
    camera = Camera.query.filter(Camera.unique_id == camera_unique_id).first()
    camera_path = assure_path_exists(
        os.path.join(PATH_CAMERAS, '{uid}'.format(uid=camera.unique_id)))
    # Resolve the directory: a camera-specific override path wins, otherwise
    # a per-type subdirectory of the camera's default path is used.
    if img_type == 'still':
        if camera.path_still:
            path = camera.path_still
        else:
            path = os.path.join(camera_path, img_type)
    elif img_type == 'timelapse':
        if camera.path_timelapse:
            path = camera.path_timelapse
        else:
            path = os.path.join(camera_path, img_type)
    else:
        return "Unknown Image Type"
    if os.path.isdir(path):
        # Generator over regular-file names directly under `path`.
        files = (files for files in os.listdir(path)
                 if os.path.isfile(os.path.join(path, files)))
    else:
        files = []
    # Membership in the actual directory listing also guards against path
    # traversal: only names really present in `path` are served.
    if filename in files:
        path_file = os.path.join(path, filename)
        return send_file(path_file, mimetype='image/jpeg')
    return "Image not found"
Base
1
def exec_command(self, cmd, tmp_path, become_user=None, sudoable=False, executable=None, in_data=None):
    ''' run a command on the zone '''
    # Privilege escalation is only supported through the listed become
    # methods; pipelined module data is not supported at all.
    if sudoable and self.runner.become and self.runner.become_method not in self.become_methods_supported:
        raise errors.AnsibleError("Internal Error: this module does not support running commands via %s" % self.runner.become_method)
    if in_data:
        raise errors.AnsibleError("Internal Error: this module does not support optimized module pipelining")
    # We happily ignore privilege escalation
    if executable == '/bin/sh':
        executable = None
    local_cmd = self._generate_cmd(executable, cmd)
    vvv("EXEC %s" % (local_cmd), host=self.zone)
    # shell=True only when the generated command is a single string.
    p = subprocess.Popen(local_cmd,
                         shell=isinstance(local_cmd, basestring),
                         cwd=self.runner.basedir,
                         stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    stdout, stderr = p.communicate()
    return (p.returncode, '', stdout, stderr)
Base
1
def testInputParserBothDuplicate(self):
    """When a key appears in both --inputs and --input_exprs, the expression
    value wins over the file-loaded value."""
    x0 = np.array([[1], [2]])
    input_path = os.path.join(test.get_temp_dir(), 'input.npz')
    np.savez(input_path, a=x0)
    x1 = np.ones([2, 10])
    input_str = 'x0=' + input_path + '[a]'
    input_expr_str = 'x0=np.ones([2,10])'
    feed_dict = saved_model_cli.load_inputs_from_input_arg_string(
        input_str, input_expr_str, '')
    # The expression result (x1), not the npz content (x0), must be fed.
    self.assertTrue(np.all(feed_dict['x0'] == x1))
Base
1
def _cnonce(): dig = _md5( ( "%s:%s" % (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)]) ).encode("utf-8") ).hexdigest() return dig[:16]
Class
2
def whitelist(f): """Decorator: Whitelist method to be called remotely via REST API.""" f.whitelisted = True return f
Base
1
def test_visitor(self):
    """A VisitorBase subclass collects the name of every constructor that
    has at least one sequence field."""
    class CustomVisitor(self.asdl.VisitorBase):
        def __init__(self):
            super().__init__()
            self.names_with_seq = []

        def visitModule(self, mod):
            for dfn in mod.dfns:
                self.visit(dfn)

        def visitType(self, type):
            self.visit(type.value)

        def visitSum(self, sum):
            for t in sum.types:
                self.visit(t)

        def visitConstructor(self, cons):
            # Record constructors that carry a sequence (seq) field.
            for f in cons.fields:
                if f.seq:
                    self.names_with_seq.append(cons.name)

    v = CustomVisitor()
    v.visit(self.types['mod'])
    self.assertEqual(v.names_with_seq, ['Module', 'Interactive', 'Suite'])
Base
1
def _expand_user_properties(self, template):
    """Fill `template` with escaped (DNS-label-safe) and raw user/server
    name fields and return the formatted string."""
    # Make sure username and servername match the restrictions for DNS labels
    # Note: '-' is not in safe_chars, as it is being used as escape character
    safe_chars = set(string.ascii_lowercase + string.digits)

    # Set servername based on whether named-server initialised
    if self.name:
        # use two -- to ensure no collision possibilities
        # are created by an ambiguous boundary between username and
        # servername.
        # -- cannot occur in a string where - is the escape char.
        servername = '--{}'.format(self.name)
        safe_servername = '--{}'.format(escapism.escape(self.name, safe=safe_chars, escape_char='-').lower())
    else:
        servername = ''
        safe_servername = ''

    # Old-style escaping: every unsafe character collapsed to a single '-'.
    legacy_escaped_username = ''.join([s if s in safe_chars else '-' for s in self.user.name.lower()])

    safe_username = escapism.escape(self.user.name, safe=safe_chars, escape_char='-').lower()
    return template.format(
        userid=self.user.id,
        username=safe_username,
        unescaped_username=self.user.name,
        legacy_escape_username=legacy_escaped_username,
        servername=safe_servername,
        unescaped_servername=servername,
    )
Class
2
def __init__(self, desc, response, content):
    """Record the failing response and its body, then initialise the base error."""
    HttpLib2Error.__init__(self, desc)
    self.response = response
    self.content = content
Class
2
def test_reset_extension_to_deleted_date(self):
    """
    Test that we can delete a due date extension after deleting the
    normal due date, without causing an error.
    """
    # First grant an extension while the unit still has a due date.
    url = reverse('change_due_date', kwargs={'course_id': self.course.id.to_deprecated_string()})
    response = self.client.get(url, {
        'student': self.user1.username,
        'url': self.week1.location.to_deprecated_string(),
        'due_datetime': '12/30/2013 00:00'
    })
    self.assertEqual(response.status_code, 200, response.content)
    self.assertEqual(datetime.datetime(2013, 12, 30, 0, 0, tzinfo=utc),
                     get_extended_due(self.course, self.week1, self.user1))

    self.week1.due = None
    self.week1 = self.store.update_item(self.week1, self.user1.id)

    # Now, week1's normal due date is deleted but the extension still exists.
    url = reverse('reset_due_date', kwargs={'course_id': self.course.id.to_deprecated_string()})
    response = self.client.get(url, {
        'student': self.user1.username,
        'url': self.week1.location.to_deprecated_string(),
    })
    self.assertEqual(response.status_code, 200, response.content)
    self.assertEqual(
        None,
        get_extended_due(self.course, self.week1, self.user1)
    )
Compound
4
def read_config(self, config, **kwargs):
    """Parse federation-related configuration: domain whitelist, IP-range
    blacklist and metrics domains.

    :raises ConfigError: when the blacklist contains invalid IP ranges.
    """
    # FIXME: federation_domain_whitelist needs sytests
    self.federation_domain_whitelist = None  # type: Optional[dict]
    federation_domain_whitelist = config.get("federation_domain_whitelist", None)

    if federation_domain_whitelist is not None:
        # turn the whitelist into a hash for speed of lookup
        self.federation_domain_whitelist = {}

        for domain in federation_domain_whitelist:
            self.federation_domain_whitelist[domain] = True

    self.federation_ip_range_blacklist = config.get(
        "federation_ip_range_blacklist", []
    )

    # Attempt to create an IPSet from the given ranges
    try:
        self.federation_ip_range_blacklist = IPSet(
            self.federation_ip_range_blacklist
        )

        # Always blacklist 0.0.0.0, ::
        self.federation_ip_range_blacklist.update(["0.0.0.0", "::"])
    except Exception as e:
        raise ConfigError(
            "Invalid range(s) provided in federation_ip_range_blacklist: %s" % e
        )

    federation_metrics_domains = config.get("federation_metrics_domains") or []
    validate_config(
        _METRICS_FOR_DOMAINS_SCHEMA,
        federation_metrics_domains,
        ("federation_metrics_domains",),
    )
    self.federation_metrics_domains = set(federation_metrics_domains)
Base
1
def debug_decisions(self, text):
    """
    Classifies candidate periods as sentence breaks, yielding a dict for
    each that may be used to understand why the decision was made.

    See format_debug_decision() to help make this output readable.
    """
    for match in self._lang_vars.period_context_re().finditer(text):
        decision_text = match.group() + match.group("after_tok")
        tokens = self._tokenize_words(decision_text)
        tokens = list(self._annotate_first_pass(tokens))
        # Drop leading tokens until one actually ends with a sentence-end
        # character; the remaining tokens[0]/tokens[1] straddle the period.
        while tokens and not tokens[0].tok.endswith(self._lang_vars.sent_end_chars):
            tokens.pop(0)
        yield {
            "period_index": match.end() - 1,
            "text": decision_text,
            "type1": tokens[0].type,
            "type2": tokens[1].type,
            "type1_in_abbrs": bool(tokens[0].abbr),
            "type1_is_initial": bool(tokens[0].is_initial),
            "type2_is_sent_starter": tokens[1].type_no_sentperiod in self._params.sent_starters,
            "type2_ortho_heuristic": self._ortho_heuristic(tokens[1]),
            "type2_ortho_contexts": set(
                self._params._debug_ortho_context(tokens[1].type_no_sentperiod)
            ),
            "collocation": (
                tokens[0].type_no_sentperiod,
                tokens[1].type_no_sentperiod,
            )
            in self._params.collocations,
            "reason": self._second_pass_annotation(tokens[0], tokens[1])
            or REASON_DEFAULT_DECISION,
            "break_decision": tokens[0].sentbreak,
        }
Class
2
def task_remove(request, task_id):
    """
    remove task by task_id
    :param request: HTTP request; only POST performs the deletion
    :param task_id: id of the task to delete
    :return: JsonResponse {'result': '1'} on success, {'result': '0'} on failure
    """
    if request.method == 'POST':
        try:
            # delete job from DjangoJob
            task = Task.objects.get(id=task_id)
            clients = clients_of_task(task)
            for client in clients:
                job_id = get_job_id(client, task)
                DjangoJob.objects.filter(name=job_id).delete()
            # delete task
            Task.objects.filter(id=task_id).delete()
            return JsonResponse({'result': '1'})
        # `except Exception` instead of a bare `except:` so SystemExit and
        # KeyboardInterrupt are not swallowed; any operational failure still
        # reports result '0' as before.
        except Exception:
            return JsonResponse({'result': '0'})
Base
1
def test_magic_response2():
    # check 'body' handling and another 'headers' format
    mw = _get_mw()
    req = SplashRequest('http://example.com/', magic_response=True,
                        headers={'foo': 'bar'}, dont_send_headers=True)
    req = mw.process_request(req, None)
    # dont_send_headers must keep request headers out of the Splash args.
    assert 'headers' not in req.meta['splash']['args']

    resp_data = {
        'body': base64.b64encode(b"binary data").decode('ascii'),
        'headers': {'Content-Type': 'text/plain'},
    }
    resp = TextResponse("http://mysplash.example.com/execute",
                        headers={b'Content-Type': b'application/json'},
                        body=json.dumps(resp_data).encode('utf8'))
    resp2 = mw.process_response(req, resp, None)
    # Magic response: body is base64-decoded and headers come from the JSON
    # payload, while splash_response_* keeps the raw Splash HTTP metadata.
    assert resp2.data == resp_data
    assert resp2.body == b'binary data'
    assert resp2.headers == {b'Content-Type': [b'text/plain']}
    assert resp2.splash_response_headers == {b'Content-Type': [b'application/json']}
    assert resp2.status == resp2.splash_response_status == 200
    assert resp2.url == "http://example.com/"
Class
2
def test_dump(self):
    """ast.dump output in default, positional (annotate_fields=False) and
    attribute-including (include_attributes=True) forms."""
    node = ast.parse('spam(eggs, "and cheese")')
    self.assertEqual(ast.dump(node),
        "Module(body=[Expr(value=Call(func=Name(id='spam', ctx=Load()), "
        "args=[Name(id='eggs', ctx=Load()), Constant(value='and cheese')], "
        "keywords=[]))])"
    )
    self.assertEqual(ast.dump(node, annotate_fields=False),
        "Module([Expr(Call(Name('spam', Load()), [Name('eggs', Load()), "
        "Constant('and cheese')], []))])"
    )
    self.assertEqual(ast.dump(node, include_attributes=True),
        "Module(body=[Expr(value=Call(func=Name(id='spam', ctx=Load(), "
        "lineno=1, col_offset=0, end_lineno=1, end_col_offset=4), "
        "args=[Name(id='eggs', ctx=Load(), lineno=1, col_offset=5, "
        "end_lineno=1, end_col_offset=9), Constant(value='and cheese', "
        "lineno=1, col_offset=11, end_lineno=1, end_col_offset=23)], keywords=[], "
        "lineno=1, col_offset=0, end_lineno=1, end_col_offset=24), "
        "lineno=1, col_offset=0, end_lineno=1, end_col_offset=24)])"
    )
Base
1
def resolve_orders(root: models.User, info, **kwargs):
    """Resolve a user's orders as a connection; draft orders are hidden from
    requesters without the MANAGE_ORDERS permission."""
    # Imported locally to avoid a circular import with the order types module.
    from ..order.types import OrderCountableConnection

    def _resolve_orders(orders):
        requester = get_user_or_app_from_context(info.context)
        if not requester.has_perm(OrderPermissions.MANAGE_ORDERS):
            # Unprivileged requesters never see draft orders.
            orders = list(
                filter(lambda order: order.status != OrderStatus.DRAFT, orders)
            )
        return create_connection_slice(
            orders, info, kwargs, OrderCountableConnection
        )

    return OrdersByUserLoader(info.context).load(root.id).then(_resolve_orders)
Class
2
def remove_auth_hashes(input: Optional[str]):
    """Replace any recognised password hashes in `input` with the dummy
    marker value; return `input` unchanged when empty or hash-free."""
    if not input:
        return input
    # If there are no hashes, skip the RE for performance.  A generator lets
    # any() short-circuit on the first matching hasher prefix instead of
    # testing every prefix up front in a list.
    if not any(pw_hash in input for pw_hash in PASSWORD_HASHERS_ALL.keys()):
        return input
    return re_remove_passwords.sub(
        r'\1 %s # Filtered for security' % PASSWORD_HASH_DUMMY_VALUE, input)
Base
1
def get_email_content_response(self, num_emails, task_history_request, with_failures=False):
    """ Calls the list_email_content endpoint and returns the response """
    self.setup_fake_email_info(num_emails, with_failures)
    task_history_request.return_value = self.tasks.values()
    url = reverse('list_email_content', kwargs={'course_id': self.course.id.to_deprecated_string()})
    # Patch the CourseEmail lookup so each task resolves to our fake email.
    with patch('instructor.views.api.CourseEmail.objects.get') as mock_email_info:
        mock_email_info.side_effect = self.get_matching_mock_email
        response = self.client.get(url, {})
    self.assertEqual(response.status_code, 200)
    return response
Compound
4
def property_from_data( name: str, required: bool, data: Union[oai.Reference, oai.Schema]
Base
1
def test_process_request__multiple_files(self, rf):
    """Both file references listed in the POST are resolved from storage
    into request.FILES, in order."""
    storage.save("tmp/s3file/s3_file.txt", ContentFile(b"s3file"))
    storage.save("tmp/s3file/s3_other_file.txt", ContentFile(b"other s3file"))
    request = rf.post(
        "/",
        data={
            "file": [
                "custom/location/tmp/s3file/s3_file.txt",
                "custom/location/tmp/s3file/s3_other_file.txt",
            ],
            "s3file": ["file", "other_file"],
        },
    )
    S3FileMiddleware(lambda x: None)(request)
    files = request.FILES.getlist("file")
    assert files[0].read() == b"s3file"
    assert files[1].read() == b"other s3file"
Base
1
def load_event(self, args, filename, from_misp, stix_version):
    """Initialise the MISP event from a parsed file, resolving event and
    attribute distribution levels from the optional CLI args.

    args layout: [0] original-file flag, [1] event distribution,
    [2] attribute distribution ('event' means inherit from the event).
    """
    self.outputname = '{}.json'.format(filename)
    if len(args) > 0 and args[0]:
        self.add_original_file(filename, args[0], stix_version)
    try:
        event_distribution = args[1]
        if not isinstance(event_distribution, int):
            # Non-numeric strings fall back to distribution level 5.
            event_distribution = int(event_distribution) if event_distribution.isdigit() else 5
    except IndexError:
        event_distribution = 5
    try:
        attribute_distribution = args[2]
        if attribute_distribution == 'event':
            attribute_distribution = event_distribution
        elif not isinstance(attribute_distribution, int):
            # Invalid values inherit the event's distribution level.
            attribute_distribution = int(attribute_distribution) if attribute_distribution.isdigit() else event_distribution
    except IndexError:
        attribute_distribution = event_distribution
    self.misp_event.distribution = event_distribution
    self.__attribute_distribution = attribute_distribution
    self.from_misp = from_misp
    self.load_mapping()
Base
1
def main(srcfile, dump_module=False):
    """Parse the ASDL description in `srcfile` and generate the C AST
    header (INC_DIR) and implementation (SRC_DIR) files."""
    argv0 = sys.argv[0]
    components = argv0.split(os.sep)
    # Keep only the last two path components for the auto-generated banner.
    argv0 = os.sep.join(components[-2:])
    auto_gen_msg = common_msg % argv0
    mod = asdl.parse(srcfile)
    if dump_module:
        print('Parsed Module:')
        print(mod)
    if not asdl.check(mod):
        sys.exit(1)
    if INC_DIR:
        # Emit the <mod>-ast.h header: type declarations and prototypes.
        p = "%s/%s-ast.h" % (INC_DIR, mod.name)
        f = open(p, "w")
        f.write(auto_gen_msg)
        f.write('#include "asdl.h"\n\n')
        c = ChainOfVisitors(TypeDefVisitor(f),
                            StructVisitor(f),
                            PrototypeVisitor(f),
                            )
        c.visit(mod)
        f.write("PyObject* Ta3AST_mod2obj(mod_ty t);\n")
        f.write("mod_ty Ta3AST_obj2mod(PyObject* ast, PyArena* arena, int mode);\n")
        f.write("int Ta3AST_Check(PyObject* obj);\n")
        f.close()
    if SRC_DIR:
        # Emit the <mod>-ast.c implementation via the visitor pipeline.
        p = os.path.join(SRC_DIR, str(mod.name) + "-ast.c")
        f = open(p, "w")
        f.write(auto_gen_msg)
        f.write('#include <stddef.h>\n')
        f.write('\n')
        f.write('#include "Python.h"\n')
        f.write('#include "%s-ast.h"\n' % mod.name)
        f.write('\n')
        f.write("static PyTypeObject AST_type;\n")
        v = ChainOfVisitors(
            PyTypesDeclareVisitor(f),
            PyTypesVisitor(f),
            Obj2ModPrototypeVisitor(f),
            FunctionVisitor(f),
            ObjVisitor(f),
            Obj2ModVisitor(f),
            ASTModuleVisitor(f),
            PartingShots(f),
        )
        v.visit(mod)
        f.close()
Base
1
def test_evaluate_dict_key_as_underscore(self):
    """A path ending in `_` resolves the literal '_' key of dict `d`."""
    context = self._makeContext()
    result = context.evaluate('d/_')
    self.assertEqual(result, 'under')
Base
1
def job_list(request, client_id, project_name):
    """
    get job list of project from one client
    :param request: request object (GET)
    :param client_id: client id
    :param project_name: project name
    :return: JSON list of jobs, each tagged with its status; 500 on
             connection failure
    """
    if request.method == 'GET':
        client = Client.objects.get(id=client_id)
        scrapyd = get_scrapyd(client)
        try:
            result = scrapyd.list_jobs(project_name)
            jobs = []
            statuses = ['pending', 'running', 'finished']
            for status in statuses:
                for job in result.get(status):
                    job['status'] = status
                    jobs.append(job)
            # safe=False is required: JsonResponse raises TypeError for a
            # top-level list otherwise, so this endpoint always 500'd.
            return JsonResponse(jobs, safe=False)
        except ConnectionError:
            return JsonResponse({'message': 'Connect Error'}, status=500)
Base
1
def test_basic_lua(settings):
    """End-to-end: a Lua 'execute' request returns page data, headers and
    the original script args."""
    class LuaScriptSpider(ResponseSpider):
        """ Make a request using a Lua script similar to the one from README """
        def start_requests(self):
            yield SplashRequest(self.url + "#foo", endpoint='execute',
                                args={'lua_source': DEFAULT_SCRIPT, 'foo': 'bar'})

    items, url, crawler = yield crawl_items(LuaScriptSpider, HelloWorld, settings)
    assert len(items) == 1
    resp = items[0]['response']
    # The fragment survives; page headers come from the script while the raw
    # Splash HTTP response stays JSON.
    assert resp.url == url + "/#foo"
    assert resp.status == resp.splash_response_status == 200
    assert resp.css('body::text').extract_first().strip() == "hello world!"
    assert resp.data['jsvalue'] == 3
    assert resp.headers['X-MyHeader'] == b'my value'
    assert resp.headers['Content-Type'] == b'text/html'
    assert resp.splash_response_headers['Content-Type'] == b'application/json'
    assert resp.data['args']['foo'] == 'bar'
Class
2
def put_file(self, in_path, out_path):
    ''' transfer a file from local to chroot '''
    destination = self._normalize_path(out_path, self.get_jail_path())
    vvv("PUT %s TO %s" % (in_path, destination), host=self.jail)
    self._copy_file(in_path, destination)
Base
1
def testInputParserPythonExpression(self):
    """--input_exprs strings are evaluated and land in the feed dict."""
    x1 = np.ones([2, 10])
    x2 = np.array([[1], [2], [3]])
    x3 = np.mgrid[0:5, 0:5]
    x4 = [[3], [4]]
    input_expr_str = ('x1=np.ones([2,10]);x2=np.array([[1],[2],[3]]);'
                      'x3=np.mgrid[0:5,0:5];x4=[[3],[4]]')
    feed_dict = saved_model_cli.load_inputs_from_input_arg_string(
        '', input_expr_str, '')
    self.assertTrue(np.all(feed_dict['x1'] == x1))
    self.assertTrue(np.all(feed_dict['x2'] == x2))
    self.assertTrue(np.all(feed_dict['x3'] == x3))
    self.assertTrue(np.all(feed_dict['x4'] == x4))
Base
1
def get_install_extras_require():
    """Return the pip `extras_require` mapping, including an auto-built
    'all' target containing every optional dependency."""
    extras_require = {
        'action': ['chevron'],
        'browser': ['zeroconf==0.19.1' if PY2 else 'zeroconf>=0.19.1'],
        'cloud': ['requests'],
        'docker': ['docker>=2.0.0'],
        'export': ['bernhard', 'cassandra-driver', 'couchdb', 'elasticsearch',
                   'graphitesender', 'influxdb>=1.0.0', 'kafka-python', 'pika',
                   'paho-mqtt', 'potsdb', 'prometheus_client', 'pyzmq',
                   'statsd'],
        'folders': ['scandir'],  # python_version<"3.5"
        'gpu': ['py3nvml'],
        'graph': ['pygal'],
        'ip': ['netifaces'],
        'raid': ['pymdstat'],
        'smart': ['pySMART.smartx'],
        'snmp': ['pysnmp'],
        'sparklines': ['sparklines'],
        'web': ['bottle', 'requests'],
        'wifi': ['wifi']
    }
    # Add automatically the 'all' target.  Flatten every dependency list:
    # the previous `[i[0] for i in extras_require.values()]` kept only the
    # FIRST package of each extra, silently dropping the rest (e.g. all but
    # 'bernhard' of the 'export' extra).
    extras_require.update(
        {'all': [pkg for deps in extras_require.values() for pkg in deps]})
    return extras_require
Base
1
def constructObject(data):
    """Instantiate the message class named in `data` and decode `data` into it.

    NOTE(review): the lookups use data[""] — an empty-string key.  This looks
    like the real key name was lost (presumably a message-type field);
    confirm against the original source before relying on this.
    """
    try:
        # NOTE(review): eval() of a class path derived from message data is
        # dangerous if `data` arrives from an untrusted peer — an explicit
        # whitelist mapping of type names to classes would be safer.
        classBase = eval(data[""] + "." + data[""].title())
    except NameError:
        logger.error("Don't know how to handle message type: \"%s\"", data[""])
        return None
    try:
        returnObj = classBase()
        returnObj.decode(data)
    except KeyError as e:
        logger.error("Missing mandatory key %s", e)
        return None
    except:
        logger.error("classBase fail", exc_info=True)
        return None
    else:
        return returnObj
Base
1
def home_get_preview():
    """Return preview and network info for the victim id posted as vId."""
    victim_id = request.form['vId']
    preview = db.sentences_stats('get_preview', victim_id)
    networks = db.sentences_stats('id_networks', victim_id)
    payload = {'status': 'OK', 'vId': victim_id, 'd': preview, 'n': networks}
    return json.dumps(payload)
Base
1
def test_with_admins(self) -> None:
    """update() keeps a None admins list but replaces a populated one."""
    base = InstanceConfig(admins=None)
    first = InstanceConfig(admins=[UUID(int=0)])
    second = InstanceConfig(admins=[UUID(int=1)])

    base.update(first)
    self.assertEqual(base.admins, None)

    first.update(second)
    self.assertEqual(first.admins, second.admins)
Class
2
def render_html(request):
    """
    render html with url
    :param request: GET request carrying a base64-encoded, URL-quoted 'url'
    :return: processed HTML, or a JSON error with status 500
    """
    if request.method == 'GET':
        url = request.GET.get('url')
        url = unquote(base64.b64decode(url).decode('utf-8'))
        # Removed unused reads of 'js' and 'script' query params — they were
        # never referenced after assignment.
        try:
            response = requests.get(url, timeout=5)
            response.encoding = response.apparent_encoding
            html = process_html(response.text)
            return HttpResponse(html)
        except Exception as e:
            return JsonResponse({'message': e.args}, status=500)
Base
1
def add_security_headers(resp):
    """Attach the standard security headers to every outgoing response."""
    csp = "default-src 'self' 'unsafe-inline' 'unsafe-eval';"
    if request.endpoint == "editbook.edit_book":
        csp += "img-src * data:"
    headers = resp.headers
    headers['Content-Security-Policy'] = csp
    headers['X-Content-Type-Options'] = 'nosniff'
    headers['X-Frame-Options'] = 'SAMEORIGIN'
    headers['X-XSS-Protection'] = '1; mode=block'
    headers['Strict-Transport-Security'] = 'max-age=31536000; includeSubDomains'
    return resp
Compound
4
def mysql_insensitive_ends_with(field: Field, value: str) -> Criterion:
    """Case-insensitive ENDS-WITH: UPPER(CAST(field AS CHAR)) LIKE UPPER('%value')."""
    upper_field = functions.Upper(functions.Cast(field, SqlTypes.CHAR))
    pattern = functions.Upper(f"%{value}")
    return upper_field.like(pattern)
Base
1
def test_digest():
    # Test that we support Digest Authentication
    http = httplib2.Http()
    password = tests.gen_password()
    handler = tests.http_reflect_with_auth(
        allow_scheme="digest", allow_credentials=(("joe", password),)
    )
    with tests.server_request(handler, request_count=3) as uri:
        # First request carries no credentials -> 401 challenge from server.
        response, content = http.request(uri, "GET")
        assert response.status == 401
        # With credentials added, the retried request must succeed.
        http.add_credentials("joe", password)
        response, content = http.request(uri, "GET")
        assert response.status == 200, content.decode()
Class
2
def format_date(self, data):
    """
    A hook to control how dates are formatted.

    Can be overridden at the ``Serializer`` level (``datetime_formatting``)
    or globally (via ``settings.TASTYPIE_DATETIME_FORMATTING``).

    Default is ``iso-8601``, which looks like "2010-12-16".
    """
    if self.datetime_formatting != 'rfc-2822':
        return data.isoformat()
    # Delegates to the module-level format_date helper for RFC 2822 output.
    return format_date(data)
Class
2
def test_bad_request(settings): class BadRequestSpider(ResponseSpider): custom_settings = {'HTTPERROR_ALLOW_ALL': True} def start_requests(self): yield SplashRequest(self.url, endpoint='execute', args={'lua_source': DEFAULT_SCRIPT, 'wait': 'bar'}) class GoodRequestSpider(ResponseSpider): custom_settings = {'HTTPERROR_ALLOW_ALL': True} def start_requests(self): yield SplashRequest(self.url, endpoint='execute', args={'lua_source': DEFAULT_SCRIPT}) items, url, crawler = yield crawl_items(BadRequestSpider, HelloWorld, settings) resp = items[0]['response'] assert resp.status == 400 assert resp.splash_response_status == 400 items, url, crawler = yield crawl_items(GoodRequestSpider, Http400Resource, settings) resp = items[0]['response'] assert resp.status == 400 assert resp.splash_response_status == 200
Class
2
def get_install_requires(): requires = ['psutil>=5.3.0', 'future'] if sys.platform.startswith('win'): requires.append('bottle') requires.append('requests') return requires
Base
1
def relative(self, relativePath) -> FileInputSource: return FileInputSource(os.path.join(self.directory(), relativePath))
Base
1
def test_http11_list(self): body = string.ascii_letters to_send = "GET /list HTTP/1.1\n" "Content-Length: %d\n\n" % len(body) to_send += body to_send = tobytes(to_send) self.connect() self.sock.send(to_send) fp = self.sock.makefile("rb", 0) line, headers, response_body = read_http(fp) self.assertline(line, "200", "OK", "HTTP/1.1") self.assertEqual(headers["content-length"], str(len(body))) self.assertEqual(response_body, tobytes(body)) # remote keeps connection open because it divined the content length # from a length-1 list self.sock.send(to_send) line, headers, response_body = read_http(fp) self.assertline(line, "200", "OK", "HTTP/1.1")
Base
1
def move_files_on_change(calibre_path, new_authordir, new_titledir, localbook, db_filename, original_filepath, path): new_path = os.path.join(calibre_path, new_authordir, new_titledir) new_name = get_valid_filename(localbook.title, chars=96) + ' - ' + new_authordir try: if original_filepath: if not os.path.isdir(new_path): os.makedirs(new_path) shutil.move(os.path.normcase(original_filepath), os.path.normcase(os.path.join(new_path, db_filename))) log.debug("Moving title: %s to %s/%s", original_filepath, new_path, new_name) else: # Check new path is not valid path if not os.path.exists(new_path): # move original path to new path log.debug("Moving title: %s to %s", path, new_path) shutil.move(os.path.normcase(path), os.path.normcase(new_path)) else: # path is valid copy only files to new location (merge) log.info("Moving title: %s into existing: %s", path, new_path) # Take all files and subfolder from old path (strange command) for dir_name, __, file_list in os.walk(path): for file in file_list: shutil.move(os.path.normcase(os.path.join(dir_name, file)), os.path.normcase(os.path.join(new_path + dir_name[len(path):], file))) # change location in database to new author/title path localbook.path = os.path.join(new_authordir, new_titledir).replace('\\','/') except OSError as ex: log.error("Rename title from: %s to %s: %s", path, new_path, ex) log.debug(ex, exc_info=True) return _("Rename title from: '%(src)s' to '%(dest)s' failed with error: %(error)s", src=path, dest=new_path, error=str(ex)) return False
Base
1
def __init__(self, hs): self.hs = hs self.auth = hs.get_auth() self.client = hs.get_http_client() self.clock = hs.get_clock() self.server_name = hs.hostname self.store = hs.get_datastore() self.max_upload_size = hs.config.max_upload_size self.max_image_pixels = hs.config.max_image_pixels self.primary_base_path = hs.config.media_store_path self.filepaths = MediaFilePaths(self.primary_base_path) self.dynamic_thumbnails = hs.config.dynamic_thumbnails self.thumbnail_requirements = hs.config.thumbnail_requirements self.remote_media_linearizer = Linearizer(name="media_remote") self.recently_accessed_remotes = set() self.recently_accessed_locals = set() self.federation_domain_whitelist = hs.config.federation_domain_whitelist # List of StorageProviders where we should search for media and # potentially upload to. storage_providers = [] for clz, provider_config, wrapper_config in hs.config.media_storage_providers: backend = clz(hs, provider_config) provider = StorageProviderWrapper( backend, store_local=wrapper_config.store_local, store_remote=wrapper_config.store_remote, store_synchronous=wrapper_config.store_synchronous, ) storage_providers.append(provider) self.media_storage = MediaStorage( self.hs, self.primary_base_path, self.filepaths, storage_providers ) self.clock.looping_call( self._start_update_recently_accessed, UPDATE_RECENTLY_ACCESSED_TS )
Base
1
def boboAwareZopeTraverse(object, path_items, econtext): """Traverses a sequence of names, first trying attributes then items. This uses zope.traversing path traversal where possible and interacts correctly with objects providing OFS.interface.ITraversable when necessary (bobo-awareness). """ request = getattr(econtext, 'request', None) path_items = list(path_items) path_items.reverse() while path_items: name = path_items.pop() if name == '_': warnings.warn('Traversing to the name `_` is deprecated ' 'and will be removed in Zope 6.', DeprecationWarning) elif name.startswith('_'): raise NotFound(name) if OFS.interfaces.ITraversable.providedBy(object): object = object.restrictedTraverse(name) else: object = traversePathElement(object, name, path_items, request=request) return object
Base
1
def new_user(): content = ub.User() languages = calibre_db.speaking_language() translations = [LC('en')] + babel.list_translations() kobo_support = feature_support['kobo'] and config.config_kobo_sync if request.method == "POST": to_save = request.form.to_dict() _handle_new_user(to_save, content, languages, translations, kobo_support) else: content.role = config.config_default_role content.sidebar_view = config.config_default_show content.locale = config.config_default_locale content.default_language = config.config_default_language return render_title_template("user_edit.html", new_user=1, content=content, config=config, translations=translations, languages=languages, title=_(u"Add new user"), page="newuser", kobo_support=kobo_support, registered_oauth=oauth_check)
Base
1
def test_after_start_response_http11(self): to_send = "GET /after_start_response HTTP/1.1\n\n" to_send = tobytes(to_send) self.connect() self.sock.send(to_send) fp = self.sock.makefile("rb", 0) line, headers, response_body = read_http(fp) self.assertline(line, "500", "Internal Server Error", "HTTP/1.1") cl = int(headers["content-length"]) self.assertEqual(cl, len(response_body)) self.assertTrue(response_body.startswith(b"Internal Server Error")) self.assertEqual( sorted(headers.keys()), ["connection", "content-length", "content-type", "date", "server"] ) # connection has been closed self.send_check_error(to_send) self.assertRaises(ConnectionClosed, read_http, fp)
Base
1
def get_type_string(self) -> str: """ Get a string representation of type that should be used when declaring this property """ if self.required: return self._type_string return f"Optional[{self._type_string}]"
Base
1
def test_course_has_entrance_exam_in_student_attempts_reset(self): """ Test course has entrance exam id set while resetting attempts""" url = reverse('reset_student_attempts_for_entrance_exam', kwargs={'course_id': unicode(self.course.id)}) response = self.client.get(url, { 'all_students': True, 'delete_module': False, }) self.assertEqual(response.status_code, 400)
Compound
4
async def customize(self, ctx, command: str.lower, *, response: str = None): """ Customize the response to an action. You can use {0} or {user} to dynamically replace with the specified target of the action. Formats like {0.name} or {0.mention} can also be used. """ if not response: await self.config.guild(ctx.guild).clear_raw("custom", command) else: await self.config.guild(ctx.guild).set_raw("custom", command, value=response) await ctx.tick()
Base
1
def help_page(page_slug: str) -> str: """Fava's included documentation.""" if page_slug not in HELP_PAGES: abort(404) html = markdown2.markdown_path( (resource_path("help") / (page_slug + ".md")), extras=["fenced-code-blocks", "tables", "header-ids"], ) return render_template( "_layout.html", active_page="help", page_slug=page_slug, help_html=render_template_string( html, beancount_version=beancount_version, fava_version=fava_version, ), HELP_PAGES=HELP_PAGES, )
Base
1
def _configuration_oauth_helper(to_save): active_oauths = 0 reboot_required = False for element in oauthblueprints: if to_save["config_" + str(element['id']) + "_oauth_client_id"] != element['oauth_client_id'] \ or to_save["config_" + str(element['id']) + "_oauth_client_secret"] != element['oauth_client_secret']: reboot_required = True element['oauth_client_id'] = to_save["config_" + str(element['id']) + "_oauth_client_id"] element['oauth_client_secret'] = to_save["config_" + str(element['id']) + "_oauth_client_secret"] if to_save["config_" + str(element['id']) + "_oauth_client_id"] \ and to_save["config_" + str(element['id']) + "_oauth_client_secret"]: active_oauths += 1 element["active"] = 1 else: element["active"] = 0 ub.session.query(ub.OAuthProvider).filter(ub.OAuthProvider.id == element['id']).update( {"oauth_client_id": to_save["config_" + str(element['id']) + "_oauth_client_id"], "oauth_client_secret": to_save["config_" + str(element['id']) + "_oauth_client_secret"], "active": element["active"]}) return reboot_required
Base
1
def update_dir_structure_gdrive(book_id, first_author, renamed_author): book = calibre_db.get_book(book_id) authordir = book.path.split('/')[0] titledir = book.path.split('/')[1] new_authordir = rename_all_authors(first_author, renamed_author, gdrive=True) new_titledir = get_valid_filename(book.title, chars=96) + u" (" + str(book_id) + u")" if titledir != new_titledir: gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path), titledir) if gFile: gd.moveGdriveFileRemote(gFile, new_titledir) book.path = book.path.split('/')[0] + u'/' + new_titledir gd.updateDatabaseOnEdit(gFile['id'], book.path) # only child folder affected else: return _(u'File %(file)s not found on Google Drive', file=book.path) # file not found if authordir != new_authordir and authordir not in renamed_author: gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path), new_titledir) if gFile: gd.moveGdriveFolderRemote(gFile, new_authordir) book.path = new_authordir + u'/' + book.path.split('/')[1] gd.updateDatabaseOnEdit(gFile['id'], book.path) else: return _(u'File %(file)s not found on Google Drive', file=authordir) # file not found # change location in database to new author/title path book.path = os.path.join(new_authordir, new_titledir).replace('\\', '/') return rename_files_on_change(first_author, renamed_author, book, gdrive=True)
Base
1
def test_tofile_sep(self): x = np.array([1.51, 2, 3.51, 4], dtype=float) f = open(self.filename, 'w') x.tofile(f, sep=',') f.close() f = open(self.filename, 'r') s = f.read() f.close() assert_equal(s, '1.51,2.0,3.51,4.0') os.unlink(self.filename)
Base
1
def load(self): if len(self.tile) != 1 or self.tile[0][0] != "iptc": return ImageFile.ImageFile.load(self) type, tile, box = self.tile[0] encoding, offset = tile self.fp.seek(offset) # Copy image data to temporary file outfile = tempfile.mktemp() o = open(outfile, "wb") if encoding == "raw": # To simplify access to the extracted file, # prepend a PPM header o.write("P5\n%d %d\n255\n" % self.size) while True: type, size = self.field() if type != (8, 10): break while size > 0: s = self.fp.read(min(size, 8192)) if not s: break o.write(s) size = size - len(s) o.close() try: try: # fast self.im = Image.core.open_ppm(outfile) except: # slightly slower im = Image.open(outfile) im.load() self.im = im.im finally: try: os.unlink(outfile) except: pass
Base
1
def is_valid_client_secret(client_secret): """Validate that a given string matches the client_secret regex defined by the spec :param client_secret: The client_secret to validate :type client_secret: unicode :return: Whether the client_secret is valid :rtype: bool """ return client_secret_regex.match(client_secret) is not None
Base
1
def make_homeserver(self, reactor, clock): self.fetches = [] def get_file(destination, path, output_stream, args=None, max_size=None): """ Returns tuple[int,dict,str,int] of file length, response headers, absolute URI, and response code. """ def write_to(r): data, response = r output_stream.write(data) return response d = Deferred() d.addCallback(write_to) self.fetches.append((d, destination, path, args)) return make_deferred_yieldable(d) client = Mock() client.get_file = get_file self.storage_path = self.mktemp() self.media_store_path = self.mktemp() os.mkdir(self.storage_path) os.mkdir(self.media_store_path) config = self.default_config() config["media_store_path"] = self.media_store_path config["thumbnail_requirements"] = {} config["max_image_pixels"] = 2000000 provider_config = { "module": "synapse.rest.media.v1.storage_provider.FileStorageProviderBackend", "store_local": True, "store_synchronous": False, "store_remote": True, "config": {"directory": self.storage_path}, } config["media_storage_providers"] = [provider_config] hs = self.setup_test_homeserver(config=config, http_client=client) return hs
Base
1
def __new__(cls, sourceName: str): """Dispatches to the right subclass.""" if cls != InputSource: # Only take control of calls to InputSource(...) itself. return super().__new__(cls) if sourceName == "-": return StdinInputSource(sourceName) if sourceName.startswith("https:"): return UrlInputSource(sourceName) return FileInputSource(sourceName)
Base
1
def feed_hot(): off = request.args.get("offset") or 0 all_books = ub.session.query(ub.Downloads, func.count(ub.Downloads.book_id)).order_by( func.count(ub.Downloads.book_id).desc()).group_by(ub.Downloads.book_id) hot_books = all_books.offset(off).limit(config.config_books_per_page) entries = list() for book in hot_books: downloadBook = calibre_db.get_book(book.Downloads.book_id) if downloadBook: entries.append( calibre_db.get_filtered_book(book.Downloads.book_id) ) else: ub.delete_download(book.Downloads.book_id) numBooks = entries.__len__() pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1), config.config_books_per_page, numBooks) return render_xml_template('feed.xml', entries=entries, pagination=pagination)
Base
1
def decompile(self): self.writeln("** Decompiling APK...", clr.OKBLUE) with ZipFile(self.file) as zipped: try: dex = self.tempdir + "/" + self.apk.package + ".dex" with open(dex, "wb") as classes: classes.write(zipped.read("classes.dex")) except Exception as e: sys.exit(self.writeln(str(e), clr.WARNING)) dec = "%s %s -d %s --deobf" % (self.jadx, dex, self.tempdir) os.system(dec) return self.tempdir
Base
1
def yet_another_upload_file(request): path = tempfile.mkdtemp() file_name = os.path.join(path, "yet_another_%s.txt" % request.node.name) with open(file_name, "w") as f: f.write(request.node.name) return file_name
Base
1
def test_received_trailer_startswith_lf(self): buf = DummyBuffer() inst = self._makeOne(buf) inst.all_chunks_received = True result = inst.received(b"\n") self.assertEqual(result, 1) self.assertEqual(inst.completed, True)
Base
1
def start_requests(self): yield SplashRequest(self.url, endpoint='execute', args={'lua_source': DEFAULT_SCRIPT})
Class
2
def test_uses_default(self): account_info = self._make_sqlite_account_info( env={ 'HOME': self.home, 'USERPROFILE': self.home, } ) actual_path = os.path.abspath(account_info.filename) assert os.path.join(self.home, '.b2_account_info') == actual_path
Base
1
def test_is_valid_hostname(self): """Tests that the is_valid_hostname function accepts only valid hostnames (or domain names), with optional port number. """ self.assertTrue(is_valid_hostname("example.com")) self.assertTrue(is_valid_hostname("EXAMPLE.COM")) self.assertTrue(is_valid_hostname("ExAmPlE.CoM")) self.assertTrue(is_valid_hostname("example.com:4242")) self.assertTrue(is_valid_hostname("localhost")) self.assertTrue(is_valid_hostname("localhost:9000")) self.assertTrue(is_valid_hostname("a.b:1234")) self.assertFalse(is_valid_hostname("example.com:65536")) self.assertFalse(is_valid_hostname("example.com:0")) self.assertFalse(is_valid_hostname("example.com:a")) self.assertFalse(is_valid_hostname("example.com:04242")) self.assertFalse(is_valid_hostname("example.com: 4242")) self.assertFalse(is_valid_hostname("example.com/example.com")) self.assertFalse(is_valid_hostname("example.com#example.com"))
Base
1
def test_reset_student_attempts_invalid_entrance_exam(self): """ Test reset for invalid entrance exam. """ url = reverse('reset_student_attempts_for_entrance_exam', kwargs={'course_id': unicode(self.course_with_invalid_ee.id)}) response = self.client.get(url, { 'unique_student_identifier': self.student.email, }) self.assertEqual(response.status_code, 400)
Compound
4
def test_module(self): body = [ast.Num(42)] x = ast.Module(body) self.assertEqual(x.body, body)
Base
1
def filter(self, names): for name in [_hkey(n) for n in names]: if name in self.dict: del self.dict[name]
Base
1
def _redirect_request_using_get(self, request, redirect_url): redirected = request.replace(url=redirect_url, method='GET', body='') redirected.headers.pop('Content-Type', None) redirected.headers.pop('Content-Length', None) return redirected
Class
2
def main(): app = create_app() init_errorhandler() app.register_blueprint(web) app.register_blueprint(opds) app.register_blueprint(jinjia) app.register_blueprint(about) app.register_blueprint(shelf) app.register_blueprint(admi) app.register_blueprint(remotelogin) app.register_blueprint(meta) app.register_blueprint(gdrive) app.register_blueprint(editbook) if kobo_available: app.register_blueprint(kobo) app.register_blueprint(kobo_auth) if oauth_available: app.register_blueprint(oauth) success = web_server.start() sys.exit(0 if success else 1)
Base
1
def _get_index_absolute_path(index): return os.path.join(INDEXDIR, index)
Base
1
def image(self, request, pk): obj = self.get_object() if obj.get_space() != request.space: raise PermissionDenied(detail='You do not have the required permission to perform this action', code=403) serializer = self.serializer_class(obj, data=request.data, partial=True) if serializer.is_valid(): serializer.save() image = None filetype = ".jpeg" # fall-back to .jpeg, even if wrong, at least users will know it's an image and most image viewers can open it correctly anyways if 'image' in serializer.validated_data: image = obj.image filetype = mimetypes.guess_extension(serializer.validated_data['image'].content_type) or filetype elif 'image_url' in serializer.validated_data: try: response = requests.get(serializer.validated_data['image_url']) image = File(io.BytesIO(response.content)) filetype = mimetypes.guess_extension(response.headers['content-type']) or filetype except UnidentifiedImageError as e: print(e) pass except MissingSchema as e: print(e) pass except Exception as e: print(e) pass if image is not None: img = handle_image(request, image, filetype) obj.image = File(img, name=f'{uuid.uuid4()}_{obj.pk}{filetype}') obj.save() return Response(serializer.data) return Response(serializer.errors, 400)
Base
1
def parse(self, response): yield {'response': response}
Class
2
def _parse_cache_control(headers): retval = {} if "cache-control" in headers: parts = headers["cache-control"].split(",") parts_with_args = [ tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=") ] parts_wo_args = [ (name.strip().lower(), 1) for name in parts if -1 == name.find("=") ] retval = dict(parts_with_args + parts_wo_args) return retval
Class
2
async def check_credentials(username, password): return password == "iloveyou"
Base
1
def load(self): config_type = type(self).__name__.lower() try: with self.path.open(encoding=UTF8) as f: try: data = json.load(f) except ValueError as e: raise ConfigFileError( f'invalid {config_type} file: {e} [{self.path}]' ) self.update(data) except FileNotFoundError: pass except OSError as e: raise ConfigFileError(f'cannot read {config_type} file: {e}')
Class
2
def test_received_headers_finished_expect_continue_false(self): inst, sock, map = self._makeOneWithMap() inst.server = DummyServer() preq = DummyParser() inst.request = preq preq.expect_continue = False preq.headers_finished = True preq.completed = False preq.empty = False preq.retval = 1 inst.received(b"GET / HTTP/1.1\n\n") self.assertEqual(inst.request, preq) self.assertEqual(inst.server.tasks, []) self.assertEqual(inst.outbufs[0].get(100), b"")
Base
1
def edit_single_cc_data(book_id, book, column_id, to_save): cc = (calibre_db.session.query(db.Custom_Columns) .filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)) .filter(db.Custom_Columns.id == column_id) .all()) return edit_cc_data(book_id, book, to_save, cc)
Base
1
def __init__(self, **kwargs): self.basic_auth = get_anymail_setting('webhook_authorization', default=[], kwargs=kwargs) # no esp_name -- auth is shared between ESPs # Allow a single string: if isinstance(self.basic_auth, six.string_types): self.basic_auth = [self.basic_auth] if self.warn_if_no_basic_auth and len(self.basic_auth) < 1: warnings.warn( "Your Anymail webhooks are insecure and open to anyone on the web. " "You should set WEBHOOK_AUTHORIZATION in your ANYMAIL settings. " "See 'Securing webhooks' in the Anymail docs.", AnymailInsecureWebhookWarning) # noinspection PyArgumentList super(AnymailBasicAuthMixin, self).__init__(**kwargs)
Base
1