Dataset schema (one record per function):
  _id               string (2-7 chars)
  title             string (1-88 chars)
  partition         string (3 classes)
  text              string (75-19.8k chars)
  language          string (1 class)
  meta_information  dict
q1100
attachment_delete_link
train
def attachment_delete_link(context, attachment):
    """
    Render an HTML link to the delete view of the given attachment.

    Returns no content if the request user has no permission to delete
    attachments. The user may delete the attachment either if they hold the
    ``attachments.delete_attachment`` permission and are the creator of the
    attachment, or if they hold ``attachments.delete_foreign_attachments``,
    which allows deleting any attachment.
    """
    if context['user'].has_perm('attachments.delete_foreign_attachments') or (
        context['user'] == attachment.creator
        and context['user'].has_perm('attachments.delete_attachment')
    ):
        return {
            'next': context.request.build_absolute_uri(),
            'delete_url': reverse(
                'attachments:delete', kwargs={'attachment_pk': attachment.pk}
            ),
        }
    return {'delete_url': None}
python
{ "resource": "" }
q1101
PyHeat.show_heatmap
train
def show_heatmap(self, blocking=True, output_file=None, enable_scroll=False):
    """Method to actually display the heatmap created.

    @param blocking: When set to False makes an unblocking plot show.
    @param output_file: If not None the heatmap image is output to this file.
        Supported formats: (eps, pdf, pgf, png, ps, raw, rgba, svg, svgz)
    @param enable_scroll: Flag used to add a scroll bar to scroll long files.
    """
    if output_file is None:
        if enable_scroll:
            # Add a new axes which will be used as scroll bar.
            axpos = plt.axes([0.12, 0.1, 0.625, 0.03])
            spos = Slider(axpos, "Scroll", 10, len(self.pyfile.lines))

            def update(val):
                """Method to update position when slider is moved."""
                pos = spos.val
                self.ax.axis([0, 1, pos, pos - 10])
                self.fig.canvas.draw_idle()

            spos.on_changed(update)
        plt.show(block=blocking)
    else:
        plt.savefig(output_file)
python
{ "resource": "" }
q1102
PyHeat.__profile_file
train
def __profile_file(self):
    """Method used to profile the given file line by line."""
    self.line_profiler = pprofile.Profile()
    self.line_profiler.runfile(
        open(self.pyfile.path, "r"), {}, self.pyfile.path
    )
python
{ "resource": "" }
q1103
PyHeat.__get_line_profile_data
train
def __get_line_profile_data(self):
    """Method to procure line profiles.

    @return: Line profiles if the file has been profiled, else an empty
        dictionary.
    """
    if self.line_profiler is None:
        return {}

    # The [0] is because pprofile.Profile.file_dict stores the line_dict
    # in a list so that it can be modified in a thread-safe way.
    # See https://github.com/vpelletier/pprofile/blob/da3d60a1b59a061a0e2113bf768b7cb4bf002ccb/pprofile.py#L398
    return self.line_profiler.file_dict[self.pyfile.path][0].line_dict
python
{ "resource": "" }
q1104
PyHeat.__fetch_heatmap_data_from_profile
train
def __fetch_heatmap_data_from_profile(self):
    """Method to create heatmap data from profile information."""
    # Read lines from file.
    with open(self.pyfile.path, "r") as file_to_read:
        for line in file_to_read:
            # Remove return char from the end of the line and add a
            # space in the beginning for better visibility.
            self.pyfile.lines.append(" " + line.strip("\n"))

    # Total number of lines in file.
    self.pyfile.length = len(self.pyfile.lines)

    # Fetch line profiles.
    line_profiles = self.__get_line_profile_data()

    # Creating an array of data points. As the profile keys are 1 indexed
    # we should range from 1 to line_count + 1 and not 0 to line_count.
    arr = []
    for line_num in range(1, self.pyfile.length + 1):
        if line_num in line_profiles:
            # line_profiles[i] will have multiple entries if line i is
            # invoked from multiple places in the code. Here we sum over
            # each invocation to get the total time spent on that line.
            line_times = [
                ltime for _, ltime in line_profiles[line_num].values()
            ]
            arr.append([sum(line_times)])
        else:
            arr.append([0.0])

    # Create nd-array from list of data points.
    self.pyfile.data = np.array(arr)
python
{ "resource": "" }
q1105
PyHeat.__create_heatmap_plot
train
def __create_heatmap_plot(self):
    """Method to actually create the heatmap from profile stats."""
    # Define the heatmap plot.
    height = len(self.pyfile.lines) / 3
    width = max(map(lambda x: len(x), self.pyfile.lines)) / 8
    self.fig, self.ax = plt.subplots(figsize=(width, height))

    # Set second sub plot to occupy bottom 20%.
    plt.subplots_adjust(bottom=0.20)

    # Heat scale orange to red.
    heatmap = self.ax.pcolor(self.pyfile.data, cmap="OrRd")

    # X Axis: remove X axis.
    self.ax.xaxis.set_visible(False)

    # Y Axis: create labels for y-axis ticks.
    row_labels = range(1, self.pyfile.length + 1)
    # Set y-tick labels.
    self.ax.set_yticklabels(row_labels, minor=False)
    # Put y-axis major ticks at the middle of each cell.
    self.ax.set_yticks(np.arange(self.pyfile.data.shape[0]) + 0.5, minor=False)
    # Invert y-axis to have top-down line numbers.
    self.ax.invert_yaxis()

    # Plot definitions: set plot y-axis label.
    plt.ylabel("Line Number")

    # Annotate each cell with lines in file in order.
    max_time_spent_on_a_line = max(self.pyfile.data)
    for i, line in enumerate(self.pyfile.lines):
        # In order to ensure easy readability of the code, we need to
        # invert colour of text display for darker colours which
        # correspond to higher amount of time spent on the line.
        if self.pyfile.data[i] >= 0.7 * max_time_spent_on_a_line:
            color = (1.0, 1.0, 1.0)  # White text
        else:
            color = (0.0, 0.0, 0.0)  # Black text
        plt.text(
            0.0,
            i + 0.5,
            line,
            ha="left",
            va="center",
            color=color,
            clip_on=True,
        )

    # Define legend.
    cbar = plt.colorbar(heatmap)
    cbar.set_label("# of seconds")
python
{ "resource": "" }
q1106
Money.round
train
def round(self, ndigits=0):
    """
    Rounds the amount using the current ``Decimal`` rounding algorithm.
    """
    if ndigits is None:
        ndigits = 0
    return self.__class__(
        amount=self.amount.quantize(Decimal('1e' + str(-ndigits))),
        currency=self.currency)
python
{ "resource": "" }
q1107
run
train
def run():
    """CLI endpoint."""
    sys.path.insert(0, os.getcwd())
    logging.basicConfig(level=logging.INFO, handlers=[logging.StreamHandler()])
    parser = argparse.ArgumentParser(description="Manage Application", add_help=False)
    parser.add_argument('app', metavar='app', type=str, help='Application module path')
    parser.add_argument('--config', type=str, help='Path to configuration.')
    parser.add_argument('--version', action="version", version=__version__)
    args_, subargs_ = parser.parse_known_args(sys.argv[1:])
    if args_.config:
        os.environ[CONFIGURATION_ENVIRON_VARIABLE] = args_.config

    from gunicorn.util import import_app

    app_uri = args_.app
    if ':' not in app_uri:
        app_uri += ':app'

    try:
        app = import_app(app_uri)
        app.uri = app_uri
        app.logger.info('Application is loaded: %s' % app.name)
    except Exception as exc:
        logging.exception(exc)
        raise sys.exit(1)

    app.manage(*subargs_, prog='muffin %s' % args_.app)
python
{ "resource": "" }
q1108
Manager.command
train
def command(self, init=False): """Define CLI command.""" def wrapper(func): header = '\n'.join([s for s in (func.__doc__ or '').split('\n') if not s.strip().startswith(':')]) parser = self.parsers.add_parser(func.__name__, description=header) args, vargs, kw, defs, kwargs, kwdefs, anns = inspect.getfullargspec(func) defs = defs or [] kwargs_ = dict(zip(args[-len(defs):], defs)) docs = dict(PARAM_RE.findall(func.__doc__ or "")) def process_arg(name, *, value=..., **opts): argname = name.replace('_', '-').lower() arghelp = docs.get(vargs, '') if value is ...: return parser.add_argument(argname, help=arghelp, **opts) if isinstance(value, bool): if value: return parser.add_argument( "--no-" + argname, dest=name, action="store_false", help="Disable %s" % (arghelp or name).lower()) return parser.add_argument( "--" + argname, dest=name, action="store_true", help="Enable %s" % (arghelp or name).lower()) if isinstance(value, list): return parser.add_argument( "--" + argname, action="append", default=value, help=arghelp) return parser.add_argument( "--" + argname, type=anns.get(name, type(value)), default=value, help=arghelp + ' [%s]' % repr(value)) if vargs: process_arg('*', nargs="*", metavar=vargs) for name, value in (kwdefs or {}).items(): process_arg(name, value=value) for name in args: process_arg(name, value=kwargs_.get(name, ...)) self.handlers[func.__name__] = func func.parser = parser return func if callable(init): init.__init__ = True return wrapper(init) def decorator(func): func.__init__ = bool(init) return wrapper(func) return decorator
python
{ "resource": "" }
q1109
routes_register
train
def routes_register(app, handler, *paths, methods=None, router=None, name=None):
    """Register routes."""
    if router is None:
        router = app.router

    handler = to_coroutine(handler)

    resources = []
    for path in paths:

        # Register any exception to app
        if isinstance(path, type) and issubclass(path, BaseException):
            app._error_handlers[path] = handler
            continue

        # Ensure that names are unique
        name = str(name or '')
        rname, rnum = name, 2
        while rname in router:
            rname = "%s%d" % (name, rnum)
            rnum += 1

        path = parse(path)
        if isinstance(path, RETYPE):
            resource = RawReResource(path, name=rname)
            router.register_resource(resource)
        else:
            resource = router.add_resource(path, name=rname)

        for method in methods or [METH_ANY]:
            method = method.upper()
            resource.add_route(method, handler)

        resources.append(resource)

    return resources
python
{ "resource": "" }
q1110
parse
train
def parse(path):
    """Parse URL path and convert it to regexp if needed."""
    parsed = re.sre_parse.parse(path)
    for case, _ in parsed:
        if case not in (re.sre_parse.LITERAL, re.sre_parse.ANY):
            break
    else:
        return path

    path = path.strip('^$')

    def parse_(match):
        [part] = match.groups()
        match = DYNR_RE.match(part)
        params = match.groupdict()
        return '(?P<%s>%s)' % (params['var'], params['re'] or '[^{}/]+')

    return re.compile('^%s$' % DYNS_RE.sub(parse_, path))
python
{ "resource": "" }
q1111
RawReResource.url_for
train
def url_for(self, *subgroups, **groups):
    """Build URL."""
    parsed = re.sre_parse.parse(self._pattern.pattern)
    subgroups = {n: str(v) for n, v in enumerate(subgroups, 1)}
    groups_ = dict(parsed.pattern.groupdict)
    subgroups.update({
        groups_[k0]: str(v0)
        for k0, v0 in groups.items()
        if k0 in groups_
    })
    path = ''.join(str(val) for val in Traverser(parsed, subgroups))
    return URL.build(path=path, encoded=True)
python
{ "resource": "" }
q1112
Traverser.state_not_literal
train
def state_not_literal(self, value):
    """Parse not literal."""
    value = negate = chr(value)
    while value == negate:
        value = choice(self.literals)
    yield value
python
{ "resource": "" }
q1113
Traverser.state_max_repeat
train
def state_max_repeat(self, value):
    """Parse repeatable parts."""
    min_, max_, value = value
    value = [val for val in Traverser(value, self.groups)]
    if not min_ and max_:
        for val in value:
            if isinstance(val, required):
                min_ = 1
                break

    for val in value * min_:
        yield val
python
{ "resource": "" }
q1114
Traverser.state_in
train
def state_in(self, value):
    """Parse ranges."""
    value = [val for val in Traverser(value, self.groups)]
    if not value or not value[0]:
        for val in self.literals - set(value):
            return (yield val)
    yield value[0]
python
{ "resource": "" }
q1115
Traverser.state_category
train
def state_category(value):
    """Parse categories."""
    if value == re.sre_parse.CATEGORY_DIGIT:
        return (yield '0')

    if value == re.sre_parse.CATEGORY_WORD:
        return (yield 'x')
python
{ "resource": "" }
q1116
create_signature
train
def create_signature(secret, value, digestmod='sha256', encoding='utf-8'):
    """Create HMAC Signature from secret for value."""
    if isinstance(secret, str):
        secret = secret.encode(encoding)

    if isinstance(value, str):
        value = value.encode(encoding)

    if isinstance(digestmod, str):
        digestmod = getattr(hashlib, digestmod, hashlib.sha1)

    hm = hmac.new(secret, digestmod=digestmod)
    hm.update(value)
    return hm.hexdigest()
python
{ "resource": "" }
q1117
check_signature
train
def check_signature(signature, *args, **kwargs):
    """Check that the signature is correct."""
    return hmac.compare_digest(signature, create_signature(*args, **kwargs))
python
{ "resource": "" }
q1118
generate_password_hash
train
def generate_password_hash(password, digestmod='sha256', salt_length=8):
    """Hash a password with given method and salt length."""
    salt = ''.join(random.sample(SALT_CHARS, salt_length))
    signature = create_signature(salt, password, digestmod=digestmod)
    return '$'.join((digestmod, salt, signature))
python
{ "resource": "" }
q1119
import_submodules
train
def import_submodules(package_name, *submodules):
    """Import all submodules by package name."""
    package = sys.modules[package_name]
    return {
        name: importlib.import_module(package_name + '.' + name)
        for _, name, _ in pkgutil.walk_packages(package.__path__)
        if not submodules or name in submodules
    }
python
{ "resource": "" }
q1120
register
train
def register(*paths, methods=None, name=None, handler=None):
    """Mark Handler.method to aiohttp handler.

    It is used when registration of the handler with the application is postponed.

    ::

        class AwesomeHandler(Handler):

            def get(self, request):
                return "I'm awesome!"

            @register('/awesome/best')
            def best(self, request):
                return "I'm best!"

    """
    def wrapper(method):
        """Store route params into method."""
        method = to_coroutine(method)
        setattr(method, ROUTE_PARAMS_ATTR, (paths, methods, name))
        if handler and not hasattr(handler, method.__name__):
            setattr(handler, method.__name__, method)
        return method
    return wrapper
python
{ "resource": "" }
q1121
Handler.from_view
train
def from_view(cls, view, *methods, name=None):
    """Create a handler class from function or coroutine."""
    docs = getattr(view, '__doc__', None)
    view = to_coroutine(view)
    methods = methods or ['GET']

    if METH_ANY in methods:
        methods = METH_ALL

    def proxy(self, *args, **kwargs):
        return view(*args, **kwargs)

    params = {m.lower(): proxy for m in methods}
    params['methods'] = methods

    if docs:
        params['__doc__'] = docs

    return type(name or view.__name__, (cls,), params)
python
{ "resource": "" }
q1122
Handler.bind
train
def bind(cls, app, *paths, methods=None, name=None, router=None, view=None):
    """Bind to the given application."""
    cls.app = app
    if cls.app is not None:
        for _, m in inspect.getmembers(cls, predicate=inspect.isfunction):
            if not hasattr(m, ROUTE_PARAMS_ATTR):
                continue
            paths_, methods_, name_ = getattr(m, ROUTE_PARAMS_ATTR)
            name_ = name_ or ("%s.%s" % (cls.name, m.__name__))
            delattr(m, ROUTE_PARAMS_ATTR)
            cls.app.register(*paths_, methods=methods_, name=name_, handler=cls)(m)

    @coroutine
    @functools.wraps(cls)
    def handler(request):
        return cls().dispatch(request, view=view)

    if not paths:
        paths = ["/%s" % cls.__name__]

    return routes_register(
        app, handler, *paths, methods=methods, router=router, name=name or cls.name)
python
{ "resource": "" }
q1123
Handler.register
train
def register(cls, *args, **kwargs):
    """Register view to handler."""
    if cls.app is None:
        return register(*args, handler=cls, **kwargs)
    return cls.app.register(*args, handler=cls, **kwargs)
python
{ "resource": "" }
q1124
Handler.dispatch
train
async def dispatch(self, request, view=None, **kwargs):
    """Dispatch request."""
    if view is None and request.method not in self.methods:
        raise HTTPMethodNotAllowed(request.method, self.methods)

    method = getattr(self, view or request.method.lower())
    response = await method(request, **kwargs)
    return await self.make_response(request, response)
python
{ "resource": "" }
q1125
Handler.parse
train
async def parse(self, request):
    """Return a coroutine which parses data from the request depending on content-type.

    Usage: ::

        def post(self, request):
            data = await self.parse(request)
            # ...

    """
    if request.content_type in {'application/x-www-form-urlencoded', 'multipart/form-data'}:
        return await request.post()

    if request.content_type == 'application/json':
        return await request.json()

    return await request.text()
python
{ "resource": "" }
q1126
Application.cfg
train
def cfg(self):
    """Load the application configuration.

    This method loads configuration from python module.
    """
    config = LStruct(self.defaults)
    module = config['CONFIG'] = os.environ.get(
        CONFIGURATION_ENVIRON_VARIABLE, config['CONFIG'])

    if module:
        try:
            module = import_module(module)
            config.update({
                name: getattr(module, name) for name in dir(module)
                if name == name.upper() and not name.startswith('_')
            })
        except ImportError as exc:
            config.CONFIG = None
            self.logger.error("Error importing %s: %s", module, exc)

    # Patch configuration from ENV
    for name in config:
        if name.startswith('_') or name != name.upper() or name not in os.environ:
            continue
        try:
            config[name] = json.loads(os.environ[name])
        except ValueError:
            pass

    return config
python
{ "resource": "" }
q1127
Application.install
train
def install(self, plugin, name=None, **opts):
    """Install plugin to the application."""
    source = plugin
    if isinstance(plugin, str):
        module, _, attr = plugin.partition(':')
        module = import_module(module)
        plugin = getattr(module, attr or 'Plugin', None)

    if isinstance(plugin, types.ModuleType):
        plugin = getattr(module, 'Plugin', None)

    if plugin is None:
        raise MuffinException('Plugin is not found %r' % source)

    name = name or plugin.name
    if name in self.ps:
        raise MuffinException('Plugin with name `%s` is already installed.' % name)

    if isinstance(plugin, type):
        plugin = plugin(**opts)

    if hasattr(plugin, 'setup'):
        plugin.setup(self)

    if hasattr(plugin, 'middleware') and plugin.middleware not in self.middlewares:
        self.middlewares.append(plugin.middleware)

    if hasattr(plugin, 'startup'):
        self.on_startup.append(plugin.startup)

    if hasattr(plugin, 'cleanup'):
        self.on_cleanup.append(plugin.cleanup)

    # Save plugin links
    self.ps[name] = plugin
    return plugin
python
{ "resource": "" }
q1128
check_honeypot
train
def check_honeypot(func=None, field_name=None):
    """
    Check request.POST for valid honeypot field.

    Takes an optional field_name that defaults to HONEYPOT_FIELD_NAME if
    not specified.
    """
    # hack to reverse arguments if called with str param
    if isinstance(func, six.string_types):
        func, field_name = field_name, func

    def decorated(func):
        def inner(request, *args, **kwargs):
            response = verify_honeypot_value(request, field_name)
            if response:
                return response
            else:
                return func(request, *args, **kwargs)
        return wraps(func, assigned=available_attrs(func))(inner)

    if func is None:
        def decorator(func):
            return decorated(func)
        return decorator
    return decorated(func)
python
{ "resource": "" }
q1129
honeypot_exempt
train
def honeypot_exempt(view_func):
    """Mark view as exempt from honeypot validation."""
    # borrowing liberally from django's csrf_exempt
    def wrapped(*args, **kwargs):
        return view_func(*args, **kwargs)
    wrapped.honeypot_exempt = True
    return wraps(view_func, assigned=available_attrs(view_func))(wrapped)
python
{ "resource": "" }
q1130
main
train
def main():
    """Generate a badge based on command line arguments."""
    # Parse command line arguments
    args = parse_args()

    label = args.label
    threshold_text = args.args
    suffix = args.suffix

    # Check whether thresholds were sent as one word, and is in the
    # list of templates.  If so, swap in the template.
    if len(args.args) == 1 and args.args[0] in BADGE_TEMPLATES:
        template_name = args.args[0]
        template_dict = BADGE_TEMPLATES[template_name]
        threshold_text = template_dict['threshold'].split(' ')
        if not args.label:
            label = template_dict['label']
        if not args.suffix and 'suffix' in template_dict:
            suffix = template_dict['suffix']

    if not label:
        raise ValueError('Label has not been set. Please use --label argument.')

    # Create threshold list from args
    threshold_list = [x.split('=') for x in threshold_text]
    threshold_dict = {x[0]: x[1] for x in threshold_list}

    # Create badge object
    badge = Badge(label, args.value, value_prefix=args.prefix, value_suffix=suffix,
                  default_color=args.color, num_padding_chars=args.padding,
                  font_name=args.font, font_size=args.font_size, template=args.template,
                  use_max_when_value_exceeds=args.use_max, thresholds=threshold_dict,
                  value_format=args.value_format, text_color=args.text_color)

    if args.file:
        # Write badge SVG to file
        badge.write_badge(args.file, overwrite=args.overwrite)
    else:
        print(badge.badge_svg_text)
python
{ "resource": "" }
q1131
Badge.value_is_int
train
def value_is_int(self):
    """Identify whether the value text is an int."""
    try:
        a = float(self.value)
        b = int(a)
    except ValueError:
        return False
    else:
        return a == b
python
{ "resource": "" }
q1132
Badge.font_width
train
def font_width(self):
    """Return the badge font width."""
    return self.get_font_width(font_name=self.font_name, font_size=self.font_size)
python
{ "resource": "" }
q1133
Badge.color_split_position
train
def color_split_position(self):
    """The SVG x position where the color split should occur."""
    return self.get_text_width(' ') + self.label_width + \
        int(float(self.font_width) * float(self.num_padding_chars))
python
{ "resource": "" }
q1134
Badge.badge_width
train
def badge_width(self):
    """The total width of badge.

    >>> badge = Badge('pylint', '5', font_name='DejaVu Sans,Verdana,Geneva,sans-serif',
    ...               font_size=11)
    >>> badge.badge_width
    91
    """
    return self.get_text_width(' ' + ' ' * int(float(self.num_padding_chars) * 2.0)) \
        + self.label_width + self.value_width
python
{ "resource": "" }
q1135
Badge.badge_svg_text
train
def badge_svg_text(self):
    """The badge SVG text."""
    # Identify whether template is a file or the actual template text
    if len(self.template.split('\n')) == 1:
        with open(self.template, mode='r') as file_handle:
            badge_text = file_handle.read()
    else:
        badge_text = self.template

    return badge_text.replace('{{ badge width }}', str(self.badge_width)) \
        .replace('{{ font name }}', self.font_name) \
        .replace('{{ font size }}', str(self.font_size)) \
        .replace('{{ label }}', self.label) \
        .replace('{{ value }}', self.value_text) \
        .replace('{{ label anchor }}', str(self.label_anchor)) \
        .replace('{{ label anchor shadow }}', str(self.label_anchor_shadow)) \
        .replace('{{ value anchor }}', str(self.value_anchor)) \
        .replace('{{ value anchor shadow }}', str(self.value_anchor_shadow)) \
        .replace('{{ color }}', self.badge_color_code) \
        .replace('{{ label text color }}', self.label_text_color) \
        .replace('{{ value text color }}', self.value_text_color) \
        .replace('{{ color split x }}', str(self.color_split_position)) \
        .replace('{{ value width }}', str(self.badge_width - self.color_split_position))
python
{ "resource": "" }
q1136
Badge.get_text_width
train
def get_text_width(self, text):
    """Return the width of text.

    This implementation assumes a fixed font of:

        font-family="DejaVu Sans,Verdana,Geneva,sans-serif"
        font-size="11"

    >>> badge = Badge('x', 1, font_name='DejaVu Sans,Verdana,Geneva,sans-serif', font_size=11)
    >>> badge.get_text_width('pylint')
    42
    """
    return len(text) * self.get_font_width(self.font_name, self.font_size)
python
{ "resource": "" }
q1137
Badge.badge_color
train
def badge_color(self):
    """Find the badge color based on the thresholds."""
    # If no thresholds were passed then return the default color
    if not self.thresholds:
        return self.default_color

    if self.value_type == str:
        if self.value in self.thresholds:
            return self.thresholds[self.value]
        else:
            return self.default_color

    # Convert the threshold dictionary into a sorted list of lists
    threshold_list = [[self.value_type(i[0]), i[1]] for i in self.thresholds.items()]
    threshold_list.sort(key=lambda x: x[0])

    color = None
    for threshold, color in threshold_list:
        if float(self.value) < float(threshold):
            return color

    # If we drop out the top of the range then return the last max color
    if color and self.use_max_when_value_exceeds:
        return color
    else:
        return self.default_color
python
{ "resource": "" }
q1138
Badge.write_badge
train
def write_badge(self, file_path, overwrite=False):
    """Write badge to file."""
    # Validate path (part 1)
    if file_path.endswith('/'):
        raise Exception('File location may not be a directory.')

    # Get absolute filepath
    path = os.path.abspath(file_path)
    if not path.lower().endswith('.svg'):
        path += '.svg'

    # Validate path (part 2)
    if not overwrite and os.path.exists(path):
        raise Exception('File "{}" already exists.'.format(path))

    with open(path, mode='w') as file_handle:
        file_handle.write(self.badge_svg_text)
python
{ "resource": "" }
q1139
main
train
def main():
    """Run server."""
    global DEFAULT_SERVER_PORT, DEFAULT_SERVER_LISTEN_ADDRESS, DEFAULT_LOGGING_LEVEL

    # Check for environment variables
    if 'ANYBADGE_PORT' in environ:
        DEFAULT_SERVER_PORT = environ['ANYBADGE_PORT']

    if 'ANYBADGE_LISTEN_ADDRESS' in environ:
        DEFAULT_SERVER_LISTEN_ADDRESS = environ['ANYBADGE_LISTEN_ADDRESS']

    if 'ANYBADGE_LOG_LEVEL' in environ:
        DEFAULT_LOGGING_LEVEL = logging.getLevelName(environ['ANYBADGE_LOG_LEVEL'])

    # Parse command line args
    args = parse_args()

    # Set logging level
    logging_level = DEFAULT_LOGGING_LEVEL
    if args.debug:
        logging_level = logging.DEBUG

    logging.basicConfig(format='%(asctime)-15s %(levelname)s:%(filename)s(%(lineno)d):%(funcName)s: %(message)s',
                        level=logging_level)

    logger.info('Starting up anybadge server.')

    run(listen_address=args.listen_address, port=args.port)
python
{ "resource": "" }
q1140
get_object_name
train
def get_object_name(obj):
    """ Return the name of a given object """
    name_dispatch = {
        ast.Name: "id",
        ast.Attribute: "attr",
        ast.Call: "func",
        ast.FunctionDef: "name",
        ast.ClassDef: "name",
        ast.Subscript: "value",
    }

    # This is a new ast type in Python 3
    if hasattr(ast, "arg"):
        name_dispatch[ast.arg] = "arg"

    while not isinstance(obj, str):
        assert type(obj) in name_dispatch
        obj = getattr(obj, name_dispatch[type(obj)])

    return obj
python
{ "resource": "" }
q1141
get_attribute_name_id
train
def get_attribute_name_id(attr):
    """ Return the attribute name identifier """
    return attr.value.id if isinstance(attr.value, ast.Name) else None
python
{ "resource": "" }
q1142
is_class_method_bound
train
def is_class_method_bound(method, arg_name=BOUND_METHOD_ARGUMENT_NAME):
    """ Return whether a class method is bound to the class """
    if not method.args.args:
        return False

    first_arg = method.args.args[0]
    first_arg_name = get_object_name(first_arg)
    return first_arg_name == arg_name
python
{ "resource": "" }
q1143
get_class_methods
train
def get_class_methods(cls):
    """ Return methods associated with a given class """
    return [
        node
        for node in cls.body
        if isinstance(node, ast.FunctionDef)
    ]
python
{ "resource": "" }
q1144
get_class_variables
train
def get_class_variables(cls):
    """ Return class variables associated with a given class """
    return [
        target
        for node in cls.body
        if isinstance(node, ast.Assign)
        for target in node.targets
    ]
python
{ "resource": "" }
q1145
get_instance_variables
train
def get_instance_variables(node, bound_name_classifier=BOUND_METHOD_ARGUMENT_NAME):
    """ Return instance variables used in an AST node """
    node_attributes = [
        child
        for child in ast.walk(node)
        if isinstance(child, ast.Attribute)
        and get_attribute_name_id(child) == bound_name_classifier
    ]
    node_function_call_names = [
        get_object_name(child)
        for child in ast.walk(node)
        if isinstance(child, ast.Call)
    ]
    node_instance_variables = [
        attribute
        for attribute in node_attributes
        if get_object_name(attribute) not in node_function_call_names
    ]
    return node_instance_variables
python
{ "resource": "" }
q1146
get_module_classes
train
def get_module_classes(node):
    """ Return classes associated with a given module """
    return [
        child
        for child in ast.walk(node)
        if isinstance(child, ast.ClassDef)
    ]
python
{ "resource": "" }
q1147
recursively_get_files_from_directory
train
def recursively_get_files_from_directory(directory):
    """ Return all filenames found recursively under a directory """
    return [
        os.path.join(root, filename)
        for root, directories, filenames in os.walk(directory)
        for filename in filenames
    ]
python
{ "resource": "" }
q1148
ProtoChain.index
train
def index(self, value, start=None, stop=None):
    """Return first index of value."""
    return self.__alias__.index(value, start, stop)
python
{ "resource": "" }
q1149
OSPF.read_ospf
train
def read_ospf(self, length): """Read Open Shortest Path First. Structure of OSPF header [RFC 2328]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Version # | Type | Packet length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Router ID | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Area ID | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Checksum | AuType | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Authentication | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Authentication | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 ospf.version Version # 1 8 ospf.type Type (0/1) 2 16 ospf.len Packet Length (header includes) 4 32 ospf.router_id Router ID 8 64 ospf.area_id Area ID 12 96 ospf.chksum Checksum 14 112 ospf.autype AuType 16 128 ospf.auth Authentication """ if length is None: length = len(self) _vers = self._read_unpack(1) _type = self._read_unpack(1) _tlen = self._read_unpack(2) _rtid = self._read_id_numbers() _area = self._read_id_numbers() _csum = self._read_fileng(2) _autp = self._read_unpack(2) ospf = dict( version=_vers, type=TYPE.get(_type), len=_tlen, router_id=_rtid, area_id=_area, chksum=_csum, autype=AUTH.get(_autp) or 'Reserved', ) if _autp == 2: ospf['auth'] = self._read_encrypt_auth() else: ospf['auth'] = self._read_fileng(8) length = ospf['len'] - 24 ospf['packet'] = self._read_packet(header=24, payload=length) return self._decode_next_layer(ospf, length)
python
{ "resource": "" }
q1150
OSPF._read_id_numbers
train
def _read_id_numbers(self):
    """Read router and area IDs."""
    _byte = self._read_fileng(4)
    _addr = '.'.join([str(_) for _ in _byte])
    return _addr
python
{ "resource": "" }
q1151
OSPF._read_encrypt_auth
train
def _read_encrypt_auth(self): """Read Authentication field when Cryptographic Authentication is employed. Structure of Cryptographic Authentication [RFC 2328]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | 0 | Key ID | Auth Data Len | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Cryptographic sequence number | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 - Reserved (must be zero) 2 16 ospf.auth.key_id Key ID 3 24 ospf.auth.len Auth Data Length 4 32 ospf.auth.seq Cryptographic Sequence Number """ _resv = self._read_fileng(2) _keys = self._read_unpack(1) _alen = self._read_unpack(1) _seqn = self._read_unpack(4) auth = dict( key_id=_keys, len=_alen, seq=_seqn, ) return auth
python
{ "resource": "" }
q1152
int_check
train
def int_check(*args, func=None):
    """Check if arguments are integrals."""
    func = func or inspect.stack()[2][3]
    for var in args:
        if not isinstance(var, numbers.Integral):
            name = type(var).__name__
            raise ComplexError(
                f'Function {func} expected integral number, {name} got instead.')
python
{ "resource": "" }
q1153
real_check
train
def real_check(*args, func=None):
    """Check if arguments are real numbers."""
    func = func or inspect.stack()[2][3]
    for var in args:
        if not isinstance(var, numbers.Real):
            name = type(var).__name__
            raise ComplexError(
                f'Function {func} expected real number, {name} got instead.')
python
{ "resource": "" }
q1154
complex_check
train
def complex_check(*args, func=None):
    """Check if arguments are complex numbers."""
    func = func or inspect.stack()[2][3]
    for var in args:
        if not isinstance(var, numbers.Complex):
            name = type(var).__name__
            raise ComplexError(
                f'Function {func} expected complex number, {name} got instead.')
python
{ "resource": "" }
q1155
number_check
train
def number_check(*args, func=None):
    """Check if arguments are numbers."""
    func = func or inspect.stack()[2][3]
    for var in args:
        if not isinstance(var, numbers.Number):
            name = type(var).__name__
            raise DigitError(
                f'Function {func} expected number, {name} got instead.')
python
{ "resource": "" }
q1156
bytearray_check
train
def bytearray_check(*args, func=None):
    """Check if arguments are bytearray type."""
    func = func or inspect.stack()[2][3]
    for var in args:
        if not isinstance(var, (bytearray, collections.abc.ByteString,
                                collections.abc.MutableSequence)):
            name = type(var).__name__
            raise BytearrayError(
                f'Function {func} expected bytearray, {name} got instead.')
python
{ "resource": "" }
q1157
str_check
train
def str_check(*args, func=None):
    """Check if arguments are str type."""
    func = func or inspect.stack()[2][3]
    for var in args:
        if not isinstance(var, (str, collections.UserString,
                                collections.abc.Sequence)):
            name = type(var).__name__
            raise StringError(
                f'Function {func} expected str, {name} got instead.')
python
{ "resource": "" }
q1158
list_check
train
def list_check(*args, func=None):
    """Check if arguments are list type."""
    func = func or inspect.stack()[2][3]
    for var in args:
        if not isinstance(var, (list, collections.UserList,
                                collections.abc.MutableSequence)):
            name = type(var).__name__
            raise ListError(
                f'Function {func} expected list, {name} got instead.')
python
{ "resource": "" }
q1159
dict_check
train
def dict_check(*args, func=None):
    """Check if arguments are dict type."""
    func = func or inspect.stack()[2][3]
    for var in args:
        if not isinstance(var, (dict, collections.UserDict,
                                collections.abc.MutableMapping)):
            name = type(var).__name__
            raise DictError(
                f'Function {func} expected dict, {name} got instead.')
python
{ "resource": "" }
q1160
tuple_check
train
def tuple_check(*args, func=None):
    """Check if arguments are tuple type."""
    func = func or inspect.stack()[2][3]
    for var in args:
        if not isinstance(var, (tuple, collections.abc.Sequence)):
            name = type(var).__name__
            raise TupleError(
                f'Function {func} expected tuple, {name} got instead.')
python
{ "resource": "" }
q1161
io_check
train
def io_check(*args, func=None):
    """Check if arguments are file-like object."""
    func = func or inspect.stack()[2][3]
    for var in args:
        if not isinstance(var, io.IOBase):
            name = type(var).__name__
            raise IOObjError(
                f'Function {func} expected file-like object, {name} got instead.')
python
{ "resource": "" }
q1162
info_check
train
def info_check(*args, func=None):
    """Check if arguments are Info instance."""
    from pcapkit.corekit.infoclass import Info
    func = func or inspect.stack()[2][3]
    for var in args:
        if not isinstance(var, Info):
            name = type(var).__name__
            raise InfoError(
                f'Function {func} expected Info instance, {name} got instead.')
python
{ "resource": "" }
q1163
ip_check
train
def ip_check(*args, func=None):
    """Check if arguments are IP addresses."""
    func = func or inspect.stack()[2][3]
    for var in args:
        if not isinstance(var, ipaddress._IPAddressBase):
            name = type(var).__name__
            raise IPError(
                f'Function {func} expected IP address, {name} got instead.')
python
{ "resource": "" }
q1164
enum_check
train
def enum_check(*args, func=None):
    """Check if arguments are of protocol type."""
    func = func or inspect.stack()[2][3]
    for var in args:
        if not isinstance(var, (enum.EnumMeta, aenum.EnumMeta)):
            name = type(var).__name__
            raise EnumError(
                f'Function {func} expected enumeration, {name} got instead.')
python
{ "resource": "" }
q1165
frag_check
train
def frag_check(*args, protocol, func=None):
    """Check if arguments are valid fragments."""
    func = func or inspect.stack()[2][3]
    if 'IP' in protocol:
        _ip_frag_check(*args, func=func)
    elif 'TCP' in protocol:
        _tcp_frag_check(*args, func=func)
    else:
        raise FragmentError(f'Unknown fragmented protocol {protocol}.')
python
{ "resource": "" }
q1166
_ip_frag_check
train
def _ip_frag_check(*args, func=None):
    """Check if arguments are valid IP fragments."""
    func = func or inspect.stack()[2][3]
    for var in args:
        dict_check(var, func=func)
        bufid = var.get('bufid')
        str_check(bufid[3], func=func)
        bool_check(var.get('mf'), func=func)
        ip_check(bufid[0], bufid[1], func=func)
        bytearray_check(var.get('header'), var.get('payload'), func=func)
        int_check(bufid[2], var.get('num'), var.get('fo'),
                  var.get('ihl'), var.get('tl'), func=func)
python
{ "resource": "" }
q1167
pkt_check
train
def pkt_check(*args, func=None):
    """Check if arguments are valid packets."""
    func = func or inspect.stack()[2][3]
    for var in args:
        dict_check(var, func=func)
        dict_check(var.get('frame'), func=func)
        enum_check(var.get('protocol'), func=func)
        real_check(var.get('timestamp'), func=func)
        ip_check(var.get('src'), var.get('dst'), func=func)
        bool_check(var.get('syn'), var.get('fin'), func=func)
        int_check(var.get('srcport'), var.get('dstport'), var.get('index'), func=func)
python
{ "resource": "" }
q1168
Reassembly.fetch
train
def fetch(self):
    """Fetch datagram."""
    if self._newflg:
        self._newflg = False
        temp_dtgram = copy.deepcopy(self._dtgram)
        for (bufid, buffer) in self._buffer.items():
            temp_dtgram += self.submit(buffer, bufid=bufid)
        return tuple(temp_dtgram)
    return tuple(self._dtgram)
python
{ "resource": "" }
q1169
Reassembly.index
train
def index(self, pkt_num):
    """Return datagram index."""
    int_check(pkt_num)
    for counter, datagram in enumerate(self.datagram):
        if pkt_num in datagram.index:
            return counter
    return None
python
{ "resource": "" }
q1170
Reassembly.run
train
def run(self, packets):
    """Run automatically.

    Positional arguments:
        * packets -- list<dict>, list of packet dicts to be reassembled

    """
    for packet in packets:
        frag_check(packet, protocol=self.protocol)
        info = Info(packet)
        self.reassembly(info)
    self._newflg = True
python
{ "resource": "" }
q1171
MH.read_mh
train
def read_mh(self, length, extension): """Read Mobility Header. Structure of MH header [RFC 6275]: +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Payload Proto | Header Len | MH Type | Reserved | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Checksum | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | | | . . . Message Data . . . | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 mh.next Next Header 1 8 mh.length Header Length 2 16 mh.type Mobility Header Type 3 24 - Reserved 4 32 mh.chksum Checksum 6 48 mh.data Message Data """ if length is None: length = len(self) _next = self._read_protos(1) _hlen = self._read_unpack(1) _type = self._read_unpack(1) _temp = self._read_fileng(1) _csum = self._read_fileng(2) # _data = self._read_fileng((_hlen+1)*8) mh = dict( next=_next, length=(_hlen + 1) * 8, type=_MOBILITY_TYPE.get(_type, 'Unassigned'), chksum=_csum, ) length -= mh['length'] mh['packet'] = self._read_packet(header=mh['length'], payload=length) if extension: self._protos = None return mh return self._decode_next_layer(mh, _next, length)
python
{ "resource": "" }
q1172
IPX.read_ipx
train
def read_ipx(self, length): """Read Internetwork Packet Exchange. Structure of IPX header [RFC 1132]: Octets Bits Name Description 0 0 ipx.cksum Checksum 2 16 ipx.len Packet Length (header includes) 4 32 ipx.count Transport Control (hop count) 5 40 ipx.type Packet Type 6 48 ipx.dst Destination Address 18 144 ipx.src Source Address """ if length is None: length = len(self) _csum = self._read_fileng(2) _tlen = self._read_unpack(2) _ctrl = self._read_unpack(1) _type = self._read_unpack(1) _dsta = self._read_ipx_address() _srca = self._read_ipx_address() ipx = dict( chksum=_csum, len=_tlen, count=_ctrl, type=TYPE.get(_type), dst=_dsta, src=_srca, ) proto = ipx['type'] length = ipx['len'] - 30 ipx['packet'] = self._read_packet(header=30, payload=length) return self._decode_next_layer(ipx, proto, length)
python
{ "resource": "" }
q1173
IPX._read_ipx_address
train
def _read_ipx_address(self):
    """Read IPX address field.

    Structure of IPX address:

        Octets      Bits        Name                    Description
          0           0     ipx.addr.network        Network Number
          4          32     ipx.addr.node           Node Number
          10         80     ipx.addr.socket         Socket Number

    """
    # Address Number
    _byte = self._read_fileng(4)
    _ntwk = ':'.join(textwrap.wrap(_byte.hex(), 2))

    # Node Number (MAC)
    _byte = self._read_fileng(6)
    _node = ':'.join(textwrap.wrap(_byte.hex(), 2))
    _maca = '-'.join(textwrap.wrap(_byte.hex(), 2))

    # Socket Number
    _sock = self._read_fileng(2)

    # Whole Address
    _list = [_ntwk, _node, _sock.hex()]
    _addr = ':'.join(_list)

    addr = dict(
        network=_ntwk,
        node=_maca,
        socket=SOCK.get(int(_sock.hex(), base=16)) or _sock,
        addr=_addr,
    )

    return addr
python
{ "resource": "" }
q1174
ARP.read_arp
train
def read_arp(self, length): """Read Address Resolution Protocol. Structure of ARP header [RFC 826]: Octets Bits Name Description 0 0 arp.htype Hardware Type 2 16 arp.ptype Protocol Type 4 32 arp.hlen Hardware Address Length 5 40 arp.plen Protocol Address Length 6 48 arp.oper Operation 8 64 arp.sha Sender Hardware Address 14 112 arp.spa Sender Protocol Address 18 144 arp.tha Target Hardware Address 24 192 arp.tpa Target Protocol Address """ if length is None: length = len(self) _hwty = self._read_unpack(2) _ptty = self._read_unpack(2) _hlen = self._read_unpack(1) _plen = self._read_unpack(1) _oper = self._read_unpack(2) _shwa = self._read_addr_resolve(_hlen, _hwty) _spta = self._read_proto_resolve(_plen, _ptty) _thwa = self._read_addr_resolve(_hlen, _hwty) _tpta = self._read_proto_resolve(_plen, _ptty) if _oper in (5, 6, 7): self._acnm = 'DRARP' self._name = 'Dynamic Reverse Address Resolution Protocol' elif _oper in (8, 9): self._acnm = 'InARP' self._name = 'Inverse Address Resolution Protocol' elif _oper in (3, 4): self._acnm = 'RARP' self._name = 'Reverse Address Resolution Protocol' else: self._acnm = 'ARP' self._name = 'Address Resolution Protocol' _htype = HRD.get(_hwty) if re.match(r'.*Ethernet.*', _htype, re.IGNORECASE): _ptype = ETHERTYPE.get(_ptty) else: _ptype = f'Unknown [{_ptty}]' arp = dict( htype=_htype, ptype=_ptype, hlen=_hlen, plen=_plen, oper=OPER.get(_oper), sha=_shwa, spa=_spta, tha=_thwa, tpa=_tpta, len=8 + _hlen * 2 + _plen * 2, ) length -= arp['len'] arp['packet'] = self._read_packet(header=arp['len'], payload=length) return self._decode_next_layer(arp, None, length)
python
{ "resource": "" }
q1175
ARP._read_addr_resolve
train
def _read_addr_resolve(self, length, htype):
    """Resolve MAC address according to protocol.

    Positional arguments:
        * length -- int, hardware address length
        * htype -- int, hardware type

    Returns:
        * str -- MAC address

    """
    if htype == 1:  # Ethernet
        _byte = self._read_fileng(6)
        _addr = '-'.join(textwrap.wrap(_byte.hex(), 2))
    else:
        _addr = self._read_fileng(length)
    return _addr
python
{ "resource": "" }
q1176
ARP._read_proto_resolve
train
def _read_proto_resolve(self, length, ptype): """Resolve IP address according to protocol. Positional arguments: * length -- int, protocol address length * ptype -- int, protocol type Returns: * str -- IP address """ # if ptype == '0800': # IPv4 # _byte = self._read_fileng(4) # _addr = '.'.join([str(_) for _ in _byte]) # elif ptype == '86dd': # IPv6 # adlt = [] # list of IPv6 hexadecimal address # ctr_ = collections.defaultdict(int) # # counter for consecutive groups of zero value # ptr_ = 0 # start pointer of consecutive groups of zero value # last = False # if last hextet/group is zero value # omit = False # omitted flag, since IPv6 address can omit to `::` only once # for index in range(8): # hex_ = self._read_fileng(2).hex().lstrip('0') # if hex_: # if hextet is not '', directly append # adlt.append(hex_) # last = False # else: # if hextet is '', append '0' # adlt.append('0') # if last: # if last hextet is '', ascend counter # ctr_[ptr_] += 1 # else: # if last hextet is not '', record pointer # ptr_ = index # last = True # ctr_[ptr_] = 1 # ptr_ = max(ctr_, key=ctr_.get) if ctr_ else 0 # fetch start pointer with longest zero values # end_ = ptr_ + ctr_[ptr_] # calculate end pointer # if ctr_[ptr_] > 1: # only omit if zero values are in a consecutive group # del adlt[ptr_:end_] # remove zero values # if ptr_ == 0 and end_ == 8: # insert `::` if IPv6 unspecified address (::) # adlt.insert(ptr_, '::') # elif ptr_ == 0 or end_ == 8: # insert `:` if zero values are from start or at end # adlt.insert(ptr_, ':') # else: # insert '' otherwise # adlt.insert(ptr_, '') # _addr = ':'.join(adlt) # else: # _addr = self._read_fileng(length) # return _addr if ptype == '0800': # IPv4 return ipaddress.ip_address(self._read_fileng(4)) elif ptype == '86dd': # IPv6 return ipaddress.ip_address(self._read_fileng(16)) else: return self._read_fileng(length)
python
{ "resource": "" }
q1177
IPv6._read_ip_hextet
train
def _read_ip_hextet(self):
    """Read first four hextets of IPv6."""
    _htet = self._read_fileng(4).hex()
    _vers = _htet[0]                    # version number (6)
    _tcls = int(_htet[0:2], base=16)    # traffic class
    _flow = int(_htet[2:], base=16)     # flow label
    return (_vers, _tcls, _flow)
python
{ "resource": "" }
q1178
HIP.read_hip
train
def read_hip(self, length, extension): """Read Host Identity Protocol. Structure of HIP header [RFC 5201][RFC 7401]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Next Header | Header Length |0| Packet Type |Version| RES.|1| +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Checksum | Controls | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Sender's Host Identity Tag (HIT) | | | | | | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Receiver's Host Identity Tag (HIT) | | | | | | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | | / HIP Parameters / / / | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hip.next Next Header 1 8 hip.length Header Length 2 16 - Reserved (0) 2 17 hip.type Packet Type 3 24 hip.version Version 3 28 - Reserved 3 31 - Reserved (1) 4 32 hip.chksum Checksum 6 48 hip.control Controls 8 64 hip.shit Sender's Host Identity Tag 24 192 hip.rhit Receiver's Host Identity Tag 40 320 hip.parameters HIP Parameters """ if length is None: length = len(self) _next = self._read_protos(1) _hlen = self._read_unpack(1) _type = self._read_binary(1) if _type[0] != '0': raise ProtocolError('HIP: invalid format') _vers = self._read_binary(1) if _vers[7] != '1': raise ProtocolError('HIP: invalid format') _csum = self._read_fileng(2) _ctrl = self._read_binary(2) _shit = self._read_unpack(16) _rhit = self._read_unpack(16) hip = dict( next=_next, length=(_hlen + 1) * 8, type=_HIP_TYPES.get(int(_type[1:], base=2), 'Unassigned'), version=int(_vers[:4], base=2), chksum=_csum, control=dict( anonymous=True if int(_ctrl[15], base=2) else False, ), shit=_shit, rhit=_rhit, ) _prml = _hlen - 38 if _prml: parameters = self._read_hip_para(_prml, version=hip['version']) hip['parameters'] = parameters[0] # tuple of parameter acronyms hip.update(parameters[1]) # merge parameters info to buffer length -= hip['length'] hip['packet'] = self._read_packet(header=hip['length'], payload=length) if extension: self._protos = None return hip return self._decode_next_layer(hip, _next, length)
python
{ "resource": "" }
q1179
HIP._read_hip_para
train
def _read_hip_para(self, length, *, version): """Read HIP parameters. Positional arguments: * length -- int, length of parameters Keyword arguments: * version -- int, HIP version Returns: * dict -- extracted HIP parameters """ counter = 0 # length of read parameters optkind = list() # parameter type list options = dict() # dict of parameter data while counter < length: # break when eol triggered kind = self._read_binary(2) if not kind: break # get parameter type & C-bit code = int(kind, base=2) cbit = True if int(kind[15], base=2) else False # get parameter length clen = self._read_unpack(2) plen = 11 + clen - (clen + 3) % 8 # extract parameter dscp = _HIP_PARA.get(code, 'Unassigned') # if 0 <= code <= 1023 or 61440 <= code <= 65535: # desc = f'{dscp} (IETF Review)' # elif 1024 <= code <= 32767 or 49152 <= code <= 61439: # desc = f'{dscp} (Specification Required)' # elif 32768 <= code <= 49151: # desc = f'{dscp} (Reserved for Private Use)' # else: # raise ProtocolError(f'HIPv{version}: [Parano {code}] invalid parameter') data = _HIP_PROC(dscp)(self, code, cbit, clen, desc=dscp, length=plen, version=version) # record parameter data counter += plen if dscp in optkind: if isinstance(options[dscp], tuple): options[dscp] += (Info(data),) else: options[dscp] = (Info(options[dscp]), Info(data)) else: optkind.append(dscp) options[dscp] = data # check threshold if counter != length: raise ProtocolError(f'HIPv{version}: invalid format') return tuple(optkind), options
python
{ "resource": "" }
q1180
HIP._read_para_unassigned
train
def _read_para_unassigned(self, code, cbit, clen, *, desc, length, version): """Read HIP unassigned parameters. Structure of HIP unassigned parameters [RFC 5201][RFC 7401]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Type |C| Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | | / Contents / / +-+-+-+-+-+-+-+-+ | | Padding | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 para.type Parameter Type 1 15 para.critical Critical Bit 2 16 para.length Length of Contents 4 32 para.contents Contents - - - Padding """ unassigned = dict( type=desc, critical=cbit, length=clen, contents=self._read_fileng(clen), ) plen = length - clen if plen: self._read_fileng(plen) return unassigned
python
{ "resource": "" }
q1181
HIP._read_para_esp_info
train
def _read_para_esp_info(self, code, cbit, clen, *, desc, length, version): """Read HIP ESP_INFO parameter. Structure of HIP ESP_INFO parameter [RFC 7402]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Type | Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Reserved | KEYMAT Index | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | OLD SPI | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | NEW SPI | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 esp_info.type Parameter Type 1 15 esp_info.critical Critical Bit 2 16 esp_info.length Length of Contents 4 32 - Reserved 6 48 esp_info.index KEYMAT Index 8 64 esp_info.old_spi OLD SPI 12 96 esp_info.new_spi NEW SPI """ if clen != 12: raise ProtocolError(f'HIPv{version}: [Parano {code}] invalid format') _resv = self._read_fileng(2) _kind = self._read_unpack(2) _olds = self._read_unpack(2) _news = self._read_unpack(2) esp_info = dict( type=desc, critical=cbit, length=clen, index=_kind, old_spi=_olds, new_spi=_news, ) return esp_info
python
{ "resource": "" }
q1182
HIP._read_para_r1_counter
train
def _read_para_r1_counter(self, code, cbit, clen, *, desc, length, version): """Read HIP R1_COUNTER parameter. Structure of HIP R1_COUNTER parameter [RFC 5201][RFC 7401]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Type | Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Reserved, 4 bytes | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | R1 generation counter, 8 bytes | | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 ri_counter.type Parameter Type 1 15 ri_counter.critical Critical Bit 2 16 ri_counter.length Length of Contents 4 32 - Reserved 8 64 ri_counter.count Generation of Valid Puzzles """ if clen != 12: raise ProtocolError(f'HIPv{version}: [Parano {code}] invalid format') if code == 128 and version != 1: raise ProtocolError(f'HIPv{version}: [Parano {code}] invalid parameter') _resv = self._read_fileng(4) _genc = self._read_unpack(8) r1_counter = dict( type=desc, critical=cbit, length=clen, count=_genc, ) return r1_counter
python
{ "resource": "" }
q1183
HIP._read_para_locator_set
train
def _read_para_locator_set(self, code, cbit, clen, *, desc, length, version): """Read HIP LOCATOR_SET parameter. Structure of HIP LOCATOR_SET parameter [RFC 8046]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Type | Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Traffic Type | Locator Type | Locator Length | Reserved |P| +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Locator Lifetime | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Locator | | | | | | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ . . . . +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Traffic Type | Locator Type | Locator Length | Reserved |P| +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Locator Lifetime | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Locator | | | | | | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 locator_set.type Parameter Type 1 15 locator_set.critical Critical Bit 2 16 locator_set.length Length of Contents 4 32 locator.traffic Traffic Type 5 40 locator.type Locator Type 6 48 locator.length Locator Length 7 56 - Reserved 7 63 locator.preferred Preferred Locator 8 64 locator.lifetime Locator Lifetime 12 96 locator.object Locator ............ """ def _read_locator(kind, size): if kind == 0 and size == 16: return ipaddress.ip_address(self._read_fileng(16)) elif kind == 1 and size == 20: return dict( spi=self._read_unpack(4), ip=ipaddress.ip_address(self._read_fileng(16)), ) else: raise ProtocolError(f'HIPv{version}: [Parano {code}] invalid format') _size = 0 # length of read locators _locs = list() # list of locators while _size < clen: _traf = self._read_unpack(1) _loct = self._read_unpack(1) _locl = self._read_unpack(1) * 4 _resp = self._read_binary(1) _life = self._read_unpack(4) _lobj = _read_locator(_loct, _locl) _locs.append(Info( traffic=_traf, type=_loct, length=_locl, preferred=int(_resp[7], base=2), lifetime=_life, object=_lobj, )) locator_set = dict( type=desc, critical=cbit, length=clen, locator=tuple(_locs), ) return locator_set
python
{ "resource": "" }
q1184
HIP._read_para_puzzle
train
def _read_para_puzzle(self, code, cbit, clen, *, desc, length, version): """Read HIP PUZZLE parameter. Structure of HIP PUZZLE parameter [RFC 5201][RFC 7401]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Type | Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | #K, 1 byte | Lifetime | Opaque, 2 bytes | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Random #I, RHASH_len / 8 bytes | / / +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 puzzle.type Parameter Type 1 15 puzzle.critical Critical Bit 2 16 puzzle.length Length of Contents 4 32 puzzle.number Number of Verified Bits 5 40 puzzle.lifetime Lifetime 6 48 puzzle.opaque Opaque 8 64 puzzle.random Random Number """ if version == 1 and clen != 12: raise ProtocolError(f'HIPv{version}: [Parano {code}] invalid format') _numk = self._read_unpack(1) _time = self._read_unpack(1) _opak = self._read_fileng(2) _rand = self._read_unpack(clen-4) puzzle = dict( type=desc, critical=cbit, length=clen, number=_numk, lifetime=2 ** (_time - 32), opaque=_opak, random=_rand, ) _plen = length - clen if _plen: self._read_fileng(_plen) return puzzle
python
{ "resource": "" }
q1185
HIP._read_para_seq
train
def _read_para_seq(self, code, cbit, clen, *, desc, length, version): """Read HIP SEQ parameter. Structure of HIP SEQ parameter [RFC 7401]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Type | Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Update ID | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 seq.type Parameter Type 1 15 seq.critical Critical Bit 2 16 seq.length Length of Contents 4 32 seq.id Update ID """ if clen != 4: raise ProtocolError(f'HIPv{version}: [Parano {code}] invalid format') _upid = self._read_unpack(4) seq = dict( type=desc, critical=cbit, length=clen, id=_upid, ) return seq
python
{ "resource": "" }
q1186
HIP._read_para_ack
train
def _read_para_ack(self, code, cbit, clen, *, desc, length, version): """Read HIP ACK parameter. Structure of HIP ACK parameter [RFC 7401]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Type | Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | peer Update ID 1 | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ / peer Update ID n | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 ack.type Parameter Type 1 15 ack.critical Critical Bit 2 16 ack.length Length of Contents 4 32 ack.id Peer Update ID """ if clen % 4 != 0: raise ProtocolError(f'HIPv{version}: [Parano {code}] invalid format') _upid = list() for _ in range(clen // 4): _upid.append(self._read_unpack(4)) ack = dict( type=desc, critical=cbit, length=clen, id=tuple(_upid), ) return ack
python
{ "resource": "" }
q1187
HIP._read_para_dh_group_list
train
def _read_para_dh_group_list(self, code, cbit, clen, *, desc, length, version): """Read HIP DH_GROUP_LIST parameter. Structure of HIP DH_GROUP_LIST parameter [RFC 7401]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Type | Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | DH GROUP ID #1| DH GROUP ID #2| DH GROUP ID #3| DH GROUP ID #4| +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | DH GROUP ID #n| Padding | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 dh_group_list.type Parameter Type 1 15 dh_group_list.critical Critical Bit 2 16 dh_group_list.length Length of Contents 4 32 dh_group_list.id DH GROUP ID """ _dhid = list() for _ in range(clen): _dhid.append(_GROUP_ID.get(self._read_unpack(1), 'Unassigned')) dh_group_list = dict( type=desc, critical=cbit, length=clen, id=tuple(_dhid), ) _plen = length - clen if _plen: self._read_fileng(_plen) return dh_group_list
python
{ "resource": "" }
q1188
HIP._read_para_diffie_hellman
train
def _read_para_diffie_hellman(self, code, cbit, clen, *, desc, length, version): """Read HIP DIFFIE_HELLMAN parameter. Structure of HIP DIFFIE_HELLMAN parameter [RFC 7401]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Type | Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Group ID | Public Value Length | Public Value / +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ / | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ / | Padding | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 diffie_hellman.type Parameter Type 1 15 diffie_hellman.critical Critical Bit 2 16 diffie_hellman.length Length of Contents 4 32 diffie_hellman.id Group ID 5 40 diffie_hellman.pub_len Public Value Length 6 48 diffie_hellman.pub_val Public Value ? ? - Padding """ _gpid = self._read_unpack(1) _vlen = self._read_unpack(2) _pval = self._read_fileng(_vlen) diffie_hellman = dict( type=desc, critical=cbit, length=clen, id=_GROUP_ID.get(_gpid, 'Unassigned'), pub_len=_vlen, pub_val=_pval, ) _plen = length - clen if _plen: self._read_fileng(_plen) return diffie_hellman
python
{ "resource": "" }
q1189
HIP._read_para_hip_transform
train
def _read_para_hip_transform(self, code, cbit, clen, *, desc, length, version): """Read HIP HIP_TRANSFORM parameter. Structure of HIP HIP_TRANSFORM parameter [RFC 5201]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Type | Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Suite ID #1 | Suite ID #2 | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Suite ID #n | Padding | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hip_transform.type Parameter Type 1 15 hip_transform.critical Critical Bit 2 16 hip_transform.length Length of Contents 4 32 hip_transform.id Group ID ............ ? ? - Padding """ if version != 1: raise ProtocolError(f'HIPv{version}: [Parano {code}] invalid parameter') if clen % 2 != 0: raise ProtocolError(f'HIPv{version}: [Parano {code}] invalid format') _stid = list() for _ in range(clen // 2): _stid.append(_SUITE_ID.get(self._read_unpack(2), 'Unassigned')) hip_transform = dict( type=desc, critical=cbit, length=clen, id=_stid, ) _plen = length - clen if _plen: self._read_fileng(_plen) return hip_transform
python
{ "resource": "" }
q1190
HIP._read_para_hip_cipher
train
def _read_para_hip_cipher(self, code, cbit, clen, *, desc, length, version): """Read HIP HIP_CIPHER parameter. Structure of HIP HIP_CIPHER parameter [RFC 7401]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Type | Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Cipher ID #1 | Cipher ID #2 | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Cipher ID #n | Padding | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hip_cipher.type Parameter Type 1 15 hip_cipher.critical Critical Bit 2 16 hip_cipher.length Length of Contents 4 32 hip_cipher.id Cipher ID ............ ? ? - Padding """ if clen % 2 != 0: raise ProtocolError(f'HIPv{version}: [Parano {code}] invalid format') _cpid = list() for _ in range(clen // 2): _cpid.append(_CIPHER_ID.get(self._read_unpack(2), 'Unassigned')) hip_cipher = dict( type=desc, critical=cbit, length=clen, id=_cpid, ) _plen = length - clen if _plen: self._read_fileng(_plen) return hip_cipher
python
{ "resource": "" }
q1191
HIP._read_para_nat_traversal_mode
train
def _read_para_nat_traversal_mode(self, code, cbit, clen, *, desc, length, version): """Read HIP NAT_TRAVERSAL_MODE parameter. Structure of HIP NAT_TRAVERSAL_MODE parameter [RFC 5770]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Type | Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Reserved | Mode ID #1 | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Mode ID #2 | Mode ID #3 | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Mode ID #n | Padding | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 nat_traversal_mode.type Parameter Type 1 15 nat_traversal_mode.critical Critical Bit 2 16 nat_traversal_mode.length Length of Contents 4 32 - Reserved 6 48 nat_traversal_mode.id Mode ID ............ ? ? - Padding """ if clen % 2 != 0: raise ProtocolError(f'HIPv{version}: [Parano {code}] invalid format') _resv = self._read_fileng(2) _mdid = list() for _ in range((clen - 2) // 2): _mdid.append(_MODE_ID.get(self._read_unpack(2), 'Unassigned')) nat_traversal_mode = dict( type=desc, critical=cbit, length=clen, id=_mdid, ) _plen = length - clen if _plen: self._read_fileng(_plen) return nat_traversal_mode
python
{ "resource": "" }
q1192
HIP._read_para_transaction_pacing
train
def _read_para_transaction_pacing(self, code, cbit, clen, *, desc, length, version): """Read HIP TRANSACTION_PACING parameter. Structure of HIP TRANSACTION_PACING parameter [RFC 5770]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Type | Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Min Ta | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 transaction_pacing.type Parameter Type 1 15 transaction_pacing.critical Critical Bit 2 16 transaction_pacing.length Length of Contents 4 32 transaction_pacing.min_ta Min Ta """ if clen != 4: raise ProtocolError(f'HIPv{version}: [Parano {code}] invalid format') _data = self._read_unpack(4) transaction_pacing = dict( type=desc, critical=cbit, length=clen, min_ta=_data, ) return transaction_pacing
python
{ "resource": "" }
q1193
HIP._read_para_encrypted
train
def _read_para_encrypted(self, code, cbit, clen, *, desc, length, version): """Read HIP ENCRYPTED parameter. Structure of HIP ENCRYPTED parameter [RFC 7401]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Type | Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Reserved | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | IV / / / / +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ / / Encrypted data / / / / +-------------------------------+ / | Padding | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 encrypted.type Parameter Type 1 15 encrypted.critical Critical Bit 2 16 encrypted.length Length of Contents 4 32 - Reserved 8 48 encrypted.iv Initialization Vector ? ? encrypted.data Encrypted data ? ? - Padding """ _resv = self._read_fileng(4) _data = self._read_fileng(clen-4) encrypted = dict( type=desc, critical=cbit, length=clen, raw=_data, ) _plen = length - clen if _plen: self._read_fileng(_plen) return encrypted
python
{ "resource": "" }
q1194
HIP._read_para_host_id
train
def _read_para_host_id(self, code, cbit, clen, *, desc, length, version): """Read HIP HOST_ID parameter. Structure of HIP HOST_ID parameter [RFC 7401]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Type | Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | HI Length |DI-Type| DI Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Algorithm | Host Identity / +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ / | Domain Identifier / +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ / | Padding | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 host_id.type Parameter Type 1 15 host_id.critical Critical Bit 2 16 host_id.length Length of Contents 4 32 host_id.id_len Host Identity Length 6 48 host_id.di_type Domain Identifier Type 6 52 host_id.di_len Domain Identifier Length 8 64 host_id.algorithm Algorithm 10 80 host_id.host_id Host Identity ? ? host_id.domain_id Domain Identifier ? ? - Padding """ def _read_host_identifier(length, code): algorithm = _HI_ALGORITHM.get(code, 'Unassigned') if algorithm == 'ECDSA': host_id = dict( curve=_ECDSA_CURVE.get(self._read_unpack(2)), pubkey=self._read_fileng(length-2), ) elif algorithm == 'ECDSA_LOW': host_id = dict( curve=_ECDSA_LOW_CURVE.get(self._read_unpack(2)), pubkey=self._read_fileng(length-2), ) else: host_id = self._read_fileng(length) return algorithm, host_id def _read_domain_identifier(di_data): di_type = _DI_TYPE.get(int(di_data[:4], base=2), 'Unassigned') di_len = int(di_data[4:], base=2) domain_id = self._read_fileng(di_len) return di_type, di_len, domain_id _hlen = self._read_unpack(2) _didt = self._read_binary(2) _algo = self._read_unpack(2) _hidf = _read_host_identifier(_hlen, _algo) _didf = _read_domain_identifier(_didt) host_id = dict( type=desc, critical=cbit, length=clen, id_len=_hlen, di_type=_didf[0], di_len=_didf[1], algorithm=_hidf[0], host_id=_hidf[1], domain_id=_didf[2], ) _plen = length - clen if _plen: self._read_fileng(_plen) return host_id
python
{ "resource": "" }
q1195
HIP._read_para_hit_suite_list
train
def _read_para_hit_suite_list(self, code, cbit, clen, *, desc, length, version): """Read HIP HIT_SUITE_LIST parameter. Structure of HIP HIT_SUITE_LIST parameter [RFC 7401]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Type | Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | ID #1 | ID #2 | ID #3 | ID #4 | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | ID #n | Padding | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 hit_suite_list.type Parameter Type 1 15 hit_suite_list.critical Critical Bit 2 16 hit_suite_list.length Length of Contents 4 32 hit_suite_list.id HIT Suite ID ............ ? ? - Padding """ _hsid = list() for _ in range(clen): _hsid.append(_HIT_SUITE_ID.get(self._read_unpack(1), 'Unassigned')) hit_suite_list = dict( type=desc, critical=cbit, length=clen, id=tuple(_hsid), ) _plen = length - clen if _plen: self._read_fileng(_plen) return hit_suite_list
python
{ "resource": "" }
q1196
HIP._read_para_cert
train
def _read_para_cert(self, code, cbit, clen, *, desc, length, version): """Read HIP CERT parameter. Structure of HIP CERT parameter [RFC 7401]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Type | Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | CERT group | CERT count | CERT ID | CERT type | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Certificate / +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ / | Padding (variable length) | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 cert.type Parameter Type 1 15 cert.critical Critical Bit 2 16 cert.length Length of Contents 4 32 cert.group CERT Group 5 40 cert.count CERT Count 6 48 cert.id CERT ID 7 56 cert.cert_type CERT Type 8 64 cert.certificate Certificate ? ? - Padding """ _ctgp = self._read_unpack(1) _ctct = self._read_unpack(1) _ctid = self._read_unpack(1) _cttp = self._read_unpack(1) _ctdt = self._read_fileng(clen-4) cert = dict( type=desc, critical=cbit, length=clen, group=_GROUP_ID.get(_ctgp, 'Unassigned'), count=_ctct, id=_ctid, cert_type=_CERT_TYPE.get(_cttp, 'Unassigned'), certificate=_ctdt, ) _plen = length - clen if _plen: self._read_fileng(_plen) return cert
python
{ "resource": "" }
q1197
HIP._read_para_notification
train
def _read_para_notification(self, code, cbit, clen, *, desc, length, version):
    """Read HIP NOTIFICATION parameter.

    Structure of HIP NOTIFICATION parameter [RFC 7401]:
         0                   1                   2                   3
         0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        |             Type              |             Length            |
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        |           Reserved            |      Notify Message Type      |
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        |                                                               /
        /                       Notification Data                      /
        /                                               +---------------+
        /                                               |     Padding   |
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

        Octets      Bits        Name                      Description
          0           0     notification.type             Parameter Type
          1          15     notification.critical         Critical Bit
          2          16     notification.length           Length of Contents
          4          32     -                             Reserved
          6          48     notification.msg_type         Notify Message Type
          8          64     notification.data             Notification Data
          ?           ?     -                             Padding

    """
    _resv = self._read_fileng(2)
    _code = self._read_unpack(2)
    _data = self._read_fileng(clen-4)

    _type = _NOTIFICATION_TYPE.get(_code)
    if _type is None:
        if 1 <= _code <= 50:
            _type = 'Unassigned (IETF Review)'
        elif 51 <= _code <= 8191:
            _type = 'Unassigned (Specification Required; Error Message)'
        elif 8192 <= _code <= 16383:
            _type = 'Unassigned (Reserved for Private Use; Error Message)'
        elif 16384 <= _code <= 40959:
            _type = 'Unassigned (Specification Required; Status Message)'
        elif 40960 <= _code <= 65535:
            _type = 'Unassigned (Reserved for Private Use; Status Message)'
        else:
            raise ProtocolError(f'HIPv{version}: [Parano {code}] invalid format')

    notification = dict(
        type=desc,
        critical=cbit,
        length=clen,
        msg_type=_type,
        data=_data,
    )

    _plen = length - clen
    if _plen:
        self._read_fileng(_plen)

    return notification
python
{ "resource": "" }
q1198
HIP._read_para_echo_request_signed
train
def _read_para_echo_request_signed(self, code, cbit, clen, *, desc, length, version): """Read HIP ECHO_REQUEST_SIGNED parameter. Structure of HIP ECHO_REQUEST_SIGNED parameter [RFC 7401]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Type | Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Opaque data (variable length) | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 echo_request_signed.type Parameter Type 1 15 echo_request_signed.critical Critical Bit 2 16 echo_request_signed.length Length of Contents 4 32 echo_request_signed.data Opaque Data """ _data = self._read_fileng(clen) echo_request_signed = dict( type=desc, critical=cbit, length=clen, data=_data, ) _plen = length - clen if _plen: self._read_fileng(_plen) return echo_request_signed
python
{ "resource": "" }
q1199
HIP._read_para_reg_failed
train
def _read_para_reg_failed(self, code, cbit, clen, *, desc, length, version): """Read HIP REG_FAILED parameter. Structure of HIP REG_FAILED parameter [RFC 8003]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Type | Length | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Lifetime | Reg Type #1 | Reg Type #2 | Reg Type #3 | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | ... | ... | Reg Type #n | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Padding + | | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 reg_failed.type Parameter Type 1 15 reg_failed.critical Critical Bit 2 16 reg_failed.length Length of Contents 4 32 reg_failed.lifetime Lifetime 4 32 reg_failed.lifetime.min Min Lifetime 5 40 reg_failed.lifetime.max Max Lifetime 6 48 reg_failed.reg_typetype Reg Type ........... ? ? - Padding """ _life = collections.namedtuple('Lifetime', ('min', 'max')) _mint = self._read_unpack(1) _maxt = self._read_unpack(1) _type = list() for _ in range(clen-2): _code = self._read_unpack(1) _kind = _REG_FAILURE_TYPE.get(_code) if _kind is None: if 0 <= _code <= 200: _kind = 'Unassigned (IETF Review)' elif 201 <= _code <= 255: _kind = 'Unassigned (Reserved for Private Use)' else: raise ProtocolError(f'HIPv{version}: [Parano {code}] invalid format') _type.append(_kind) reg_failed = dict( type=desc, critical=cbit, length=clen, lifetime=_life(_mint, _maxt), reg_type=tuple(_type), ) _plen = length - clen if _plen: self._read_fileng(_plen) return reg_failed
python
{ "resource": "" }