Dataset columns:
  text          string   (lengths 89 to 104k)
  code_tokens   list
  avg_line_len  float64  (range 7.91 to 980)
  score         float64  (range 0 to 630)
def canGoBack(self):
    """
    Returns whether or not this wizard can move back.

    :return     <bool>
    """
    try:
        backId = self._navigation.index(self.currentId()) - 1
        if backId >= 0:
            self._navigation[backId]
        else:
            return False
    except StandardError:
        return False
    else:
        return True
[ "def", "canGoBack", "(", "self", ")", ":", "try", ":", "backId", "=", "self", ".", "_navigation", ".", "index", "(", "self", ".", "currentId", "(", ")", ")", "-", "1", "if", "backId", ">=", "0", ":", "self", ".", "_navigation", "[", "backId", "]", "else", ":", "return", "False", "except", "StandardError", ":", "return", "False", "else", ":", "return", "True" ]
25.25
16.5
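A note on the snippet above: StandardError was removed in Python 3, so that code is Python 2 only. A minimal sketch of a Python 3 port, assuming the same _navigation list and currentId() API:

def canGoBack(self):
    """Return whether this wizard can move back (Python 3 sketch)."""
    try:
        backId = self._navigation.index(self.currentId()) - 1
        self._navigation[backId]  # probe the entry, as in the original
        return backId >= 0
    except Exception:  # broadest Python 3 stand-in for StandardError
        return False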
def release(ctx, type_, repo=None, prebump_to=PREBUMP):
    """Make a new release.
    """
    unprebump(ctx)
    if bump_release(ctx, type_=type_):
        return
    this_version = _read_version()
    ctx.run('towncrier')
    ctx.run(f'git commit -am "Release {this_version}"')
    ctx.run(f'git tag -fa {this_version} -m "Version {this_version}"')
    if repo:
        if upload(ctx, repo=repo):
            return
    else:
        print('[release] Missing --repo, skip uploading')
    prebump(ctx, type_=prebump_to)
    next_version = _read_version()
    ctx.run(f'git commit -am "Prebump to {next_version}"')
[ "def", "release", "(", "ctx", ",", "type_", ",", "repo", "=", "None", ",", "prebump_to", "=", "PREBUMP", ")", ":", "unprebump", "(", "ctx", ")", "if", "bump_release", "(", "ctx", ",", "type_", "=", "type_", ")", ":", "return", "this_version", "=", "_read_version", "(", ")", "ctx", ".", "run", "(", "'towncrier'", ")", "ctx", ".", "run", "(", "f'git commit -am \"Release {this_version}\"'", ")", "ctx", ".", "run", "(", "f'git tag -fa {this_version} -m \"Version {this_version}\"'", ")", "if", "repo", ":", "if", "upload", "(", "ctx", ",", "repo", "=", "repo", ")", ":", "return", "else", ":", "print", "(", "'[release] Missing --repo, skip uploading'", ")", "prebump", "(", "ctx", ",", "type_", "=", "prebump_to", ")", "next_version", "=", "_read_version", "(", ")", "ctx", ".", "run", "(", "f'git commit -am \"Prebump to {next_version}\"'", ")" ]
27.136364
19.363636
def ToABMag(self, wave, flux, **kwargs):
    """Convert to ``abmag``.

    .. math::

        \\textnormal{AB}_{\\nu} = -2.5 \\; \\log(h \\lambda \\; \\textnormal{photlam}) - 48.6

    where :math:`h` is as defined in :ref:`pysynphot-constants`.

    Parameters
    ----------
    wave, flux : number or array_like
        Wavelength and flux values to be used for conversion.

    kwargs : dict
        Extra keywords (not used).

    Returns
    -------
    result : number or array_like
        Converted values.

    """
    arg = H * flux * wave
    return -1.085736 * N.log(arg) + ABZERO
[ "def", "ToABMag", "(", "self", ",", "wave", ",", "flux", ",", "*", "*", "kwargs", ")", ":", "arg", "=", "H", "*", "flux", "*", "wave", "return", "-", "1.085736", "*", "N", ".", "log", "(", "arg", ")", "+", "ABZERO" ]
25.48
22.4
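The constant -1.085736 in ToABMag is -2.5 / ln(10) rounded to six decimal places, so the return value is simply -2.5 * log10(H * flux * wave) + ABZERO written with a natural logarithm. A quick numpy check:

import numpy as np

arg = 1.5e-7  # any positive value of H * flux * wave
assert np.isclose(-1.085736 * np.log(arg), -2.5 * np.log10(arg), rtol=1e-6)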
def _authenticate(self, client):
    """Perform AMQP authentication."""
    yield client.authenticate(
        self._username, self._password,
        mechanism=self._auth_mechanism)
    returnValue(client)
[ "def", "_authenticate", "(", "self", ",", "client", ")", ":", "yield", "client", ".", "authenticate", "(", "self", ".", "_username", ",", "self", ".", "_password", ",", "mechanism", "=", "self", ".", "_auth_mechanism", ")", "returnValue", "(", "client", ")" ]
42
12.4
def split_options(opts, validate=True, warn=False, normalize=True):
    """Takes the options portion of a MongoDB URI, validates each option
    and returns the options in a dictionary.

    :Parameters:
        - `opts`: A string representing MongoDB URI options.
        - `validate`: If ``True`` (the default), validate and normalize all
          options.
        - `warn`: If ``False`` (the default), suppress all warnings raised
          during validation of options.
        - `normalize`: If ``True`` (the default), renames all options to their
          internally-used names.
    """
    and_idx = opts.find("&")
    semi_idx = opts.find(";")
    try:
        if and_idx >= 0 and semi_idx >= 0:
            raise InvalidURI("Can not mix '&' and ';' for option separators.")
        elif and_idx >= 0:
            options = _parse_options(opts, "&")
        elif semi_idx >= 0:
            options = _parse_options(opts, ";")
        elif opts.find("=") != -1:
            options = _parse_options(opts, None)
        else:
            raise ValueError
    except ValueError:
        raise InvalidURI("MongoDB URI options are key=value pairs.")
    options = _handle_option_deprecations(options)
    if validate:
        options = validate_options(options, warn)
    if normalize:
        options = _normalize_options(options)
    return options
[ "def", "split_options", "(", "opts", ",", "validate", "=", "True", ",", "warn", "=", "False", ",", "normalize", "=", "True", ")", ":", "and_idx", "=", "opts", ".", "find", "(", "\"&\"", ")", "semi_idx", "=", "opts", ".", "find", "(", "\";\"", ")", "try", ":", "if", "and_idx", ">=", "0", "and", "semi_idx", ">=", "0", ":", "raise", "InvalidURI", "(", "\"Can not mix '&' and ';' for option separators.\"", ")", "elif", "and_idx", ">=", "0", ":", "options", "=", "_parse_options", "(", "opts", ",", "\"&\"", ")", "elif", "semi_idx", ">=", "0", ":", "options", "=", "_parse_options", "(", "opts", ",", "\";\"", ")", "elif", "opts", ".", "find", "(", "\"=\"", ")", "!=", "-", "1", ":", "options", "=", "_parse_options", "(", "opts", ",", "None", ")", "else", ":", "raise", "ValueError", "except", "ValueError", ":", "raise", "InvalidURI", "(", "\"MongoDB URI options are key=value pairs.\"", ")", "options", "=", "_handle_option_deprecations", "(", "options", ")", "if", "validate", ":", "options", "=", "validate_options", "(", "options", ",", "warn", ")", "if", "normalize", ":", "options", "=", "_normalize_options", "(", "options", ")", "return", "options" ]
34.763158
19.473684
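A usage sketch for split_options, which is pymongo's internal parser for the option portion of a MongoDB URI (the exact output keys depend on normalization, which renames options to their internal names):

from pymongo.uri_parser import split_options

options = split_options("w=majority&ssl=true")
# -> dict of parsed, validated, and renamed option key/value pairs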
def _wrap(x, serializeFunc, encodeFunc=base64.urlsafe_b64encode, compress=True):
    """
    Wraps an element @x by serializing and then encoding the resulting bytes.
    """
    return encodeFunc(serializeFunc(x, compress))
[ "def", "_wrap", "(", "x", ",", "serializeFunc", ",", "encodeFunc", "=", "base64", ".", "urlsafe_b64encode", ",", "compress", "=", "True", ")", ":", "return", "encodeFunc", "(", "serializeFunc", "(", "x", ",", "compress", ")", ")" ]
44
17.2
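_wrap just composes a serializer with an encoder. A hypothetical usage sketch, assuming a pickle-plus-zlib serializer with the same (value, compress) signature (pickle_serialize is not part of the original code):

import base64
import pickle
import zlib

def pickle_serialize(x, compress):
    data = pickle.dumps(x)
    return zlib.compress(data) if compress else data

token = _wrap({'a': 1}, pickle_serialize)  # URL-safe base64 of the compressed pickle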
def handle(self, argv=None):
    """
    Main function.

    Parses command, loads settings and dispatches accordingly.
    """
    desc = ('Zappa - Deploy Python applications to AWS Lambda'
            ' and API Gateway.\n')
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument(
        '-v', '--version', action='version',
        version=pkg_resources.get_distribution("zappa").version,
        help='Print the zappa version'
    )
    parser.add_argument(
        '--color', default='auto', choices=['auto','never','always']
    )

    env_parser = argparse.ArgumentParser(add_help=False)
    me_group = env_parser.add_mutually_exclusive_group()
    all_help = ('Execute this command for all of our defined '
                'Zappa stages.')
    me_group.add_argument('--all', action='store_true', help=all_help)
    me_group.add_argument('stage_env', nargs='?')

    group = env_parser.add_argument_group()
    group.add_argument(
        '-a', '--app_function', help='The WSGI application function.'
    )
    group.add_argument(
        '-s', '--settings_file', help='The path to a Zappa settings file.'
    )
    group.add_argument(
        '-q', '--quiet', action='store_true', help='Silence all output.'
    )
    # https://github.com/Miserlou/Zappa/issues/407
    # Moved when 'template' command added.
    # Fuck Terraform.
    group.add_argument(
        '-j', '--json', action='store_true',
        help='Make the output of this command be machine readable.'
    )
    # https://github.com/Miserlou/Zappa/issues/891
    group.add_argument(
        '--disable_progress', action='store_true',
        help='Disable progress bars.'
    )

    ##
    # Certify
    ##
    subparsers = parser.add_subparsers(title='subcommands', dest='command')
    cert_parser = subparsers.add_parser(
        'certify', parents=[env_parser],
        help='Create and install SSL certificate'
    )
    cert_parser.add_argument(
        '--manual', action='store_true',
        help=("Gets new Let's Encrypt certificates, but prints them to console."
              "Does not update API Gateway domains.")
    )
    cert_parser.add_argument(
        '-y', '--yes', action='store_true', help='Auto confirm yes.'
    )

    ##
    # Deploy
    ##
    deploy_parser = subparsers.add_parser(
        'deploy', parents=[env_parser], help='Deploy application.'
    )
    deploy_parser.add_argument(
        '-z', '--zip',
        help='Deploy Lambda with specific local or S3 hosted zip package'
    )

    ##
    # Init
    ##
    init_parser = subparsers.add_parser('init', help='Initialize Zappa app.')

    ##
    # Package
    ##
    package_parser = subparsers.add_parser(
        'package', parents=[env_parser],
        help='Build the application zip package locally.'
    )
    package_parser.add_argument(
        '-o', '--output', help='Name of file to output the package to.'
    )

    ##
    # Template
    ##
    template_parser = subparsers.add_parser(
        'template', parents=[env_parser],
        help='Create a CloudFormation template for this API Gateway.'
    )
    template_parser.add_argument(
        '-l', '--lambda-arn', required=True,
        help='ARN of the Lambda function to template to.'
    )
    template_parser.add_argument(
        '-r', '--role-arn', required=True,
        help='ARN of the Role to template with.'
    )
    template_parser.add_argument(
        '-o', '--output', help='Name of file to output the template to.'
    )

    ##
    # Invocation
    ##
    invoke_parser = subparsers.add_parser(
        'invoke', parents=[env_parser],
        help='Invoke remote function.'
    )
    invoke_parser.add_argument(
        '--raw', action='store_true',
        help=('When invoking remotely, invoke this python as a string,'
              ' not as a modular path.')
    )
    invoke_parser.add_argument(
        '--no-color', action='store_true',
        help=("Don't color the output")
    )
    invoke_parser.add_argument('command_rest')

    ##
    # Manage
    ##
    manage_parser = subparsers.add_parser(
        'manage',
        help='Invoke remote Django manage.py commands.'
    )
    rest_help = ("Command in the form of <env> <command>. <env> is not "
                 "required if --all is specified")
    manage_parser.add_argument('--all', action='store_true', help=all_help)
    manage_parser.add_argument('command_rest', nargs='+', help=rest_help)
    manage_parser.add_argument(
        '--no-color', action='store_true',
        help=("Don't color the output")
    )
    # This is explicitly added here because this is the only subcommand that doesn't inherit from env_parser
    # https://github.com/Miserlou/Zappa/issues/1002
    manage_parser.add_argument(
        '-s', '--settings_file', help='The path to a Zappa settings file.'
    )

    ##
    # Rollback
    ##
    def positive_int(s):
        """ Ensure an arg is positive """
        i = int(s)
        if i < 0:
            msg = "This argument must be positive (got {})".format(s)
            raise argparse.ArgumentTypeError(msg)
        return i

    rollback_parser = subparsers.add_parser(
        'rollback', parents=[env_parser],
        help='Rollback deployed code to a previous version.'
    )
    rollback_parser.add_argument(
        '-n', '--num-rollback', type=positive_int, default=1,
        help='The number of versions to rollback.'
    )

    ##
    # Scheduling
    ##
    subparsers.add_parser(
        'schedule', parents=[env_parser],
        help='Schedule functions to occur at regular intervals.'
    )

    ##
    # Status
    ##
    status_parser = subparsers.add_parser(
        'status', parents=[env_parser],
        help='Show deployment status and event schedules.'
    )

    ##
    # Log Tailing
    ##
    tail_parser = subparsers.add_parser(
        'tail', parents=[env_parser], help='Tail deployment logs.'
    )
    tail_parser.add_argument(
        '--no-color', action='store_true',
        help="Don't color log tail output."
    )
    tail_parser.add_argument(
        '--http', action='store_true',
        help='Only show HTTP requests in tail output.'
    )
    tail_parser.add_argument(
        '--non-http', action='store_true',
        help='Only show non-HTTP requests in tail output.'
    )
    tail_parser.add_argument(
        '--since', type=str, default="100000s",
        help="Only show lines since a certain timeframe."
    )
    tail_parser.add_argument(
        '--filter', type=str, default="",
        help="Apply a filter pattern to the logs."
    )
    tail_parser.add_argument(
        '--force-color', action='store_true',
        help='Force coloring log tail output even if coloring support is not auto-detected. (example: piping)'
    )
    tail_parser.add_argument(
        '--disable-keep-open', action='store_true',
        help="Exit after printing the last available log, rather than keeping the log open."
    )

    ##
    # Undeploy
    ##
    undeploy_parser = subparsers.add_parser(
        'undeploy', parents=[env_parser], help='Undeploy application.'
    )
    undeploy_parser.add_argument(
        '--remove-logs', action='store_true',
        help=('Removes log groups of api gateway and lambda task'
              ' during the undeployment.'),
    )
    undeploy_parser.add_argument(
        '-y', '--yes', action='store_true', help='Auto confirm yes.'
    )

    ##
    # Unschedule
    ##
    subparsers.add_parser('unschedule', parents=[env_parser],
                          help='Unschedule functions.')

    ##
    # Updating
    ##
    update_parser = subparsers.add_parser(
        'update', parents=[env_parser], help='Update deployed application.'
    )
    update_parser.add_argument(
        '-z', '--zip',
        help='Update Lambda with specific local or S3 hosted zip package'
    )
    update_parser.add_argument(
        '-n', '--no-upload',
        help="Update configuration where appropriate, but don't upload new code"
    )

    ##
    # Debug
    ##
    subparsers.add_parser(
        'shell', parents=[env_parser],
        help='A debug shell with a loaded Zappa object.'
    )

    argcomplete.autocomplete(parser)
    args = parser.parse_args(argv)
    self.vargs = vars(args)

    if args.color == 'never':
        disable_click_colors()
    elif args.color == 'always':
        #TODO: Support aggressive coloring like "--force-color" on all commands
        pass
    elif args.color == 'auto':
        pass

    # Parse the input
    # NOTE(rmoe): Special case for manage command
    # The manage command can't have both stage_env and command_rest
    # arguments. Since they are both positional arguments argparse can't
    # differentiate the two. This causes problems when used with --all.
    # (e.g. "manage --all showmigrations admin" argparse thinks --all has
    # been specified AND that stage_env='showmigrations')
    # By having command_rest collect everything but --all we can split it
    # apart here instead of relying on argparse.
    if not args.command:
        parser.print_help()
        return

    if args.command == 'manage' and not self.vargs.get('all'):
        self.stage_env = self.vargs['command_rest'].pop(0)
    else:
        self.stage_env = self.vargs.get('stage_env')

    if args.command == 'package':
        self.load_credentials = False

    self.command = args.command
    self.disable_progress = self.vargs.get('disable_progress')
    if self.vargs.get('quiet'):
        self.silence()

    # We don't have any settings yet, so make those first!
    # (Settings-based interactions will fail
    # before a project has been initialized.)
    if self.command == 'init':
        self.init()
        return

    # Make sure there isn't a new version available
    if not self.vargs.get('json'):
        self.check_for_update()

    # Load and Validate Settings File
    self.load_settings_file(self.vargs.get('settings_file'))

    # Should we execute this for all stages, or just one?
    all_stages = self.vargs.get('all')
    stages = []
    if all_stages:
        # All stages!
        stages = self.zappa_settings.keys()
    else:
        # Just one env.
        if not self.stage_env:
            # If there's only one stage defined in the settings,
            # use that as the default.
            if len(self.zappa_settings.keys()) == 1:
                stages.append(list(self.zappa_settings.keys())[0])
            else:
                parser.error("Please supply a stage to interact with.")
        else:
            stages.append(self.stage_env)

    for stage in stages:
        try:
            self.dispatch_command(self.command, stage)
        except ClickException as e:
            # Discussion on exit codes: https://github.com/Miserlou/Zappa/issues/407
            e.show()
            sys.exit(e.exit_code)
[ "def", "handle", "(", "self", ",", "argv", "=", "None", ")", ":", "desc", "=", "(", "'Zappa - Deploy Python applications to AWS Lambda'", "' and API Gateway.\\n'", ")", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "desc", ")", "parser", ".", "add_argument", "(", "'-v'", ",", "'--version'", ",", "action", "=", "'version'", ",", "version", "=", "pkg_resources", ".", "get_distribution", "(", "\"zappa\"", ")", ".", "version", ",", "help", "=", "'Print the zappa version'", ")", "parser", ".", "add_argument", "(", "'--color'", ",", "default", "=", "'auto'", ",", "choices", "=", "[", "'auto'", ",", "'never'", ",", "'always'", "]", ")", "env_parser", "=", "argparse", ".", "ArgumentParser", "(", "add_help", "=", "False", ")", "me_group", "=", "env_parser", ".", "add_mutually_exclusive_group", "(", ")", "all_help", "=", "(", "'Execute this command for all of our defined '", "'Zappa stages.'", ")", "me_group", ".", "add_argument", "(", "'--all'", ",", "action", "=", "'store_true'", ",", "help", "=", "all_help", ")", "me_group", ".", "add_argument", "(", "'stage_env'", ",", "nargs", "=", "'?'", ")", "group", "=", "env_parser", ".", "add_argument_group", "(", ")", "group", ".", "add_argument", "(", "'-a'", ",", "'--app_function'", ",", "help", "=", "'The WSGI application function.'", ")", "group", ".", "add_argument", "(", "'-s'", ",", "'--settings_file'", ",", "help", "=", "'The path to a Zappa settings file.'", ")", "group", ".", "add_argument", "(", "'-q'", ",", "'--quiet'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Silence all output.'", ")", "# https://github.com/Miserlou/Zappa/issues/407", "# Moved when 'template' command added.", "# Fuck Terraform.", "group", ".", "add_argument", "(", "'-j'", ",", "'--json'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Make the output of this command be machine readable.'", ")", "# https://github.com/Miserlou/Zappa/issues/891", "group", ".", "add_argument", "(", "'--disable_progress'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Disable progress bars.'", ")", "##", "# Certify", "##", "subparsers", "=", "parser", ".", "add_subparsers", "(", "title", "=", "'subcommands'", ",", "dest", "=", "'command'", ")", "cert_parser", "=", "subparsers", ".", "add_parser", "(", "'certify'", ",", "parents", "=", "[", "env_parser", "]", ",", "help", "=", "'Create and install SSL certificate'", ")", "cert_parser", ".", "add_argument", "(", "'--manual'", ",", "action", "=", "'store_true'", ",", "help", "=", "(", "\"Gets new Let's Encrypt certificates, but prints them to console.\"", "\"Does not update API Gateway domains.\"", ")", ")", "cert_parser", ".", "add_argument", "(", "'-y'", ",", "'--yes'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Auto confirm yes.'", ")", "##", "# Deploy", "##", "deploy_parser", "=", "subparsers", ".", "add_parser", "(", "'deploy'", ",", "parents", "=", "[", "env_parser", "]", ",", "help", "=", "'Deploy application.'", ")", "deploy_parser", ".", "add_argument", "(", "'-z'", ",", "'--zip'", ",", "help", "=", "'Deploy Lambda with specific local or S3 hosted zip package'", ")", "##", "# Init", "##", "init_parser", "=", "subparsers", ".", "add_parser", "(", "'init'", ",", "help", "=", "'Initialize Zappa app.'", ")", "##", "# Package", "##", "package_parser", "=", "subparsers", ".", "add_parser", "(", "'package'", ",", "parents", "=", "[", "env_parser", "]", ",", "help", "=", "'Build the application zip package locally.'", ")", "package_parser", ".", "add_argument", "(", 
"'-o'", ",", "'--output'", ",", "help", "=", "'Name of file to output the package to.'", ")", "##", "# Template", "##", "template_parser", "=", "subparsers", ".", "add_parser", "(", "'template'", ",", "parents", "=", "[", "env_parser", "]", ",", "help", "=", "'Create a CloudFormation template for this API Gateway.'", ")", "template_parser", ".", "add_argument", "(", "'-l'", ",", "'--lambda-arn'", ",", "required", "=", "True", ",", "help", "=", "'ARN of the Lambda function to template to.'", ")", "template_parser", ".", "add_argument", "(", "'-r'", ",", "'--role-arn'", ",", "required", "=", "True", ",", "help", "=", "'ARN of the Role to template with.'", ")", "template_parser", ".", "add_argument", "(", "'-o'", ",", "'--output'", ",", "help", "=", "'Name of file to output the template to.'", ")", "##", "# Invocation", "##", "invoke_parser", "=", "subparsers", ".", "add_parser", "(", "'invoke'", ",", "parents", "=", "[", "env_parser", "]", ",", "help", "=", "'Invoke remote function.'", ")", "invoke_parser", ".", "add_argument", "(", "'--raw'", ",", "action", "=", "'store_true'", ",", "help", "=", "(", "'When invoking remotely, invoke this python as a string,'", "' not as a modular path.'", ")", ")", "invoke_parser", ".", "add_argument", "(", "'--no-color'", ",", "action", "=", "'store_true'", ",", "help", "=", "(", "\"Don't color the output\"", ")", ")", "invoke_parser", ".", "add_argument", "(", "'command_rest'", ")", "##", "# Manage", "##", "manage_parser", "=", "subparsers", ".", "add_parser", "(", "'manage'", ",", "help", "=", "'Invoke remote Django manage.py commands.'", ")", "rest_help", "=", "(", "\"Command in the form of <env> <command>. <env> is not \"", "\"required if --all is specified\"", ")", "manage_parser", ".", "add_argument", "(", "'--all'", ",", "action", "=", "'store_true'", ",", "help", "=", "all_help", ")", "manage_parser", ".", "add_argument", "(", "'command_rest'", ",", "nargs", "=", "'+'", ",", "help", "=", "rest_help", ")", "manage_parser", ".", "add_argument", "(", "'--no-color'", ",", "action", "=", "'store_true'", ",", "help", "=", "(", "\"Don't color the output\"", ")", ")", "# This is explicitly added here because this is the only subcommand that doesn't inherit from env_parser", "# https://github.com/Miserlou/Zappa/issues/1002", "manage_parser", ".", "add_argument", "(", "'-s'", ",", "'--settings_file'", ",", "help", "=", "'The path to a Zappa settings file.'", ")", "##", "# Rollback", "##", "def", "positive_int", "(", "s", ")", ":", "\"\"\" Ensure an arg is positive \"\"\"", "i", "=", "int", "(", "s", ")", "if", "i", "<", "0", ":", "msg", "=", "\"This argument must be positive (got {})\"", ".", "format", "(", "s", ")", "raise", "argparse", ".", "ArgumentTypeError", "(", "msg", ")", "return", "i", "rollback_parser", "=", "subparsers", ".", "add_parser", "(", "'rollback'", ",", "parents", "=", "[", "env_parser", "]", ",", "help", "=", "'Rollback deployed code to a previous version.'", ")", "rollback_parser", ".", "add_argument", "(", "'-n'", ",", "'--num-rollback'", ",", "type", "=", "positive_int", ",", "default", "=", "1", ",", "help", "=", "'The number of versions to rollback.'", ")", "##", "# Scheduling", "##", "subparsers", ".", "add_parser", "(", "'schedule'", ",", "parents", "=", "[", "env_parser", "]", ",", "help", "=", "'Schedule functions to occur at regular intervals.'", ")", "##", "# Status", "##", "status_parser", "=", "subparsers", ".", "add_parser", "(", "'status'", ",", "parents", "=", "[", "env_parser", "]", ",", "help", "=", "'Show deployment 
status and event schedules.'", ")", "##", "# Log Tailing", "##", "tail_parser", "=", "subparsers", ".", "add_parser", "(", "'tail'", ",", "parents", "=", "[", "env_parser", "]", ",", "help", "=", "'Tail deployment logs.'", ")", "tail_parser", ".", "add_argument", "(", "'--no-color'", ",", "action", "=", "'store_true'", ",", "help", "=", "\"Don't color log tail output.\"", ")", "tail_parser", ".", "add_argument", "(", "'--http'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Only show HTTP requests in tail output.'", ")", "tail_parser", ".", "add_argument", "(", "'--non-http'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Only show non-HTTP requests in tail output.'", ")", "tail_parser", ".", "add_argument", "(", "'--since'", ",", "type", "=", "str", ",", "default", "=", "\"100000s\"", ",", "help", "=", "\"Only show lines since a certain timeframe.\"", ")", "tail_parser", ".", "add_argument", "(", "'--filter'", ",", "type", "=", "str", ",", "default", "=", "\"\"", ",", "help", "=", "\"Apply a filter pattern to the logs.\"", ")", "tail_parser", ".", "add_argument", "(", "'--force-color'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Force coloring log tail output even if coloring support is not auto-detected. (example: piping)'", ")", "tail_parser", ".", "add_argument", "(", "'--disable-keep-open'", ",", "action", "=", "'store_true'", ",", "help", "=", "\"Exit after printing the last available log, rather than keeping the log open.\"", ")", "##", "# Undeploy", "##", "undeploy_parser", "=", "subparsers", ".", "add_parser", "(", "'undeploy'", ",", "parents", "=", "[", "env_parser", "]", ",", "help", "=", "'Undeploy application.'", ")", "undeploy_parser", ".", "add_argument", "(", "'--remove-logs'", ",", "action", "=", "'store_true'", ",", "help", "=", "(", "'Removes log groups of api gateway and lambda task'", "' during the undeployment.'", ")", ",", ")", "undeploy_parser", ".", "add_argument", "(", "'-y'", ",", "'--yes'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Auto confirm yes.'", ")", "##", "# Unschedule", "##", "subparsers", ".", "add_parser", "(", "'unschedule'", ",", "parents", "=", "[", "env_parser", "]", ",", "help", "=", "'Unschedule functions.'", ")", "##", "# Updating", "##", "update_parser", "=", "subparsers", ".", "add_parser", "(", "'update'", ",", "parents", "=", "[", "env_parser", "]", ",", "help", "=", "'Update deployed application.'", ")", "update_parser", ".", "add_argument", "(", "'-z'", ",", "'--zip'", ",", "help", "=", "'Update Lambda with specific local or S3 hosted zip package'", ")", "update_parser", ".", "add_argument", "(", "'-n'", ",", "'--no-upload'", ",", "help", "=", "\"Update configuration where appropriate, but don't upload new code\"", ")", "##", "# Debug", "##", "subparsers", ".", "add_parser", "(", "'shell'", ",", "parents", "=", "[", "env_parser", "]", ",", "help", "=", "'A debug shell with a loaded Zappa object.'", ")", "argcomplete", ".", "autocomplete", "(", "parser", ")", "args", "=", "parser", ".", "parse_args", "(", "argv", ")", "self", ".", "vargs", "=", "vars", "(", "args", ")", "if", "args", ".", "color", "==", "'never'", ":", "disable_click_colors", "(", ")", "elif", "args", ".", "color", "==", "'always'", ":", "#TODO: Support aggressive coloring like \"--force-color\" on all commands", "pass", "elif", "args", ".", "color", "==", "'auto'", ":", "pass", "# Parse the input", "# NOTE(rmoe): Special case for manage command", "# The manage command can't have both stage_env and command_rest", "# arguments. 
Since they are both positional arguments argparse can't", "# differentiate the two. This causes problems when used with --all.", "# (e.g. \"manage --all showmigrations admin\" argparse thinks --all has", "# been specified AND that stage_env='showmigrations')", "# By having command_rest collect everything but --all we can split it", "# apart here instead of relying on argparse.", "if", "not", "args", ".", "command", ":", "parser", ".", "print_help", "(", ")", "return", "if", "args", ".", "command", "==", "'manage'", "and", "not", "self", ".", "vargs", ".", "get", "(", "'all'", ")", ":", "self", ".", "stage_env", "=", "self", ".", "vargs", "[", "'command_rest'", "]", ".", "pop", "(", "0", ")", "else", ":", "self", ".", "stage_env", "=", "self", ".", "vargs", ".", "get", "(", "'stage_env'", ")", "if", "args", ".", "command", "==", "'package'", ":", "self", ".", "load_credentials", "=", "False", "self", ".", "command", "=", "args", ".", "command", "self", ".", "disable_progress", "=", "self", ".", "vargs", ".", "get", "(", "'disable_progress'", ")", "if", "self", ".", "vargs", ".", "get", "(", "'quiet'", ")", ":", "self", ".", "silence", "(", ")", "# We don't have any settings yet, so make those first!", "# (Settings-based interactions will fail", "# before a project has been initialized.)", "if", "self", ".", "command", "==", "'init'", ":", "self", ".", "init", "(", ")", "return", "# Make sure there isn't a new version available", "if", "not", "self", ".", "vargs", ".", "get", "(", "'json'", ")", ":", "self", ".", "check_for_update", "(", ")", "# Load and Validate Settings File", "self", ".", "load_settings_file", "(", "self", ".", "vargs", ".", "get", "(", "'settings_file'", ")", ")", "# Should we execute this for all stages, or just one?", "all_stages", "=", "self", ".", "vargs", ".", "get", "(", "'all'", ")", "stages", "=", "[", "]", "if", "all_stages", ":", "# All stages!", "stages", "=", "self", ".", "zappa_settings", ".", "keys", "(", ")", "else", ":", "# Just one env.", "if", "not", "self", ".", "stage_env", ":", "# If there's only one stage defined in the settings,", "# use that as the default.", "if", "len", "(", "self", ".", "zappa_settings", ".", "keys", "(", ")", ")", "==", "1", ":", "stages", ".", "append", "(", "list", "(", "self", ".", "zappa_settings", ".", "keys", "(", ")", ")", "[", "0", "]", ")", "else", ":", "parser", ".", "error", "(", "\"Please supply a stage to interact with.\"", ")", "else", ":", "stages", ".", "append", "(", "self", ".", "stage_env", ")", "for", "stage", "in", "stages", ":", "try", ":", "self", ".", "dispatch_command", "(", "self", ".", "command", ",", "stage", ")", "except", "ClickException", "as", "e", ":", "# Discussion on exit codes: https://github.com/Miserlou/Zappa/issues/407", "e", ".", "show", "(", ")", "sys", ".", "exit", "(", "e", ".", "exit_code", ")" ]
34.718563
22.919162
def get_all_styles(document, style):
    """Returns a list of the styles on which the specified style is based.

    :Args:
      - document (:class:`ooxml.doc.Document`): Document object
      - style (:class:`ooxml.doc.Style`): Style object

    :Returns:
      List of style names.
    """
    classes = []

    while True:
        classes.insert(0, get_style_name(style))
        if style.based_on:
            style = document.styles.get_by_id(style.based_on)
        else:
            break

    return classes
[ "def", "get_all_styles", "(", "document", ",", "style", ")", ":", "classes", "=", "[", "]", "while", "True", ":", "classes", ".", "insert", "(", "0", ",", "get_style_name", "(", "style", ")", ")", "if", "style", ".", "based_on", ":", "style", "=", "document", ".", "styles", ".", "get_by_id", "(", "style", ".", "based_on", ")", "else", ":", "break", "return", "classes" ]
22.363636
23.363636
def max_ret_portfolio(exp_rets):
    """
    Computes a long-only maximum return portfolio, i.e. selects
    the assets with maximal return. If there is more than one
    asset with maximal return, equally weight all of them.

    Parameters
    ----------
    exp_rets: pandas.Series
        Expected asset returns (often historical returns).

    Returns
    -------
    weights: pandas.Series
        Optimal asset weights.
    """
    if not isinstance(exp_rets, pd.Series):
        raise ValueError("Expected returns is not a Series")

    weights = exp_rets[:]
    weights[weights == weights.max()] = 1.0
    weights[weights != weights.max()] = 0.0
    weights /= weights.sum()
    return weights
[ "def", "max_ret_portfolio", "(", "exp_rets", ")", ":", "if", "not", "isinstance", "(", "exp_rets", ",", "pd", ".", "Series", ")", ":", "raise", "ValueError", "(", "\"Expected returns is not a Series\"", ")", "weights", "=", "exp_rets", "[", ":", "]", "weights", "[", "weights", "==", "weights", ".", "max", "(", ")", "]", "=", "1.0", "weights", "[", "weights", "!=", "weights", ".", "max", "(", ")", "]", "=", "0.0", "weights", "/=", "weights", ".", "sum", "(", ")", "return", "weights" ]
27.32
18.76
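The order of the two masked assignments in max_ret_portfolio matters: the first line sets the maximal entries to 1.0, so weights.max() is recomputed as 1.0 in the second line. The zeroing is therefore only clean when every expected return is below 1.0, which holds for typical per-period returns. A usage sketch, assuming pandas:

import pandas as pd

exp_rets = pd.Series({'AAPL': 0.12, 'MSFT': 0.12, 'GOOG': 0.07})
print(max_ret_portfolio(exp_rets))
# AAPL    0.5
# MSFT    0.5
# GOOG    0.0
# dtype: float64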
def _findAll(self, **kwargs):
    """Return a list of all children that match the specified criteria."""
    result = []
    for item in self._generateFind(**kwargs):
        result.append(item)
    return result
[ "def", "_findAll", "(", "self", ",", "*", "*", "kwargs", ")", ":", "result", "=", "[", "]", "for", "item", "in", "self", ".", "_generateFind", "(", "*", "*", "kwargs", ")", ":", "result", ".", "append", "(", "item", ")", "return", "result" ]
37.833333
11.5
def process_module(self, node):
    '''
    process a module

    the module's content is accessible via node.file_stream object
    '''
    nodepaths = []
    if not isinstance(node.path, list):
        nodepaths = [node.path]
    else:
        nodepaths = node.path

    for node_path in nodepaths:
        if node_path not in _PROCESSED_NODES:
            stylechecker = StyleGuide(
                parse_argv=False, config_file=False, quiet=2,
                reporter=PyLintPEP8Reporter
            )
            _PROCESSED_NODES[node_path] = stylechecker.check_files([node_path])

        for code, lineno, text in _PROCESSED_NODES[node_path].locations:
            pylintcode = '{0}8{1}'.format(code[0], code[1:])
            if pylintcode in self.msgs_map:
                # This will be handled by PyLint itself, skip it
                continue

            if pylintcode not in _KNOWN_PEP8_IDS:
                if pylintcode not in _UNHANDLED_PEP8_IDS:
                    _UNHANDLED_PEP8_IDS.append(pylintcode)
                    msg = 'The following code, {0}, was not handled by the PEP8 plugin'.format(pylintcode)
                    if logging.root.handlers:
                        logging.getLogger(__name__).warning(msg)
                    else:
                        sys.stderr.write('{0}\n'.format(msg))
                continue

            if pylintcode not in self._msgs:
                # Not for our class implementation to handle
                continue

            if code in ('E111', 'E113'):
                if _PROCESSED_NODES[node_path].lines[lineno-1].strip().startswith('#'):
                    # If E111 is triggered in a comment I consider it, at
                    # least, bad judgement. See https://github.com/jcrocholl/pep8/issues/300
                    # If E113 is triggered in comments, which I consider a bug,
                    # skip it. See https://github.com/jcrocholl/pep8/issues/274
                    continue

            try:
                self.add_message(pylintcode, line=lineno, args=(code, text))
            except TypeError as exc:
                if 'not all arguments' not in str(exc):
                    raise
                # Message does not support being passed the text arg
                self.add_message(pylintcode, line=lineno, args=(code,))
[ "def", "process_module", "(", "self", ",", "node", ")", ":", "nodepaths", "=", "[", "]", "if", "not", "isinstance", "(", "node", ".", "path", ",", "list", ")", ":", "nodepaths", "=", "[", "node", ".", "path", "]", "else", ":", "nodepaths", "=", "node", ".", "path", "for", "node_path", "in", "nodepaths", ":", "if", "node_path", "not", "in", "_PROCESSED_NODES", ":", "stylechecker", "=", "StyleGuide", "(", "parse_argv", "=", "False", ",", "config_file", "=", "False", ",", "quiet", "=", "2", ",", "reporter", "=", "PyLintPEP8Reporter", ")", "_PROCESSED_NODES", "[", "node_path", "]", "=", "stylechecker", ".", "check_files", "(", "[", "node_path", "]", ")", "for", "code", ",", "lineno", ",", "text", "in", "_PROCESSED_NODES", "[", "node_path", "]", ".", "locations", ":", "pylintcode", "=", "'{0}8{1}'", ".", "format", "(", "code", "[", "0", "]", ",", "code", "[", "1", ":", "]", ")", "if", "pylintcode", "in", "self", ".", "msgs_map", ":", "# This will be handled by PyLint itself, skip it", "continue", "if", "pylintcode", "not", "in", "_KNOWN_PEP8_IDS", ":", "if", "pylintcode", "not", "in", "_UNHANDLED_PEP8_IDS", ":", "_UNHANDLED_PEP8_IDS", ".", "append", "(", "pylintcode", ")", "msg", "=", "'The following code, {0}, was not handled by the PEP8 plugin'", ".", "format", "(", "pylintcode", ")", "if", "logging", ".", "root", ".", "handlers", ":", "logging", ".", "getLogger", "(", "__name__", ")", ".", "warning", "(", "msg", ")", "else", ":", "sys", ".", "stderr", ".", "write", "(", "'{0}\\n'", ".", "format", "(", "msg", ")", ")", "continue", "if", "pylintcode", "not", "in", "self", ".", "_msgs", ":", "# Not for our class implementation to handle", "continue", "if", "code", "in", "(", "'E111'", ",", "'E113'", ")", ":", "if", "_PROCESSED_NODES", "[", "node_path", "]", ".", "lines", "[", "lineno", "-", "1", "]", ".", "strip", "(", ")", ".", "startswith", "(", "'#'", ")", ":", "# If E111 is triggered in a comment I consider it, at", "# least, bad judgement. See https://github.com/jcrocholl/pep8/issues/300", "# If E113 is triggered in comments, which I consider a bug,", "# skip it. See https://github.com/jcrocholl/pep8/issues/274", "continue", "try", ":", "self", ".", "add_message", "(", "pylintcode", ",", "line", "=", "lineno", ",", "args", "=", "(", "code", ",", "text", ")", ")", "except", "TypeError", "as", "exc", ":", "if", "'not all arguments'", "not", "in", "str", "(", "exc", ")", ":", "raise", "# Message does not support being passed the text arg", "self", ".", "add_message", "(", "pylintcode", ",", "line", "=", "lineno", ",", "args", "=", "(", "code", ",", ")", ")" ]
43.928571
24.428571
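The pylintcode line in process_module maps a pep8 code to the plugin's pylint message ID by inserting an '8' after the leading letter, for example:

code = 'E501'
pylintcode = '{0}8{1}'.format(code[0], code[1:])  # -> 'E8501'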
def load(self, path):
    """Load sound from wave file.

    Uses :attr:`Waveform.load`, but will set the Waveform's name
    based on the sound filename.
    """
    (folder, filename) = os.path.split(path)
    (name, extension) = os.path.splitext(filename)
    return Sound(name, Waveform.load(path))
[ "def", "load", "(", "self", ",", "path", ")", ":", "(", "folder", ",", "filename", ")", "=", "os", ".", "path", ".", "split", "(", "path", ")", "(", "name", ",", "extension", ")", "=", "os", ".", "path", ".", "splitext", "(", "filename", ")", "return", "Sound", "(", "name", ",", "Waveform", ".", "load", "(", "path", ")", ")" ]
32.2
17.8
def from_dict(cls, d):
    """
    Generates an ion object from a dict created by as_dict().

    Args:
        d: {symbol: amount} dict.
    """
    charge = d.pop('charge')
    composition = Composition(d)
    return Ion(composition, charge)
[ "def", "from_dict", "(", "cls", ",", "d", ")", ":", "charge", "=", "d", ".", "pop", "(", "'charge'", ")", "composition", "=", "Composition", "(", "d", ")", "return", "Ion", "(", "composition", ",", "charge", ")" ]
25.545455
13.727273
def drop_retention_policy(self, name, database=None):
    """Drop an existing retention policy for a database.

    :param name: the name of the retention policy to drop
    :type name: str
    :param database: the database for which the retention policy is
        dropped. Defaults to current client's database
    :type database: str
    """
    query_string = (
        "DROP RETENTION POLICY {0} ON {1}"
    ).format(quote_ident(name), quote_ident(database or self._database))
    self.query(query_string, method="POST")
[ "def", "drop_retention_policy", "(", "self", ",", "name", ",", "database", "=", "None", ")", ":", "query_string", "=", "(", "\"DROP RETENTION POLICY {0} ON {1}\"", ")", ".", "format", "(", "quote_ident", "(", "name", ")", ",", "quote_ident", "(", "database", "or", "self", ".", "_database", ")", ")", "self", ".", "query", "(", "query_string", ",", "method", "=", "\"POST\"", ")" ]
42.846154
16.769231
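A usage sketch for drop_retention_policy, assuming an influxdb-python client; quote_ident wraps each identifier in double quotes before the statement is sent as a POST query:

client.drop_retention_policy('one_week', database='metrics')
# issues the InfluxQL: DROP RETENTION POLICY "one_week" ON "metrics"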
def run(input_file, options):
    """Runs the Generator using parsed options."""
    options.logger = log
    generator.Generator(input_file, **options.__dict__).execute()
[ "def", "run", "(", "input_file", ",", "options", ")", ":", "options", ".", "logger", "=", "log", "generator", ".", "Generator", "(", "input_file", ",", "*", "*", "options", ".", "__dict__", ")", ".", "execute", "(", ")" ]
33.6
18.4
def scan(backend, timeout=10):
    """Scan for miflora devices.

    Note: this must be run as root!
    """
    result = []
    for (mac, name) in backend.scan_for_devices(timeout):
        if (name is not None and name.lower() in VALID_DEVICE_NAMES) or \
                mac is not None and mac.upper().startswith(DEVICE_PREFIX):
            result.append(mac.upper())
    return result
[ "def", "scan", "(", "backend", ",", "timeout", "=", "10", ")", ":", "result", "=", "[", "]", "for", "(", "mac", ",", "name", ")", "in", "backend", ".", "scan_for_devices", "(", "timeout", ")", ":", "if", "(", "name", "is", "not", "None", "and", "name", ".", "lower", "(", ")", "in", "VALID_DEVICE_NAMES", ")", "or", "mac", "is", "not", "None", "and", "mac", ".", "upper", "(", ")", ".", "startswith", "(", "DEVICE_PREFIX", ")", ":", "result", ".", "append", "(", "mac", ".", "upper", "(", ")", ")", "return", "result" ]
34.363636
17.181818
def cancelReserveRN(self, CorpNum, RequestNum, UserID=None):
    """ Cancel a scheduled text-message transmission.
        args
            CorpNum : Popbill member's business registration number
            RequestNum : request number assigned when the transmission was requested
            UserID : Popbill member user ID
        return
            Processing result. consist of code and message
        raise
            PopbillException
    """
    if RequestNum == None or RequestNum == '':
        raise PopbillException(-99999999, "요청번호가 입력되지 않았습니다.")

    return self._httpget('/Message/Cancel/' + RequestNum, CorpNum, UserID)
[ "def", "cancelReserveRN", "(", "self", ",", "CorpNum", ",", "RequestNum", ",", "UserID", "=", "None", ")", ":", "if", "RequestNum", "==", "None", "or", "RequestNum", "==", "''", ":", "raise", "PopbillException", "(", "-", "99999999", ",", "\"요청번호가 입력되지 않았습니다.\")\r", "", "return", "self", ".", "_httpget", "(", "'/Message/Cancel/'", "+", "RequestNum", ",", "CorpNum", ",", "UserID", ")" ]
36.466667
15.6
def uninstall(client):
    """Uninstall Git hooks."""
    from git.index.fun import hook_path as get_hook_path

    for hook in HOOKS:
        hook_path = Path(get_hook_path(hook, client.repo.git_dir))
        if hook_path.exists():
            hook_path.unlink()
[ "def", "uninstall", "(", "client", ")", ":", "from", "git", ".", "index", ".", "fun", "import", "hook_path", "as", "get_hook_path", "for", "hook", "in", "HOOKS", ":", "hook_path", "=", "Path", "(", "get_hook_path", "(", "hook", ",", "client", ".", "repo", ".", "git_dir", ")", ")", "if", "hook_path", ".", "exists", "(", ")", ":", "hook_path", ".", "unlink", "(", ")" ]
32
17.25
def save(self, x509):
    """Persist this x509 object to disk"""
    self.x509 = x509
    with open_tls_file(self.file_path, 'w', private=self.is_private()) as fh:
        fh.write(str(self))
[ "def", "save", "(", "self", ",", "x509", ")", ":", "self", ".", "x509", "=", "x509", "with", "open_tls_file", "(", "self", ".", "file_path", ",", "'w'", ",", "private", "=", "self", ".", "is_private", "(", ")", ")", "as", "fh", ":", "fh", ".", "write", "(", "str", "(", "self", ")", ")" ]
32.714286
15.857143
def figure_buffer(figs):
    '''Extract raw image buffer from matplotlib figure shaped as 1xHxWx3.'''
    assert len(figs) > 0, 'No figure buffers given. Forgot to return from draw call?'
    buffers = []
    w, h = figs[0].canvas.get_width_height()
    for f in figs:
        wf, hf = f.canvas.get_width_height()
        assert wf == w and hf == h, 'All canvas objects need to have same size'
        buffers.append(np.fromstring(f.canvas.tostring_rgb(), dtype=np.uint8).reshape(h, w, 3))
    return np.stack(buffers)
[ "def", "figure_buffer", "(", "figs", ")", ":", "assert", "len", "(", "figs", ")", ">", "0", ",", "'No figure buffers given. Forgot to return from draw call?'", "buffers", "=", "[", "]", "w", ",", "h", "=", "figs", "[", "0", "]", ".", "canvas", ".", "get_width_height", "(", ")", "for", "f", "in", "figs", ":", "wf", ",", "hf", "=", "f", ".", "canvas", ".", "get_width_height", "(", ")", "assert", "wf", "==", "w", "and", "hf", "==", "h", ",", "'All canvas objects need to have same size'", "buffers", ".", "append", "(", "np", ".", "fromstring", "(", "f", ".", "canvas", ".", "tostring_rgb", "(", ")", ",", "dtype", "=", "np", ".", "uint8", ")", ".", "reshape", "(", "h", ",", "w", ",", "3", ")", ")", "return", "np", ".", "stack", "(", "buffers", ")" ]
46.272727
27
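np.fromstring is deprecated in modern NumPy, and tostring_rgb was removed from recent Matplotlib Agg canvases; a sketch of the same extraction with current APIs (frombuffer plus buffer_rgba, dropping the alpha channel), assuming Agg-backed figures:

import numpy as np

def figure_buffer_modern(figs):
    """Stack figures as an (N, H, W, 3) uint8 array using current APIs."""
    w, h = figs[0].canvas.get_width_height()
    buffers = []
    for f in figs:
        f.canvas.draw()  # make sure the renderer has been run
        rgba = np.frombuffer(f.canvas.buffer_rgba(), dtype=np.uint8)
        buffers.append(rgba.reshape(h, w, 4)[..., :3])  # drop alpha
    return np.stack(buffers)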
def _clean_multiple_def(self, ready):
    """Cleans the list of variable definitions extracted from the definition
    text to get hold of the dimensions and default values.
    """
    result = []
    for entry in ready:
        if isinstance(entry, list):
            #This variable declaration has a default value specified, which is in the
            #second slot of the list.
            default = self._collapse_default(entry[1])
            #For hard-coded array defaults, add the parenthesis back in.
            if default[0] == "/":
                default = "({})".format(default)
            namedim = entry[0]
        else:
            default = None
            namedim = entry

        if isinstance(namedim, str):
            name = namedim.strip().strip(",")
            dimension = None
            D = 0
        else:
            #Namedim is a tuple of (name, dimension)
            name = namedim[0].strip()
            D = count_dimensions(namedim[1])
            dimension = self._collapse_default(namedim[1])

        result.append((name, dimension, default, D))
    return result
[ "def", "_clean_multiple_def", "(", "self", ",", "ready", ")", ":", "result", "=", "[", "]", "for", "entry", "in", "ready", ":", "if", "isinstance", "(", "entry", ",", "list", ")", ":", "#This variable declaration has a default value specified, which is in the", "#second slot of the list.", "default", "=", "self", ".", "_collapse_default", "(", "entry", "[", "1", "]", ")", "#For hard-coded array defaults, add the parenthesis back in.", "if", "default", "[", "0", "]", "==", "\"/\"", ":", "default", "=", "\"({})\"", ".", "format", "(", "default", ")", "namedim", "=", "entry", "[", "0", "]", "else", ":", "default", "=", "None", "namedim", "=", "entry", "if", "isinstance", "(", "namedim", ",", "str", ")", ":", "name", "=", "namedim", ".", "strip", "(", ")", ".", "strip", "(", "\",\"", ")", "dimension", "=", "None", "D", "=", "0", "else", ":", "#Namedim is a tuple of (name, dimension)", "name", "=", "namedim", "[", "0", "]", ".", "strip", "(", ")", "D", "=", "count_dimensions", "(", "namedim", "[", "1", "]", ")", "dimension", "=", "self", ".", "_collapse_default", "(", "namedim", "[", "1", "]", ")", "result", ".", "append", "(", "(", "name", ",", "dimension", ",", "default", ",", "D", ")", ")", "return", "result" ]
39.433333
14.666667
def su(self) -> 'Gate':
    """Convert gate tensor to the special unitary group."""
    rank = 2**self.qubit_nb
    U = asarray(self.asoperator())
    U /= np.linalg.det(U) ** (1/rank)
    return Gate(tensor=U, qubits=self.qubits)
[ "def", "su", "(", "self", ")", "->", "'Gate'", ":", "rank", "=", "2", "**", "self", ".", "qubit_nb", "U", "=", "asarray", "(", "self", ".", "asoperator", "(", ")", ")", "U", "/=", "np", ".", "linalg", ".", "det", "(", "U", ")", "**", "(", "1", "/", "rank", ")", "return", "Gate", "(", "tensor", "=", "U", ",", "qubits", "=", "self", ".", "qubits", ")" ]
40.833333
6.333333
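The division in su works because det(cU) = c**d * det(U) for a d x d matrix, so dividing U by det(U)**(1/d) yields determinant 1 (up to the branch chosen for the root). A quick numpy check on a hypothetical one-qubit case (d = 2):

import numpy as np

U = np.array([[2j, 0], [0, 2j]])     # a unitary scaled by 2j; det(U) = -4
V = U / np.linalg.det(U) ** (1 / 2)
assert np.isclose(np.linalg.det(V), 1.0)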
def set_register(self, motors):
    """ Gets the value from :class:`~pypot.dynamixel.motor.DxlMotor`
    and sets it to the specified register. """
    if not motors:
        return

    ids = [m.id for m in motors]
    values = (m.__dict__[self.varname] for m in motors)
    getattr(self.io, 'set_{}'.format(self.regname))(dict(zip(ids, values)))

    for m in motors:
        m._write_synced[self.varname].done()
[ "def", "set_register", "(", "self", ",", "motors", ")", ":", "if", "not", "motors", ":", "return", "ids", "=", "[", "m", ".", "id", "for", "m", "in", "motors", "]", "values", "=", "(", "m", ".", "__dict__", "[", "self", ".", "varname", "]", "for", "m", "in", "motors", ")", "getattr", "(", "self", ".", "io", ",", "'set_{}'", ".", "format", "(", "self", ".", "regname", ")", ")", "(", "dict", "(", "zip", "(", "ids", ",", "values", ")", ")", ")", "for", "m", "in", "motors", ":", "m", ".", "_write_synced", "[", "self", ".", "varname", "]", ".", "done", "(", ")" ]
39.272727
19.545455
def do_node_set(self, element, decl, pseudo):
    """Implement node-set declaration."""
    target = serialize(decl.value).strip()
    step = self.state[self.state['current_step']]
    elem = self.current_target().tree
    _, valstep = self.lookup('pending', target)
    if not valstep:
        step['pending'][target] = [('nodeset', elem)]
    else:
        self.state[valstep]['pending'][target] = [('nodeset', elem)]
[ "def", "do_node_set", "(", "self", ",", "element", ",", "decl", ",", "pseudo", ")", ":", "target", "=", "serialize", "(", "decl", ".", "value", ")", ".", "strip", "(", ")", "step", "=", "self", ".", "state", "[", "self", ".", "state", "[", "'current_step'", "]", "]", "elem", "=", "self", ".", "current_target", "(", ")", ".", "tree", "_", ",", "valstep", "=", "self", ".", "lookup", "(", "'pending'", ",", "target", ")", "if", "not", "valstep", ":", "step", "[", "'pending'", "]", "[", "target", "]", "=", "[", "(", "'nodeset'", ",", "elem", ")", "]", "else", ":", "self", ".", "state", "[", "valstep", "]", "[", "'pending'", "]", "[", "target", "]", "=", "[", "(", "'nodeset'", ",", "elem", ")", "]" ]
44.6
12.9
def desc(self):
    """return the description of this endpoint"""
    doc = inspect.getdoc(self.controller_class)
    if not doc:
        doc = ''
    return doc
[ "def", "desc", "(", "self", ")", ":", "doc", "=", "inspect", ".", "getdoc", "(", "self", ".", "controller_class", ")", "if", "not", "doc", ":", "doc", "=", "''", "return", "doc" ]
33
14
def analyze_text(input_text, apis=DEFAULT_APIS, **kwargs):
    """
    Given input text, returns the results of specified text apis. Possible apis
    include: [ 'text_tags', 'political', 'sentiment', 'language' ]

    Example usage:

    .. code-block:: python

       >>> import indicoio
       >>> text = 'Monday: Delightful with mostly sunny skies. Highs in the low 70s.'
       >>> results = indicoio.analyze_text(data = text, apis = ["language", "sentiment"])
       >>> language_results = results["language"]
       >>> sentiment_results = results["sentiment"]

    :param text: The text to be analyzed.
    :param apis: List of apis to use.
    :type text: str or unicode
    :type apis: list of str
    :rtype: Dictionary of api responses
    """
    cloud = kwargs.pop('cloud', None)
    batch = kwargs.pop('batch', False)
    api_key = kwargs.pop('api_key', None)

    return multi(
        data=input_text,
        datatype="text",
        cloud=cloud,
        batch=batch,
        api_key=api_key,
        apis=apis,
        accepted_apis=TEXT_APIS,
        **kwargs
    )
[ "def", "analyze_text", "(", "input_text", ",", "apis", "=", "DEFAULT_APIS", ",", "*", "*", "kwargs", ")", ":", "cloud", "=", "kwargs", ".", "pop", "(", "'cloud'", ",", "None", ")", "batch", "=", "kwargs", ".", "pop", "(", "'batch'", ",", "False", ")", "api_key", "=", "kwargs", ".", "pop", "(", "'api_key'", ",", "None", ")", "return", "multi", "(", "data", "=", "input_text", ",", "datatype", "=", "\"text\"", ",", "cloud", "=", "cloud", ",", "batch", "=", "batch", ",", "api_key", "=", "api_key", ",", "apis", "=", "apis", ",", "accepted_apis", "=", "TEXT_APIS", ",", "*", "*", "kwargs", ")" ]
31
19.057143
def snmp_server_group_write(self, **kwargs):
    """Auto Generated Code
    """
    config = ET.Element("config")
    snmp_server = ET.SubElement(config, "snmp-server", xmlns="urn:brocade.com:mgmt:brocade-snmp")
    group = ET.SubElement(snmp_server, "group")
    group_name_key = ET.SubElement(group, "group-name")
    group_name_key.text = kwargs.pop('group_name')
    group_version_key = ET.SubElement(group, "group-version")
    group_version_key.text = kwargs.pop('group_version')
    write = ET.SubElement(group, "write")
    write.text = kwargs.pop('write')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
[ "def", "snmp_server_group_write", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "snmp_server", "=", "ET", ".", "SubElement", "(", "config", ",", "\"snmp-server\"", ",", "xmlns", "=", "\"urn:brocade.com:mgmt:brocade-snmp\"", ")", "group", "=", "ET", ".", "SubElement", "(", "snmp_server", ",", "\"group\"", ")", "group_name_key", "=", "ET", ".", "SubElement", "(", "group", ",", "\"group-name\"", ")", "group_name_key", ".", "text", "=", "kwargs", ".", "pop", "(", "'group_name'", ")", "group_version_key", "=", "ET", ".", "SubElement", "(", "group", ",", "\"group-version\"", ")", "group_version_key", ".", "text", "=", "kwargs", ".", "pop", "(", "'group_version'", ")", "write", "=", "ET", ".", "SubElement", "(", "group", ",", "\"write\"", ")", "write", ".", "text", "=", "kwargs", ".", "pop", "(", "'write'", ")", "callback", "=", "kwargs", ".", "pop", "(", "'callback'", ",", "self", ".", "_callback", ")", "return", "callback", "(", "config", ")" ]
45.666667
15.2
def extracting(self, *names, **kwargs):
    """Asserts that val is a collection, then extracts the named properties or
    named zero-arg methods into a list (or list of tuples if multiple names
    are given)."""
    if not isinstance(self.val, Iterable):
        raise TypeError('val is not iterable')
    if isinstance(self.val, str_types):
        raise TypeError('val must not be string')
    if len(names) == 0:
        raise ValueError('one or more name args must be given')

    def _extract(x, name):
        if self._check_dict_like(x, check_values=False, return_as_bool=True):
            if name in x:
                return x[name]
            else:
                raise ValueError('item keys %s did not contain key <%s>' % (list(x.keys()), name))
        elif isinstance(x, Iterable):
            self._check_iterable(x, name='item')
            return x[name]
        elif hasattr(x, name):
            attr = getattr(x, name)
            if callable(attr):
                try:
                    return attr()
                except TypeError:
                    raise ValueError('val method <%s()> exists, but is not zero-arg method' % name)
            else:
                return attr
        else:
            raise ValueError('val does not have property or zero-arg method <%s>' % name)

    def _filter(x):
        if 'filter' in kwargs:
            if isinstance(kwargs['filter'], str_types):
                return bool(_extract(x, kwargs['filter']))
            elif self._check_dict_like(kwargs['filter'], check_values=False, return_as_bool=True):
                for k in kwargs['filter']:
                    if isinstance(k, str_types):
                        if _extract(x, k) != kwargs['filter'][k]:
                            return False
                return True
            elif callable(kwargs['filter']):
                return kwargs['filter'](x)
            return False
        return True

    def _sort(x):
        if 'sort' in kwargs:
            if isinstance(kwargs['sort'], str_types):
                return _extract(x, kwargs['sort'])
            elif isinstance(kwargs['sort'], Iterable):
                items = []
                for k in kwargs['sort']:
                    if isinstance(k, str_types):
                        items.append(_extract(x, k))
                return tuple(items)
            elif callable(kwargs['sort']):
                return kwargs['sort'](x)
        return 0

    extracted = []
    for i in sorted(self.val, key=lambda x: _sort(x)):
        if _filter(i):
            items = [_extract(i, name) for name in names]
            extracted.append(tuple(items) if len(items) > 1 else items[0])
    return AssertionBuilder(extracted, self.description, self.kind)
[ "def", "extracting", "(", "self", ",", "*", "names", ",", "*", "*", "kwargs", ")", ":", "if", "not", "isinstance", "(", "self", ".", "val", ",", "Iterable", ")", ":", "raise", "TypeError", "(", "'val is not iterable'", ")", "if", "isinstance", "(", "self", ".", "val", ",", "str_types", ")", ":", "raise", "TypeError", "(", "'val must not be string'", ")", "if", "len", "(", "names", ")", "==", "0", ":", "raise", "ValueError", "(", "'one or more name args must be given'", ")", "def", "_extract", "(", "x", ",", "name", ")", ":", "if", "self", ".", "_check_dict_like", "(", "x", ",", "check_values", "=", "False", ",", "return_as_bool", "=", "True", ")", ":", "if", "name", "in", "x", ":", "return", "x", "[", "name", "]", "else", ":", "raise", "ValueError", "(", "'item keys %s did not contain key <%s>'", "%", "(", "list", "(", "x", ".", "keys", "(", ")", ")", ",", "name", ")", ")", "elif", "isinstance", "(", "x", ",", "Iterable", ")", ":", "self", ".", "_check_iterable", "(", "x", ",", "name", "=", "'item'", ")", "return", "x", "[", "name", "]", "elif", "hasattr", "(", "x", ",", "name", ")", ":", "attr", "=", "getattr", "(", "x", ",", "name", ")", "if", "callable", "(", "attr", ")", ":", "try", ":", "return", "attr", "(", ")", "except", "TypeError", ":", "raise", "ValueError", "(", "'val method <%s()> exists, but is not zero-arg method'", "%", "name", ")", "else", ":", "return", "attr", "else", ":", "raise", "ValueError", "(", "'val does not have property or zero-arg method <%s>'", "%", "name", ")", "def", "_filter", "(", "x", ")", ":", "if", "'filter'", "in", "kwargs", ":", "if", "isinstance", "(", "kwargs", "[", "'filter'", "]", ",", "str_types", ")", ":", "return", "bool", "(", "_extract", "(", "x", ",", "kwargs", "[", "'filter'", "]", ")", ")", "elif", "self", ".", "_check_dict_like", "(", "kwargs", "[", "'filter'", "]", ",", "check_values", "=", "False", ",", "return_as_bool", "=", "True", ")", ":", "for", "k", "in", "kwargs", "[", "'filter'", "]", ":", "if", "isinstance", "(", "k", ",", "str_types", ")", ":", "if", "_extract", "(", "x", ",", "k", ")", "!=", "kwargs", "[", "'filter'", "]", "[", "k", "]", ":", "return", "False", "return", "True", "elif", "callable", "(", "kwargs", "[", "'filter'", "]", ")", ":", "return", "kwargs", "[", "'filter'", "]", "(", "x", ")", "return", "False", "return", "True", "def", "_sort", "(", "x", ")", ":", "if", "'sort'", "in", "kwargs", ":", "if", "isinstance", "(", "kwargs", "[", "'sort'", "]", ",", "str_types", ")", ":", "return", "_extract", "(", "x", ",", "kwargs", "[", "'sort'", "]", ")", "elif", "isinstance", "(", "kwargs", "[", "'sort'", "]", ",", "Iterable", ")", ":", "items", "=", "[", "]", "for", "k", "in", "kwargs", "[", "'sort'", "]", ":", "if", "isinstance", "(", "k", ",", "str_types", ")", ":", "items", ".", "append", "(", "_extract", "(", "x", ",", "k", ")", ")", "return", "tuple", "(", "items", ")", "elif", "callable", "(", "kwargs", "[", "'sort'", "]", ")", ":", "return", "kwargs", "[", "'sort'", "]", "(", "x", ")", "return", "0", "extracted", "=", "[", "]", "for", "i", "in", "sorted", "(", "self", ".", "val", ",", "key", "=", "lambda", "x", ":", "_sort", "(", "x", ")", ")", ":", "if", "_filter", "(", "i", ")", ":", "items", "=", "[", "_extract", "(", "i", ",", "name", ")", "for", "name", "in", "names", "]", "extracted", ".", "append", "(", "tuple", "(", "items", ")", "if", "len", "(", "items", ")", ">", "1", "else", "items", "[", "0", "]", ")", "return", "AssertionBuilder", "(", "extracted", ",", "self", ".", "description", ",", "self", 
".", "kind", ")" ]
44.892308
17.307692
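extracting above is assertpy's projection helper; a usage sketch with illustrative data (dict items are looked up by key, and the filter kwarg reuses the same extraction logic):

from assertpy import assert_that

people = [{'name': 'Fred', 'age': 36}, {'name': 'Bob', 'age': 40}]
assert_that(people).extracting('name').contains('Fred', 'Bob')
assert_that(people).extracting('name', filter={'age': 40}).is_equal_to(['Bob'])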
def iuwt_decomposition(in1, scale_count, scale_adjust=0, mode='ser', core_count=2,
                       store_smoothed=False, store_on_gpu=False):
    """
    This function serves as a handler for the different implementations of the
    IUWT decomposition. It allows the different methods to be used almost
    interchangeably.

    INPUTS:
    in1             (no default):     Array on which the decomposition is to be performed.
    scale_count     (no default):     Maximum scale to be considered.
    scale_adjust    (default=0):      Adjustment to scale value if first scales are of no interest.
    mode            (default='ser'):  Implementation of the IUWT to be used - 'ser', 'mp' or 'gpu'.
    core_count      (default=2):      Additional option for multiprocessing - specifies core count.
    store_smoothed  (default=False):  Boolean specifier for whether the smoothed image is stored or not.
    store_on_gpu    (default=False):  Boolean specifier for whether the decomposition is stored on the gpu or not.

    OUTPUTS:
    Returns the decomposition with the additional smoothed coefficients if specified.
    """
    if mode=='ser':
        return ser_iuwt_decomposition(in1, scale_count, scale_adjust, store_smoothed)
    elif mode=='mp':
        return mp_iuwt_decomposition(in1, scale_count, scale_adjust, store_smoothed, core_count)
    elif mode=='gpu':
        return gpu_iuwt_decomposition(in1, scale_count, scale_adjust, store_smoothed, store_on_gpu)
[ "def", "iuwt_decomposition", "(", "in1", ",", "scale_count", ",", "scale_adjust", "=", "0", ",", "mode", "=", "'ser'", ",", "core_count", "=", "2", ",", "store_smoothed", "=", "False", ",", "store_on_gpu", "=", "False", ")", ":", "if", "mode", "==", "'ser'", ":", "return", "ser_iuwt_decomposition", "(", "in1", ",", "scale_count", ",", "scale_adjust", ",", "store_smoothed", ")", "elif", "mode", "==", "'mp'", ":", "return", "mp_iuwt_decomposition", "(", "in1", ",", "scale_count", ",", "scale_adjust", ",", "store_smoothed", ",", "core_count", ")", "elif", "mode", "==", "'gpu'", ":", "return", "gpu_iuwt_decomposition", "(", "in1", ",", "scale_count", ",", "scale_adjust", ",", "store_smoothed", ",", "store_on_gpu", ")" ]
59.72
41.32
def _find_resources(resource, name=None, resource_id=None, tags=None, region=None, key=None, keyid=None, profile=None): ''' Get VPC resources based on resource type and name, id, or tags. ''' if all((resource_id, name)): raise SaltInvocationError('Only one of name or id may be ' 'provided.') if not any((resource_id, name, tags)): raise SaltInvocationError('At least one of the following must be ' 'provided: id, name, or tags.') conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) f = 'get_all_{0}'.format(resource) if not f.endswith('s'): f = f + 's' get_resources = getattr(conn, f) filter_parameters = {} if name: filter_parameters['filters'] = {'tag:Name': name} if resource_id: filter_parameters['{0}_ids'.format(resource)] = resource_id if tags: for tag_name, tag_value in six.iteritems(tags): # setdefault ensures the 'filters' dict exists when only tags are supplied filter_parameters.setdefault('filters', {})['tag:{0}'.format(tag_name)] = tag_value try: r = get_resources(**filter_parameters) except BotoServerError as e: if e.code.endswith('.NotFound'): return None raise return r
[ "def", "_find_resources", "(", "resource", ",", "name", "=", "None", ",", "resource_id", "=", "None", ",", "tags", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "if", "all", "(", "(", "resource_id", ",", "name", ")", ")", ":", "raise", "SaltInvocationError", "(", "'Only one of name or id may be '", "'provided.'", ")", "if", "not", "any", "(", "(", "resource_id", ",", "name", ",", "tags", ")", ")", ":", "raise", "SaltInvocationError", "(", "'At least one of the following must be '", "'provided: id, name, or tags.'", ")", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "f", "=", "'get_all_{0}'", ".", "format", "(", "resource", ")", "if", "not", "f", ".", "endswith", "(", "'s'", ")", ":", "f", "=", "f", "+", "'s'", "get_resources", "=", "getattr", "(", "conn", ",", "f", ")", "filter_parameters", "=", "{", "}", "if", "name", ":", "filter_parameters", "[", "'filters'", "]", "=", "{", "'tag:Name'", ":", "name", "}", "if", "resource_id", ":", "filter_parameters", "[", "'{0}_ids'", ".", "format", "(", "resource", ")", "]", "=", "resource_id", "if", "tags", ":", "for", "tag_name", ",", "tag_value", "in", "six", ".", "iteritems", "(", "tags", ")", ":", "filter_parameters", "[", "'filters'", "]", "[", "'tag:{0}'", ".", "format", "(", "tag_name", ")", "]", "=", "tag_value", "try", ":", "r", "=", "get_resources", "(", "*", "*", "filter_parameters", ")", "except", "BotoServerError", "as", "e", ":", "if", "e", ".", "code", ".", "endswith", "(", "'.NotFound'", ")", ":", "return", "None", "raise", "return", "r" ]
33.513514
23.621622
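A minimal, self-contained sketch (function and tag names are made up) of the filter construction used in the record above; the setdefault call is what keeps the tags branch from raising a KeyError when no name was given:

def build_filter_parameters(name=None, tags=None):
    filter_parameters = {}
    if name:
        filter_parameters['filters'] = {'tag:Name': name}
    if tags:
        for tag_name, tag_value in tags.items():
            filter_parameters.setdefault('filters', {})['tag:{0}'.format(tag_name)] = tag_value
    return filter_parameters

print(build_filter_parameters(tags={'env': 'prod'}))
# {'filters': {'tag:env': 'prod'}}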
def admin_view_reverse_fk_links(modeladmin: ModelAdmin, obj, reverse_fk_set_field: str, missing: str = "(None)", use_str: bool = True, separator: str = "<br>", view_type: str = "change", current_app: str = None) -> str: """ Get Django admin site URLs for multiple objects linked to our object of interest (where the other objects have foreign keys to our object). """ if not hasattr(obj, reverse_fk_set_field): return missing linked_objs = getattr(obj, reverse_fk_set_field).all() if not linked_objs: return missing first = linked_objs[0] app_name = first._meta.app_label.lower() model_name = first._meta.object_name.lower() viewname = "admin:{}_{}_{}".format(app_name, model_name, view_type) if current_app is None: current_app = modeladmin.admin_site.name links = [] for linked_obj in linked_objs: # log.debug("linked_obj: {}", linked_obj) url = reverse(viewname, args=[linked_obj.pk], current_app=current_app) if use_str: label = escape(str(linked_obj)) else: label = "{} {}".format(escape(linked_obj._meta.object_name), linked_obj.pk) links.append('<a href="{}">{}</a>'.format(url, label)) # log.debug("links: {}", links) return separator.join(links)
[ "def", "admin_view_reverse_fk_links", "(", "modeladmin", ":", "ModelAdmin", ",", "obj", ",", "reverse_fk_set_field", ":", "str", ",", "missing", ":", "str", "=", "\"(None)\"", ",", "use_str", ":", "bool", "=", "True", ",", "separator", ":", "str", "=", "\"<br>\"", ",", "view_type", ":", "str", "=", "\"change\"", ",", "current_app", ":", "str", "=", "None", ")", "->", "str", ":", "if", "not", "hasattr", "(", "obj", ",", "reverse_fk_set_field", ")", ":", "return", "missing", "linked_objs", "=", "getattr", "(", "obj", ",", "reverse_fk_set_field", ")", ".", "all", "(", ")", "if", "not", "linked_objs", ":", "return", "missing", "first", "=", "linked_objs", "[", "0", "]", "app_name", "=", "first", ".", "_meta", ".", "app_label", ".", "lower", "(", ")", "model_name", "=", "first", ".", "_meta", ".", "object_name", ".", "lower", "(", ")", "viewname", "=", "\"admin:{}_{}_{}\"", ".", "format", "(", "app_name", ",", "model_name", ",", "view_type", ")", "if", "current_app", "is", "None", ":", "current_app", "=", "modeladmin", ".", "admin_site", ".", "name", "links", "=", "[", "]", "for", "linked_obj", "in", "linked_objs", ":", "# log.debug(\"linked_obj: {}\", linked_obj)", "url", "=", "reverse", "(", "viewname", ",", "args", "=", "[", "linked_obj", ".", "pk", "]", ",", "current_app", "=", "current_app", ")", "if", "use_str", ":", "label", "=", "escape", "(", "str", "(", "linked_obj", ")", ")", "else", ":", "label", "=", "\"{} {}\"", ".", "format", "(", "escape", "(", "linked_obj", ".", "_meta", ".", "object_name", ")", ",", "linked_obj", ".", "pk", ")", "links", ".", "append", "(", "'<a href=\"{}\">{}</a>'", ".", "format", "(", "url", ",", "label", ")", ")", "# log.debug(\"links: {}\", links)", "return", "separator", ".", "join", "(", "links", ")" ]
41.675676
16.702703
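A sketch of the "admin:<app_label>_<model_name>_<view_type>" naming convention the record relies on. This assumes a configured Django project; "myapp" and "book" are hypothetical names, and reverse() is imported from django.urls (older Django versions used django.core.urlresolvers):

from django.urls import reverse

def admin_change_url(obj, view_type="change"):
    viewname = "admin:{}_{}_{}".format(
        obj._meta.app_label.lower(),    # e.g. "myapp"
        obj._meta.object_name.lower(),  # e.g. "book"
        view_type)                      # "change", "delete", "history", ...
    return reverse(viewname, args=[obj.pk])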
def startstop_bool(pack): """Make a bool array based on start and stop conditions. pack: pack.ChannelPack instance If there are start conditions but no stop conditions, this is legal; the True section will begin at the first start and remain True for the rest of the array. Likewise, if there are stop conditions but no start condition, the returned array will be all True until the first stop slice, and the rest of the array is set to False. """ b_TRUE = np.ones(pack.rec_cnt) == True # NOQA start_list = pack.conconf.conditions_list('startcond') stop_list = pack.conconf.conditions_list('stopcond') # Pre-check: runflag = 'startstop' if not start_list and not stop_list: return b_TRUE elif not start_list: runflag = 'stoponly' elif not stop_list: runflag = 'startonly' # startb: if runflag == 'stoponly': # all False (dummy assignment) startb = b_TRUE == False # NOQA else: startb = b_TRUE for cond in start_list: startb = startb & pack._mask_array(cond) # stopb: if runflag == 'startonly': # all False (dummy assignment) stopb = b_TRUE == False # NOQA else: stopb = b_TRUE for cond in stop_list: stopb = stopb & pack._mask_array(cond) stopextend = pack.conconf.get_stopextend() return _startstop_bool(startb, stopb, runflag, stopextend)
[ "def", "startstop_bool", "(", "pack", ")", ":", "b_TRUE", "=", "np", ".", "ones", "(", "pack", ".", "rec_cnt", ")", "==", "True", "# NOQA", "start_list", "=", "pack", ".", "conconf", ".", "conditions_list", "(", "'startcond'", ")", "stop_list", "=", "pack", ".", "conconf", ".", "conditions_list", "(", "'stopcond'", ")", "# Pre-check:", "runflag", "=", "'startstop'", "if", "not", "start_list", "and", "not", "stop_list", ":", "return", "b_TRUE", "elif", "not", "start_list", ":", "runflag", "=", "'stoponly'", "elif", "not", "stop_list", ":", "runflag", "=", "'start_only'", "# startb:", "if", "runflag", "==", "'stoponly'", ":", "# all False (dummy assignment)", "startb", "=", "b_TRUE", "==", "False", "# NOQA", "else", ":", "startb", "=", "b_TRUE", "for", "cond", "in", "start_list", ":", "startb", "=", "startb", "&", "pack", ".", "_mask_array", "(", "cond", ")", "# stopb:", "if", "runflag", "==", "'startonly'", ":", "# all False (dummy assignment)", "stopb", "=", "b_TRUE", "==", "False", "# NOQA", "else", ":", "stopb", "=", "b_TRUE", "for", "cond", "in", "stop_list", ":", "stopb", "=", "stopb", "&", "pack", ".", "_mask_array", "(", "cond", ")", "stopextend", "=", "pack", ".", "conconf", ".", "get_stopextend", "(", ")", "return", "_startstop_bool", "(", "startb", ",", "stopb", ",", "runflag", ",", "stopextend", ")" ]
29.3125
19.708333
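The private helper _startstop_bool is not shown in this record; the stand-in below only illustrates the intended semantics under that assumption: the output turns True at a start hit and False again at the next stop hit.

import numpy as np

def startstop_demo(startb, stopb):
    out = np.zeros_like(startb, dtype=bool)
    running = False
    for i in range(len(startb)):
        if not running and startb[i]:
            running = True
        elif running and stopb[i]:
            running = False
        out[i] = running
    return out

startb = np.array([0, 1, 0, 0, 0, 1, 0], dtype=bool)
stopb = np.array([0, 0, 0, 1, 0, 0, 0], dtype=bool)
print(startstop_demo(startb, stopb))
# [False  True  True False False  True  True]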
def is_descriptor_class(desc, include_abstract=False): r"""Check whether desc is a calculable descriptor class. Returns: bool """ return ( isinstance(desc, type) and issubclass(desc, Descriptor) and (True if include_abstract else not inspect.isabstract(desc)) )
[ "def", "is_descriptor_class", "(", "desc", ",", "include_abstract", "=", "False", ")", ":", "return", "(", "isinstance", "(", "desc", ",", "type", ")", "and", "issubclass", "(", "desc", ",", "Descriptor", ")", "and", "(", "True", "if", "include_abstract", "else", "not", "inspect", ".", "isabstract", "(", "desc", ")", ")", ")" ]
24.583333
21.25
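A self-contained demo of the three checks, using a stand-in Descriptor base class and a made-up RingCount subclass (the real base lives elsewhere in the package):

import inspect
from abc import ABC, abstractmethod

class Descriptor(ABC):
    @abstractmethod
    def calculate(self):
        ...

class RingCount(Descriptor):
    def calculate(self):
        return 0

def is_descriptor_class(desc, include_abstract=False):
    return (
        isinstance(desc, type)
        and issubclass(desc, Descriptor)
        and (True if include_abstract else not inspect.isabstract(desc))
    )

print(is_descriptor_class(RingCount))                          # True
print(is_descriptor_class(Descriptor))                         # False (abstract)
print(is_descriptor_class(Descriptor, include_abstract=True))  # True
print(is_descriptor_class(RingCount()))                        # False (instance, not a class)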
def logout(self): """Logout of a vSphere server.""" if self._logged_in is True: self.si.flush_cache() self.sc.sessionManager.Logout() self._logged_in = False
[ "def", "logout", "(", "self", ")", ":", "if", "self", ".", "_logged_in", "is", "True", ":", "self", ".", "si", ".", "flush_cache", "(", ")", "self", ".", "sc", ".", "sessionManager", ".", "Logout", "(", ")", "self", ".", "_logged_in", "=", "False" ]
34
7.166667
def tables_list(self, dataset_name, max_results=0, page_token=None): """Issues a request to retrieve a list of tables. Args: dataset_name: the name of the dataset to enumerate. max_results: an optional maximum number of tables to retrieve. page_token: an optional token to continue the retrieval. Returns: A parsed result object. Raises: Exception if there is an error performing the operation. """ url = Api._ENDPOINT +\ (Api._TABLES_PATH % (dataset_name.project_id, dataset_name.dataset_id, '', '')) args = {} if max_results != 0: args['maxResults'] = max_results if page_token is not None: args['pageToken'] = page_token return google.datalab.utils.Http.request(url, args=args, credentials=self.credentials)
[ "def", "tables_list", "(", "self", ",", "dataset_name", ",", "max_results", "=", "0", ",", "page_token", "=", "None", ")", ":", "url", "=", "Api", ".", "_ENDPOINT", "+", "(", "Api", ".", "_TABLES_PATH", "%", "(", "dataset_name", ".", "project_id", ",", "dataset_name", ".", "dataset_id", ",", "''", ",", "''", ")", ")", "args", "=", "{", "}", "if", "max_results", "!=", "0", ":", "args", "[", "'maxResults'", "]", "=", "max_results", "if", "page_token", "is", "not", "None", ":", "args", "[", "'pageToken'", "]", "=", "page_token", "return", "google", ".", "datalab", ".", "utils", ".", "Http", ".", "request", "(", "url", ",", "args", "=", "args", ",", "credentials", "=", "self", ".", "credentials", ")" ]
35.545455
23
def write(self, notifications): "Connect to the APNS service and send notifications" if not self.factory: log.msg('APNSService write (connecting)') server, port = ((APNS_SERVER_SANDBOX_HOSTNAME if self.environment == 'sandbox' else APNS_SERVER_HOSTNAME), APNS_SERVER_PORT) self.factory = self.clientProtocolFactory() context = self.getContextFactory() reactor.connectSSL(server, port, self.factory, context) client = self.factory.clientProtocol if client: return client.sendMessage(notifications) else: d = self.factory.deferred timeout = reactor.callLater(self.timeout, lambda: d.called or d.errback( Exception('Notification timed out after %i seconds' % self.timeout))) def cancel_timeout(r): try: timeout.cancel() except: pass return r d.addCallback(lambda p: p.sendMessage(notifications)) d.addErrback(log_errback('apns-service-write')) d.addBoth(cancel_timeout) return d
[ "def", "write", "(", "self", ",", "notifications", ")", ":", "if", "not", "self", ".", "factory", ":", "log", ".", "msg", "(", "'APNSService write (connecting)'", ")", "server", ",", "port", "=", "(", "(", "APNS_SERVER_SANDBOX_HOSTNAME", "if", "self", ".", "environment", "==", "'sandbox'", "else", "APNS_SERVER_HOSTNAME", ")", ",", "APNS_SERVER_PORT", ")", "self", ".", "factory", "=", "self", ".", "clientProtocolFactory", "(", ")", "context", "=", "self", ".", "getContextFactory", "(", ")", "reactor", ".", "connectSSL", "(", "server", ",", "port", ",", "self", ".", "factory", ",", "context", ")", "client", "=", "self", ".", "factory", ".", "clientProtocol", "if", "client", ":", "return", "client", ".", "sendMessage", "(", "notifications", ")", "else", ":", "d", "=", "self", ".", "factory", ".", "deferred", "timeout", "=", "reactor", ".", "callLater", "(", "self", ".", "timeout", ",", "lambda", ":", "d", ".", "called", "or", "d", ".", "errback", "(", "Exception", "(", "'Notification timed out after %i seconds'", "%", "self", ".", "timeout", ")", ")", ")", "def", "cancel_timeout", "(", "r", ")", ":", "try", ":", "timeout", ".", "cancel", "(", ")", "except", ":", "pass", "return", "r", "d", ".", "addCallback", "(", "lambda", "p", ":", "p", ".", "sendMessage", "(", "notifications", ")", ")", "d", ".", "addErrback", "(", "log_errback", "(", "'apns-service-write'", ")", ")", "d", ".", "addBoth", "(", "cancel_timeout", ")", "return", "d" ]
37.571429
16.071429
def clicky(parser, token): """ Clicky tracking template tag. Renders Javascript code to track page visits. You must supply your Clicky Site ID (as a string) in the ``CLICKY_SITE_ID`` setting. """ bits = token.split_contents() if len(bits) > 1: raise TemplateSyntaxError("'%s' takes no arguments" % bits[0]) return ClickyNode()
[ "def", "clicky", "(", "parser", ",", "token", ")", ":", "bits", "=", "token", ".", "split_contents", "(", ")", "if", "len", "(", "bits", ")", ">", "1", ":", "raise", "TemplateSyntaxError", "(", "\"'%s' takes no arguments\"", "%", "bits", "[", "0", "]", ")", "return", "ClickyNode", "(", ")" ]
30.083333
17.583333
def signal_transmit(self, fd): """ Awaken one process waiting to transmit data on fd """ connections = self.connections if connections(fd) and self.rwait[connections(fd)]: procid = random.sample(self.rwait[connections(fd)], 1)[0] self.awake(procid)
[ "def", "signal_transmit", "(", "self", ",", "fd", ")", ":", "connections", "=", "self", ".", "connections", "if", "connections", "(", "fd", ")", "and", "self", ".", "rwait", "[", "connections", "(", "fd", ")", "]", ":", "procid", "=", "random", ".", "sample", "(", "self", ".", "rwait", "[", "connections", "(", "fd", ")", "]", ",", "1", ")", "[", "0", "]", "self", ".", "awake", "(", "procid", ")" ]
48.333333
11.666667
def getpolicyid(self, fuzzy=True, language=None, windowsversion=None): """ Return an identification string which can be used to find a policy. This string is a combination of the manifest's processorArchitecture, major and minor version, name, publicKeyToken and language. Arguments: fuzzy (boolean) - If False, insert the full version in the id string. Default is True (omit). windowsversion - If not specified (or None), default to (tuple or list of integers) sys.getwindowsversion(). """ if not self.name: logger.warn("Assembly metadata incomplete") return "" id = [] if self.processorArchitecture: id.append(self.processorArchitecture) name = [] name.append("policy") if self.version: name.append(str(self.version[0])) name.append(str(self.version[1])) name.append(self.name) id.append(".".join(name)) if self.publicKeyToken: id.append(self.publicKeyToken) if self.version and (windowsversion or sys.getwindowsversion()) >= (6, ): # Vista and later if fuzzy: id.append("*") else: id.append(".".join([str(i) for i in self.version])) if not language: language = self.getlanguage(windowsversion=windowsversion) if language: id.append(language) id.append("*") id = "_".join(id) if self.version and (windowsversion or sys.getwindowsversion()) < (6, ): # Windows XP if fuzzy: id = os.path.join(id, "*") else: id = os.path.join(id, ".".join([str(i) for i in self.version])) return id
[ "def", "getpolicyid", "(", "self", ",", "fuzzy", "=", "True", ",", "language", "=", "None", ",", "windowsversion", "=", "None", ")", ":", "if", "not", "self", ".", "name", ":", "logger", ".", "warn", "(", "\"Assembly metadata incomplete\"", ")", "return", "\"\"", "id", "=", "[", "]", "if", "self", ".", "processorArchitecture", ":", "id", ".", "append", "(", "self", ".", "processorArchitecture", ")", "name", "=", "[", "]", "name", ".", "append", "(", "\"policy\"", ")", "if", "self", ".", "version", ":", "name", ".", "append", "(", "str", "(", "self", ".", "version", "[", "0", "]", ")", ")", "name", ".", "append", "(", "str", "(", "self", ".", "version", "[", "1", "]", ")", ")", "name", ".", "append", "(", "self", ".", "name", ")", "id", ".", "append", "(", "\".\"", ".", "join", "(", "name", ")", ")", "if", "self", ".", "publicKeyToken", ":", "id", ".", "append", "(", "self", ".", "publicKeyToken", ")", "if", "self", ".", "version", "and", "(", "windowsversion", "or", "sys", ".", "getwindowsversion", "(", ")", ")", ">=", "(", "6", ",", ")", ":", "# Vista and later", "if", "fuzzy", ":", "id", ".", "append", "(", "\"*\"", ")", "else", ":", "id", ".", "append", "(", "\".\"", ".", "join", "(", "[", "str", "(", "i", ")", "for", "i", "in", "self", ".", "version", "]", ")", ")", "if", "not", "language", ":", "language", "=", "self", ".", "getlanguage", "(", "windowsversion", "=", "windowsversion", ")", "if", "language", ":", "id", ".", "append", "(", "language", ")", "id", ".", "append", "(", "\"*\"", ")", "id", "=", "\"_\"", ".", "join", "(", "id", ")", "if", "self", ".", "version", "and", "(", "windowsversion", "or", "sys", ".", "getwindowsversion", "(", ")", ")", "<", "(", "6", ",", ")", ":", "# Windows XP", "if", "fuzzy", ":", "id", "=", "os", ".", "path", ".", "join", "(", "id", ",", "\"*\"", ")", "else", ":", "id", "=", "os", ".", "path", ".", "join", "(", "id", ",", "\".\"", ".", "join", "(", "[", "str", "(", "i", ")", "for", "i", "in", "self", ".", "version", "]", ")", ")", "return", "id" ]
38.583333
20
def authorization_target(original_class): """ Add bouncer goodness to the model. This is a class decorator: when added to your User model it will add ``can`` and ``cannot`` methods to the class :param original_class: the User class to be decorated """ def can(self, action, subject): ability = Ability(self, get_authorization_method()) return ability.can(action, subject) def cannot(self, action, subject): return not can(self, action, subject) setattr(original_class, 'can', can) setattr(original_class, 'cannot', cannot) return original_class
[ "def", "authorization_target", "(", "original_class", ")", ":", "def", "can", "(", "self", ",", "action", ",", "subject", ")", ":", "ability", "=", "Ability", "(", "self", ",", "get_authorization_method", "(", ")", ")", "return", "ability", ".", "can", "(", "action", ",", "subject", ")", "def", "cannot", "(", "self", ",", "action", ",", "subject", ")", ":", "return", "not", "can", "(", "self", ",", "action", ",", "subject", ")", "setattr", "(", "original_class", ",", "'can'", ",", "can", ")", "setattr", "(", "original_class", ",", "'cannot'", ",", "cannot", ")", "return", "original_class" ]
35.176471
14.411765
def download(self, target_relpath, download_in_toto_metadata=True): ''' Returns: If download over TUF and in-toto is successful, this function will return the complete filepath to the desired target. ''' return self.__get_target(target_relpath, download_in_toto_metadata=download_in_toto_metadata)
[ "def", "download", "(", "self", ",", "target_relpath", ",", "download_in_toto_metadata", "=", "True", ")", ":", "return", "self", ".", "__get_target", "(", "target_relpath", ",", "download_in_toto_metadata", "=", "download_in_toto_metadata", ")" ]
49.571429
33
def get_variables_by_offset(self, start): """ Find variables covering the given region offset. :param int start: :return: A set of stack variables (an empty list if no container covers the offset). :rtype: set """ _, container = self._get_container(start) if container is None: return [] else: return container.internal_objects
[ "def", "get_variables_by_offset", "(", "self", ",", "start", ")", ":", "_", ",", "container", "=", "self", ".", "_get_container", "(", "start", ")", "if", "container", "is", "None", ":", "return", "[", "]", "else", ":", "return", "container", ".", "internal_objects" ]
26
14.714286
def p_stmt_list(p): """ stmt_list : stmt | stmt_list stmt """ if len(p) == 2: p[0] = node.stmt_list([p[1]] if p[1] else []) elif len(p) == 3: p[0] = p[1] if p[2]: p[0].append(p[2]) else: assert 0
[ "def", "p_stmt_list", "(", "p", ")", ":", "if", "len", "(", "p", ")", "==", "2", ":", "p", "[", "0", "]", "=", "node", ".", "stmt_list", "(", "[", "p", "[", "1", "]", "]", "if", "p", "[", "1", "]", "else", "[", "]", ")", "elif", "len", "(", "p", ")", "==", "3", ":", "p", "[", "0", "]", "=", "p", "[", "1", "]", "if", "p", "[", "2", "]", ":", "p", "[", "0", "]", ".", "append", "(", "p", "[", "2", "]", ")", "else", ":", "assert", "0" ]
20.384615
16.538462
def validate(self): """ Verify that the contents of the PublicKey object are valid. Raises: TypeError: if the types of any PublicKey attributes are invalid. ValueError: if the key format type is not one of the valid formats. """ if not isinstance(self.value, bytes): raise TypeError("key value must be bytes") elif not isinstance(self.cryptographic_algorithm, enums.CryptographicAlgorithm): raise TypeError("key algorithm must be a CryptographicAlgorithm " "enumeration") elif not isinstance(self.cryptographic_length, six.integer_types): raise TypeError("key length must be an integer") elif not isinstance(self.key_format_type, enums.KeyFormatType): raise TypeError("key format type must be a KeyFormatType " "enumeration") elif self.key_format_type not in self._valid_formats: raise ValueError("key format type must be one of {0}".format( self._valid_formats)) # TODO (peter-hamilton) Verify that the key bytes match the key format mask_count = len(self.cryptographic_usage_masks) for i in range(mask_count): mask = self.cryptographic_usage_masks[i] if not isinstance(mask, enums.CryptographicUsageMask): position = "({0} in list)".format(i) raise TypeError( "key mask {0} must be a CryptographicUsageMask " "enumeration".format(position)) name_count = len(self.names) for i in range(name_count): name = self.names[i] if not isinstance(name, six.string_types): position = "({0} in list)".format(i) raise TypeError("key name {0} must be a string".format( position))
[ "def", "validate", "(", "self", ")", ":", "if", "not", "isinstance", "(", "self", ".", "value", ",", "bytes", ")", ":", "raise", "TypeError", "(", "\"key value must be bytes\"", ")", "elif", "not", "isinstance", "(", "self", ".", "cryptographic_algorithm", ",", "enums", ".", "CryptographicAlgorithm", ")", ":", "raise", "TypeError", "(", "\"key algorithm must be a CryptographicAlgorithm \"", "\"enumeration\"", ")", "elif", "not", "isinstance", "(", "self", ".", "cryptographic_length", ",", "six", ".", "integer_types", ")", ":", "raise", "TypeError", "(", "\"key length must be an integer\"", ")", "elif", "not", "isinstance", "(", "self", ".", "key_format_type", ",", "enums", ".", "KeyFormatType", ")", ":", "raise", "TypeError", "(", "\"key format type must be a KeyFormatType \"", "\"enumeration\"", ")", "elif", "self", ".", "key_format_type", "not", "in", "self", ".", "_valid_formats", ":", "raise", "ValueError", "(", "\"key format type must be one of {0}\"", ".", "format", "(", "self", ".", "_valid_formats", ")", ")", "# TODO (peter-hamilton) Verify that the key bytes match the key format", "mask_count", "=", "len", "(", "self", ".", "cryptographic_usage_masks", ")", "for", "i", "in", "range", "(", "mask_count", ")", ":", "mask", "=", "self", ".", "cryptographic_usage_masks", "[", "i", "]", "if", "not", "isinstance", "(", "mask", ",", "enums", ".", "CryptographicUsageMask", ")", ":", "position", "=", "\"({0} in list)\"", ".", "format", "(", "i", ")", "raise", "TypeError", "(", "\"key mask {0} must be a CryptographicUsageMask \"", "\"enumeration\"", ".", "format", "(", "position", ")", ")", "name_count", "=", "len", "(", "self", ".", "names", ")", "for", "i", "in", "range", "(", "name_count", ")", ":", "name", "=", "self", ".", "names", "[", "i", "]", "if", "not", "isinstance", "(", "name", ",", "six", ".", "string_types", ")", ":", "position", "=", "\"({0} in list)\"", ".", "format", "(", "i", ")", "raise", "TypeError", "(", "\"key name {0} must be a string\"", ".", "format", "(", "position", ")", ")" ]
45.5
19.4
def remove(self, *tokens: str) -> None: """Remove tokens from list.""" from wdom.web_node import WdomElement _removed_tokens = [] for token in tokens: self._validate_token(token) if token in self: self._list.remove(token) _removed_tokens.append(token) if isinstance(self._owner, WdomElement) and _removed_tokens: self._owner.js_exec('removeClass', _removed_tokens)
[ "def", "remove", "(", "self", ",", "*", "tokens", ":", "str", ")", "->", "None", ":", "from", "wdom", ".", "web_node", "import", "WdomElement", "_removed_tokens", "=", "[", "]", "for", "token", "in", "tokens", ":", "self", ".", "_validate_token", "(", "token", ")", "if", "token", "in", "self", ":", "self", ".", "_list", ".", "remove", "(", "token", ")", "_removed_tokens", ".", "append", "(", "token", ")", "if", "isinstance", "(", "self", ".", "_owner", ",", "WdomElement", ")", "and", "_removed_tokens", ":", "self", ".", "_owner", ".", "js_exec", "(", "'removeClass'", ",", "_removed_tokens", ")" ]
42
8.909091
def components(arg): """Converts a dict of components to the format expected by the Google Maps server. For example: c = {"country": "US", "postal_code": "94043"} convert.components(c) # 'country:US|postal_code:94043' :param arg: The component filter. :type arg: dict :rtype: basestring """ # Components may have multiple values per type, here we # expand them into individual key/value items, eg: # {"country": ["US", "AU"], "foo": 1} -> "country:AU", "country:US", "foo:1" def expand(arg): for k, v in arg.items(): for item in as_list(v): yield "%s:%s" % (k, item) if isinstance(arg, dict): return "|".join(sorted(expand(arg))) raise TypeError( "Expected a dict for components, " "but got %s" % type(arg).__name__)
[ "def", "components", "(", "arg", ")", ":", "# Components may have multiple values per type, here we", "# expand them into individual key/value items, eg:", "# {\"country\": [\"US\", \"AU\"], \"foo\": 1} -> \"country:AU\", \"country:US\", \"foo:1\"", "def", "expand", "(", "arg", ")", ":", "for", "k", ",", "v", "in", "arg", ".", "items", "(", ")", ":", "for", "item", "in", "as_list", "(", "v", ")", ":", "yield", "\"%s:%s\"", "%", "(", "k", ",", "item", ")", "if", "isinstance", "(", "arg", ",", "dict", ")", ":", "return", "\"|\"", ".", "join", "(", "sorted", "(", "expand", "(", "arg", ")", ")", ")", "raise", "TypeError", "(", "\"Expected a dict for components, \"", "\"but got %s\"", "%", "type", "(", "arg", ")", ".", "__name__", ")" ]
28.206897
18.241379
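A quick self-contained check of the expansion and sorting behavior described in the docstring above; as_list here is a stand-in for the module's own helper:

def as_list(v):
    return v if isinstance(v, (list, tuple)) else [v]

def components(arg):
    def expand(arg):
        for k, v in arg.items():
            for item in as_list(v):
                yield "%s:%s" % (k, item)
    if isinstance(arg, dict):
        return "|".join(sorted(expand(arg)))
    raise TypeError("Expected a dict for components, "
                    "but got %s" % type(arg).__name__)

print(components({"country": ["US", "AU"], "foo": 1}))
# country:AU|country:US|foo:1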
def set_cookie(self, name: str, value: str, *, expires: Optional[str]=None, domain: Optional[str]=None, max_age: Optional[Union[int, str]]=None, path: str='/', secure: Optional[str]=None, httponly: Optional[str]=None, version: Optional[str]=None) -> None: """Set or update response cookie. Sets new cookie or updates existent with new value. Also updates only those params which are not None. """ old = self._cookies.get(name) if old is not None and old.coded_value == '': # deleted cookie self._cookies.pop(name, None) self._cookies[name] = value c = self._cookies[name] if expires is not None: c['expires'] = expires elif c.get('expires') == 'Thu, 01 Jan 1970 00:00:00 GMT': del c['expires'] if domain is not None: c['domain'] = domain if max_age is not None: c['max-age'] = str(max_age) elif 'max-age' in c: del c['max-age'] c['path'] = path if secure is not None: c['secure'] = secure if httponly is not None: c['httponly'] = httponly if version is not None: c['version'] = version
[ "def", "set_cookie", "(", "self", ",", "name", ":", "str", ",", "value", ":", "str", ",", "*", ",", "expires", ":", "Optional", "[", "str", "]", "=", "None", ",", "domain", ":", "Optional", "[", "str", "]", "=", "None", ",", "max_age", ":", "Optional", "[", "Union", "[", "int", ",", "str", "]", "]", "=", "None", ",", "path", ":", "str", "=", "'/'", ",", "secure", ":", "Optional", "[", "str", "]", "=", "None", ",", "httponly", ":", "Optional", "[", "str", "]", "=", "None", ",", "version", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "None", ":", "old", "=", "self", ".", "_cookies", ".", "get", "(", "name", ")", "if", "old", "is", "not", "None", "and", "old", ".", "coded_value", "==", "''", ":", "# deleted cookie", "self", ".", "_cookies", ".", "pop", "(", "name", ",", "None", ")", "self", ".", "_cookies", "[", "name", "]", "=", "value", "c", "=", "self", ".", "_cookies", "[", "name", "]", "if", "expires", "is", "not", "None", ":", "c", "[", "'expires'", "]", "=", "expires", "elif", "c", ".", "get", "(", "'expires'", ")", "==", "'Thu, 01 Jan 1970 00:00:00 GMT'", ":", "del", "c", "[", "'expires'", "]", "if", "domain", "is", "not", "None", ":", "c", "[", "'domain'", "]", "=", "domain", "if", "max_age", "is", "not", "None", ":", "c", "[", "'max-age'", "]", "=", "str", "(", "max_age", ")", "elif", "'max-age'", "in", "c", ":", "del", "c", "[", "'max-age'", "]", "c", "[", "'path'", "]", "=", "path", "if", "secure", "is", "not", "None", ":", "c", "[", "'secure'", "]", "=", "secure", "if", "httponly", "is", "not", "None", ":", "c", "[", "'httponly'", "]", "=", "httponly", "if", "version", "is", "not", "None", ":", "c", "[", "'version'", "]", "=", "version" ]
31.162791
14.883721
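The attribute names used above ('expires', 'max-age', 'path', 'httponly', ...) are the standard morsel keys from the stdlib http.cookies module, which the _cookies container behaves like; a small demonstration:

from http.cookies import SimpleCookie

c = SimpleCookie()
c['session'] = 'abc123'
c['session']['path'] = '/'
c['session']['max-age'] = '3600'
c['session']['httponly'] = True
print(c.output())
# Set-Cookie: session=abc123; HttpOnly; Max-Age=3600; Path=/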
def _clear_inspect(self): """Clears inspect attributes when re-executing a pipeline""" self.trace_info = defaultdict(list) self.process_tags = {} self.process_stats = {} self.samples = [] self.stored_ids = [] self.stored_log_ids = [] self.time_start = None self.time_stop = None self.execution_command = None self.nextflow_version = None self.abort_cause = None self._c = 0 # Clean up of tag running status for p in self.processes.values(): p["barrier"] = "W" for i in ["submitted", "finished", "failed", "retry"]: p[i] = set()
[ "def", "_clear_inspect", "(", "self", ")", ":", "self", ".", "trace_info", "=", "defaultdict", "(", "list", ")", "self", ".", "process_tags", "=", "{", "}", "self", ".", "process_stats", "=", "{", "}", "self", ".", "samples", "=", "[", "]", "self", ".", "stored_ids", "=", "[", "]", "self", ".", "stored_log_ids", "=", "[", "]", "self", ".", "time_start", "=", "None", "self", ".", "time_stop", "=", "None", "self", ".", "execution_command", "=", "None", "self", ".", "nextflow_version", "=", "None", "self", ".", "abort_cause", "=", "None", "self", ".", "_c", "=", "0", "# Clean up of tag running status", "for", "p", "in", "self", ".", "processes", ".", "values", "(", ")", ":", "p", "[", "\"barrier\"", "]", "=", "\"W\"", "for", "i", "in", "[", "\"submitted\"", ",", "\"finished\"", ",", "\"failed\"", ",", "\"retry\"", "]", ":", "p", "[", "i", "]", "=", "set", "(", ")" ]
33.45
10.95
def transform(self, crs): """ Transforms BBoxCollection from current CRS to target CRS :param crs: target CRS :type crs: constants.CRS :return: BBoxCollection in target CRS :rtype: BBoxCollection """ return BBoxCollection([bbox.transform(crs) for bbox in self.bbox_list])
[ "def", "transform", "(", "self", ",", "crs", ")", ":", "return", "BBoxCollection", "(", "[", "bbox", ".", "transform", "(", "crs", ")", "for", "bbox", "in", "self", ".", "bbox_list", "]", ")" ]
35.555556
14.111111
def iterable_source(iterable, target): """Convert an iterable into a stream of events. Args: iterable: A series of items which will be sent to the target one by one. target: The target coroutine or sink. Returns: An iterator over any remaining items. """ it = iter(iterable) for item in it: try: target.send(item) except StopIteration: return prepend(item, it) return empty_iter()
[ "def", "iterable_source", "(", "iterable", ",", "target", ")", ":", "it", "=", "iter", "(", "iterable", ")", "for", "item", "in", "it", ":", "try", ":", "target", ".", "send", "(", "item", ")", "except", "StopIteration", ":", "return", "prepend", "(", "item", ",", "it", ")", "return", "empty_iter", "(", ")" ]
26.941176
17.647059
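A minimal usage sketch: a "target" is any primed coroutine exposing send(). The prepend()/empty_iter() helpers from the surrounding module are not needed to see the forwarding behavior.

def printer_sink():
    while True:
        item = (yield)
        print("got:", item)

sink = printer_sink()
next(sink)  # prime the coroutine so it can accept send()
for item in [1, 2, 3]:
    sink.send(item)
# got: 1
# got: 2
# got: 3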
def __check_extension(files, ignore_globs=None, include_globs=None): """ Internal method to filter a list of file changes by extension and ignore_dirs. :param files: :param ignore_globs: a list of globs to ignore (if none falls back to extensions and ignore_dir) :param include_globs: a list of globs to include (if none, includes all). :return: dict """ if include_globs is None or include_globs == []: include_globs = ['*'] out = {} for key in files.keys(): # count up the number of patterns in the ignore globs list that match if ignore_globs is not None: count_exclude = sum([1 if fnmatch.fnmatch(key, g) else 0 for g in ignore_globs]) else: count_exclude = 0 # count up the number of patterns in the include globs list that match count_include = sum([1 if fnmatch.fnmatch(key, g) else 0 for g in include_globs]) # if we have one vote or more to include and none to exclude, then we use the file. if count_include > 0 and count_exclude == 0: out[key] = files[key] return out
[ "def", "__check_extension", "(", "files", ",", "ignore_globs", "=", "None", ",", "include_globs", "=", "None", ")", ":", "if", "include_globs", "is", "None", "or", "include_globs", "==", "[", "]", ":", "include_globs", "=", "[", "'*'", "]", "out", "=", "{", "}", "for", "key", "in", "files", ".", "keys", "(", ")", ":", "# count up the number of patterns in the ignore globs list that match", "if", "ignore_globs", "is", "not", "None", ":", "count_exclude", "=", "sum", "(", "[", "1", "if", "fnmatch", ".", "fnmatch", "(", "key", ",", "g", ")", "else", "0", "for", "g", "in", "ignore_globs", "]", ")", "else", ":", "count_exclude", "=", "0", "# count up the number of patterns in the include globs list that match", "count_include", "=", "sum", "(", "[", "1", "if", "fnmatch", ".", "fnmatch", "(", "key", ",", "g", ")", "else", "0", "for", "g", "in", "include_globs", "]", ")", "# if we have one vote or more to include and none to exclude, then we use the file.", "if", "count_include", ">", "0", "and", "count_exclude", "==", "0", ":", "out", "[", "key", "]", "=", "files", "[", "key", "]", "return", "out" ]
40.965517
28.62069
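A standalone illustration of the include/exclude vote described above, reduced to booleans (the file names are made up):

import fnmatch

def keep(path, ignore_globs=None, include_globs=None):
    include_globs = include_globs or ['*']
    excluded = any(fnmatch.fnmatch(path, g) for g in (ignore_globs or []))
    included = any(fnmatch.fnmatch(path, g) for g in include_globs)
    return included and not excluded

print(keep('src/app.py', ignore_globs=['docs/*'], include_globs=['*.py']))    # True
print(keep('docs/conf.py', ignore_globs=['docs/*'], include_globs=['*.py']))  # False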
def job_task(self, application_id, job_id, task_id): """ A Task resource contains information about a particular task within a job. :param str application_id: The application id :param str job_id: The job id :param str task_id: The task id :returns: API response object with JSON data :rtype: :py:class:`yarn_api_client.base.Response` """ path = '/proxy/{appid}/ws/v1/mapreduce/jobs/{jobid}/tasks/{taskid}'.format( appid=application_id, jobid=job_id, taskid=task_id) return self.request(path)
[ "def", "job_task", "(", "self", ",", "application_id", ",", "job_id", ",", "task_id", ")", ":", "path", "=", "'/proxy/{appid}/ws/v1/mapreduce/jobs/{jobid}/tasks/{taskid}'", ".", "format", "(", "appid", "=", "application_id", ",", "jobid", "=", "job_id", ",", "taskid", "=", "task_id", ")", "return", "self", ".", "request", "(", "path", ")" ]
38.666667
16.533333
def show_error_dialog(self, message, details=None): """ Convenience method for showing an error dialog. """ dlg = Gtk.MessageDialog(type=Gtk.MessageType.ERROR, buttons=Gtk.ButtonsType.OK, message_format=message) if details is not None: dlg.format_secondary_text(details) dlg.run() dlg.destroy()
[ "def", "show_error_dialog", "(", "self", ",", "message", ",", "details", "=", "None", ")", ":", "dlg", "=", "Gtk", ".", "MessageDialog", "(", "type", "=", "Gtk", ".", "MessageType", ".", "ERROR", ",", "buttons", "=", "Gtk", ".", "ButtonsType", ".", "OK", ",", "message_format", "=", "message", ")", "if", "details", "is", "not", "None", ":", "dlg", ".", "format_secondary_text", "(", "details", ")", "dlg", ".", "run", "(", ")", "dlg", ".", "destroy", "(", ")" ]
38.6
14.6
def columnCount(self, parent): """ Returns the number of columns for the children of the given parent. """ # print("columnCount:",self) if parent.isValid(): return parent.internalPointer().columnCount() else: return self.root.columnCount()
[ "def", "columnCount", "(", "self", ",", "parent", ")", ":", "# print(\"columnCount:\",self)", "if", "parent", ".", "isValid", "(", ")", ":", "return", "parent", ".", "internalPointer", "(", ")", ".", "columnCount", "(", ")", "else", ":", "return", "self", ".", "root", ".", "columnCount", "(", ")" ]
37
9
def _read_requirements(filename, extra_packages): """Returns the hard requirements and conditional extras read from the file.""" requirements_file = open(filename).read() hard_requirements = [] for line in requirements_file.splitlines(): if _is_requirement(line): if line.find(';') > -1: dep, condition = tuple(line.split(';')) extra_packages[condition.strip()].append(dep.strip()) else: hard_requirements.append(line.strip()) return hard_requirements, extra_packages
[ "def", "_read_requirements", "(", "filename", ",", "extra_packages", ")", ":", "requirements_file", "=", "open", "(", "filename", ")", ".", "read", "(", ")", "hard_requirements", "=", "[", "]", "for", "line", "in", "requirements_file", ".", "splitlines", "(", ")", ":", "if", "_is_requirement", "(", "line", ")", ":", "if", "line", ".", "find", "(", "';'", ")", ">", "-", "1", ":", "dep", ",", "condition", "=", "tuple", "(", "line", ".", "split", "(", "';'", ")", ")", "extra_packages", "[", "condition", ".", "strip", "(", ")", "]", ".", "append", "(", "dep", ".", "strip", "(", ")", ")", "else", ":", "hard_requirements", ".", "append", "(", "line", ".", "strip", "(", ")", ")", "return", "hard_requirements", ",", "extra_packages" ]
41.692308
13.230769
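What the ';' split above does to a requirements line carrying a PEP 508-style environment marker (the package name is illustrative):

line = 'enum34; python_version < "3.4"'
dep, condition = tuple(line.split(';'))
print(dep.strip())        # enum34
print(condition.strip())  # python_version < "3.4"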
def get_password(vm_): r''' Return the password to use for a VM. vm\_ The configuration to obtain the password from. ''' return config.get_cloud_config_value( 'password', vm_, __opts__, default=config.get_cloud_config_value( 'passwd', vm_, __opts__, search_global=False ), search_global=False )
[ "def", "get_password", "(", "vm_", ")", ":", "return", "config", ".", "get_cloud_config_value", "(", "'password'", ",", "vm_", ",", "__opts__", ",", "default", "=", "config", ".", "get_cloud_config_value", "(", "'passwd'", ",", "vm_", ",", "__opts__", ",", "search_global", "=", "False", ")", ",", "search_global", "=", "False", ")" ]
24.6
18.2
def _BuildStreamingResponse(self, binary_stream, method_name=None): """Builds HTTPResponse object for streaming.""" precondition.AssertType(method_name, Text) # We get a first chunk of the output stream. This way the likelihood # of catching an exception that may happen during response generation # is much higher. content = binary_stream.GenerateContent() try: peek = content.next() stream = itertools.chain([peek], content) except StopIteration: stream = [] response = werkzeug_wrappers.Response( response=stream, content_type="binary/octet-stream", direct_passthrough=True) response.headers["Content-Disposition"] = (( "attachment; filename=%s" % binary_stream.filename).encode("utf-8")) if method_name: response.headers["X-API-Method"] = method_name.encode("utf-8") if binary_stream.content_length: response.content_length = binary_stream.content_length return response
[ "def", "_BuildStreamingResponse", "(", "self", ",", "binary_stream", ",", "method_name", "=", "None", ")", ":", "precondition", ".", "AssertType", "(", "method_name", ",", "Text", ")", "# We get a first chunk of the output stream. This way the likelihood", "# of catching an exception that may happen during response generation", "# is much higher.", "content", "=", "binary_stream", ".", "GenerateContent", "(", ")", "try", ":", "peek", "=", "content", ".", "next", "(", ")", "stream", "=", "itertools", ".", "chain", "(", "[", "peek", "]", ",", "content", ")", "except", "StopIteration", ":", "stream", "=", "[", "]", "response", "=", "werkzeug_wrappers", ".", "Response", "(", "response", "=", "stream", ",", "content_type", "=", "\"binary/octet-stream\"", ",", "direct_passthrough", "=", "True", ")", "response", ".", "headers", "[", "\"Content-Disposition\"", "]", "=", "(", "(", "\"attachment; filename=%s\"", "%", "binary_stream", ".", "filename", ")", ".", "encode", "(", "\"utf-8\"", ")", ")", "if", "method_name", ":", "response", ".", "headers", "[", "\"X-API-Method\"", "]", "=", "method_name", ".", "encode", "(", "\"utf-8\"", ")", "if", "binary_stream", ".", "content_length", ":", "response", ".", "content_length", "=", "binary_stream", ".", "content_length", "return", "response" ]
35.777778
19.962963
def get_chain_details_by_related_pdb_chains(self, pdb_id, chain_id, pfam_accs): ''' Returns a dict of SCOPe details for a PDB chain, built from the chain's associated Pfam accession numbers. This returns Pfam-level information, i.e. no details on the protein, species, or domain will be returned. If there are SCOPe entries for the associated Pfam accession numbers which agree then this function returns pretty complete information. ''' if not pfam_accs: return None associated_pdb_chains = set() pfam_api = self.get_pfam_api() for pfam_acc in pfam_accs: associated_pdb_chains = associated_pdb_chains.union(pfam_api.get_pdb_chains_from_pfam_accession_number(pfam_acc)) hits = [] #class_count = {} pfam_scop_mapping = {} for pdb_chain_pair in associated_pdb_chains: ass_pdb_id, ass_chain_id = pdb_chain_pair[0], pdb_chain_pair[1] hit = self.get_chain_details(ass_pdb_id, chain = ass_chain_id, internal_function_call = True, pfam_scop_mapping = pfam_scop_mapping) if hit and hit.get('chains'): assert(len(hit['chains']) == 1) hits.append(hit['chains'][ass_chain_id]) #for k, v in hit.iteritems(): #class_count[v['sccs']] = class_count.get(v['sccs'], 0) #class_count[v['sccs']] += 1 #print(' %s, %s: %s' % (v['pdb_id'], k, v['sccs'])) #pprint.pprint(class_count) allowed_scop_domains = map(int, map(set.intersection, pfam_scop_mapping.values())[0]) allowed_scop_domains = list(set((allowed_scop_domains or []) + (self.get_sunid_for_pfam_accs(pfam_accs) or []))) filtered_hits = [] print(pfam_accs) print(allowed_scop_domains) print('%d hits' % len(hits)) for hit in hits: domains_to_ignore = [] for k, v in hit['domains'].iteritems(): if v['sunid'] in allowed_scop_domains: filtered_hits.append(v) print('%d filtered_hits' % len(filtered_hits)) if not filtered_hits: return None d = self.get_basic_pdb_chain_information(pdb_id, chain_id) d.update(self.get_common_fields(filtered_hits)) d.update(dict( SCOPe_sources = 'Pfam + SCOPe', SCOPe_search_fields = 'Pfam + link_pdb.pdb_chain_id', SCOPe_trust_level = 3 )) # Add the lowest common classification over all related Pfam families for k, v in sorted(self.levels.iteritems()): d[v] = None d.update(dict(self.get_common_hierarchy(filtered_hits))) return d
[ "def", "get_chain_details_by_related_pdb_chains", "(", "self", ",", "pdb_id", ",", "chain_id", ",", "pfam_accs", ")", ":", "if", "not", "pfam_accs", ":", "return", "None", "associated_pdb_chains", "=", "set", "(", ")", "pfam_api", "=", "self", ".", "get_pfam_api", "(", ")", "for", "pfam_acc", "in", "pfam_accs", ":", "associated_pdb_chains", "=", "associated_pdb_chains", ".", "union", "(", "pfam_api", ".", "get_pdb_chains_from_pfam_accession_number", "(", "pfam_acc", ")", ")", "hits", "=", "[", "]", "#class_count = {}", "pfam_scop_mapping", "=", "{", "}", "for", "pdb_chain_pair", "in", "associated_pdb_chains", ":", "ass_pdb_id", ",", "ass_chain_id", "=", "pdb_chain_pair", "[", "0", "]", ",", "pdb_chain_pair", "[", "1", "]", "hit", "=", "self", ".", "get_chain_details", "(", "ass_pdb_id", ",", "chain", "=", "ass_chain_id", ",", "internal_function_call", "=", "True", ",", "pfam_scop_mapping", "=", "pfam_scop_mapping", ")", "if", "hit", "and", "hit", ".", "get", "(", "'chains'", ")", ":", "assert", "(", "len", "(", "hit", "[", "'chains'", "]", ")", "==", "1", ")", "hits", ".", "append", "(", "hit", "[", "'chains'", "]", "[", "ass_chain_id", "]", ")", "#for k, v in hit.iteritems():", "#class_count[v['sccs']] = class_count.get(v['sccs'], 0)", "#class_count[v['sccs']] += 1", "#print(' %s, %s: %s' % (v['pdb_id'], k, v['sccs']))", "#pprint.pprint(class_count)", "allowed_scop_domains", "=", "map", "(", "int", ",", "map", "(", "set", ".", "intersection", ",", "pfam_scop_mapping", ".", "values", "(", ")", ")", "[", "0", "]", ")", "allowed_scop_domains", "=", "list", "(", "set", "(", "(", "allowed_scop_domains", "or", "[", "]", ")", "+", "(", "self", ".", "get_sunid_for_pfam_accs", "(", "pfam_accs", ")", "or", "[", "]", ")", ")", ")", "filtered_hits", "=", "[", "]", "print", "(", "pfam_accs", ")", "print", "(", "allowed_scop_domains", ")", "print", "(", "'%d hits'", "%", "len", "(", "hits", ")", ")", "for", "hit", "in", "hits", ":", "domains_to_ignore", "=", "[", "]", "for", "k", ",", "v", "in", "hit", "[", "'domains'", "]", ".", "iteritems", "(", ")", ":", "if", "v", "[", "'sunid'", "]", "in", "allowed_scop_domains", ":", "filtered_hits", ".", "append", "(", "v", ")", "print", "(", "'%d filtered_hits'", "%", "len", "(", "filtered_hits", ")", ")", "if", "not", "filtered_hits", ":", "return", "None", "d", "=", "self", ".", "get_basic_pdb_chain_information", "(", "pdb_id", ",", "chain_id", ")", "d", ".", "update", "(", "self", ".", "get_common_fields", "(", "filtered_hits", ")", ")", "d", ".", "update", "(", "dict", "(", "SCOPe_sources", "=", "'Pfam + SCOPe'", ",", "SCOPe_search_fields", "=", "'Pfam + link_pdb.pdb_chain_id'", ",", "SCOPe_trust_level", "=", "3", ")", ")", "# Add the lowest common classification over all related Pfam families", "for", "k", ",", "v", "in", "sorted", "(", "self", ".", "levels", ".", "iteritems", "(", ")", ")", ":", "d", "[", "v", "]", "=", "None", "d", ".", "update", "(", "dict", "(", "self", ".", "get_common_hierarchy", "(", "filtered_hits", ")", ")", ")", "return", "d" ]
44.166667
25.166667
def set_scalar(self, indexer, value): """ Set scalar value into the array. Parameters ---------- indexer : any The indexer to set the value at. value : str The value to assign at the given locations. Raises ------ ValueError Raised when ``value`` is not a value element of this label array. """ try: value_code = self.reverse_categories[value] except KeyError: raise ValueError("%r is not in LabelArray categories." % value) self.as_int_array()[indexer] = value_code
[ "def", "set_scalar", "(", "self", ",", "indexer", ",", "value", ")", ":", "try", ":", "value_code", "=", "self", ".", "reverse_categories", "[", "value", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "\"%r is not in LabelArray categories.\"", "%", "value", ")", "self", ".", "as_int_array", "(", ")", "[", "indexer", "]", "=", "value_code" ]
27.304348
20
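A sketch of the categories-to-code mapping the method relies on; LabelArray itself is not reproduced here, and the ticker strings are made up:

import numpy as np

categories = np.array(['AAPL', 'MSFT', 'GOOG'])
reverse_categories = {c: i for i, c in enumerate(categories)}

codes = np.zeros(5, dtype=np.int64)    # stand-in for as_int_array()
codes[2] = reverse_categories['MSFT']  # what set_scalar does under the hood
print(categories[codes])
# ['AAPL' 'AAPL' 'MSFT' 'AAPL' 'AAPL']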
def ssh_sa_ssh_client_cipher(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ssh_sa = ET.SubElement(config, "ssh-sa", xmlns="urn:brocade.com:mgmt:brocade-sec-services") ssh = ET.SubElement(ssh_sa, "ssh") client = ET.SubElement(ssh, "client") cipher = ET.SubElement(client, "cipher") cipher.text = kwargs.pop('cipher') callback = kwargs.pop('callback', self._callback) return callback(config)
[ "def", "ssh_sa_ssh_client_cipher", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "ssh_sa", "=", "ET", ".", "SubElement", "(", "config", ",", "\"ssh-sa\"", ",", "xmlns", "=", "\"urn:brocade.com:mgmt:brocade-sec-services\"", ")", "ssh", "=", "ET", ".", "SubElement", "(", "ssh_sa", ",", "\"ssh\"", ")", "client", "=", "ET", ".", "SubElement", "(", "ssh", ",", "\"client\"", ")", "cipher", "=", "ET", ".", "SubElement", "(", "client", ",", "\"cipher\"", ")", "cipher", ".", "text", "=", "kwargs", ".", "pop", "(", "'cipher'", ")", "callback", "=", "kwargs", ".", "pop", "(", "'callback'", ",", "self", ".", "_callback", ")", "return", "callback", "(", "config", ")" ]
40.583333
12.5
def main(arguments=None): """Command-line entry point. :param arguments: List of strings that contain the command-line arguments. When ``None``, the command-line arguments are looked up in ``sys.argv`` (``sys.argv[0]`` is ignored). :return: This function has no return value. :raise SystemExit: The command-line arguments are invalid. """ # Parse command-line arguments. if arguments is None: arguments = sys.argv[1:] arguments = cli.parse_args(arguments) # Dynamic defaults. logging_endpoint = arguments.logging_endpoint if not logging_endpoint: logging_endpoint = os.environ.get('SMARTMOB_LOGGING_ENDPOINT') if not logging_endpoint: logging_endpoint = 'file:///dev/stdout' # Initialize logger. configure_logging( log_format=arguments.log_format, utc=arguments.utc_timestamps, endpoint=logging_endpoint, ) event_log = structlog.get_logger() # Start the event loop. loop = asyncio.get_event_loop() # Run the agent :-) try: with responder(loop, event_log=event_log, host=arguments.host, port=arguments.port): loop.run_forever() # pragma: no cover except KeyboardInterrupt: event_log.info('stop', reason='ctrl-c')
[ "def", "main", "(", "arguments", "=", "None", ")", ":", "# Parse command-line arguments.", "if", "arguments", "is", "None", ":", "arguments", "=", "sys", ".", "argv", "[", "1", ":", "]", "arguments", "=", "cli", ".", "parse_args", "(", "arguments", ")", "# Dynamic defaults.", "logging_endpoint", "=", "arguments", ".", "logging_endpoint", "if", "not", "logging_endpoint", ":", "logging_endpoint", "=", "os", ".", "environ", ".", "get", "(", "'SMARTMOB_LOGGING_ENDPOINT'", ")", "if", "not", "logging_endpoint", ":", "logging_endpoint", "=", "'file:///dev/stdout'", "# Initialize logger.", "configure_logging", "(", "log_format", "=", "arguments", ".", "log_format", ",", "utc", "=", "arguments", ".", "utc_timestamps", ",", "endpoint", "=", "logging_endpoint", ",", ")", "event_log", "=", "structlog", ".", "get_logger", "(", ")", "# Start the event loop.", "loop", "=", "asyncio", ".", "get_event_loop", "(", ")", "# Run the agent :-)", "try", ":", "with", "responder", "(", "loop", ",", "event_log", "=", "event_log", ",", "host", "=", "arguments", ".", "host", ",", "port", "=", "arguments", ".", "port", ")", ":", "loop", ".", "run_forever", "(", ")", "# pragma: no cover", "except", "KeyboardInterrupt", ":", "event_log", ".", "info", "(", "'stop'", ",", "reason", "=", "'ctrl-c'", ")" ]
31.658537
16.365854
def describe_unsupported(series, **kwargs): """Compute summary statistics of an unsupported (`S_TYPE_UNSUPPORTED`) variable (a Series). Parameters ---------- series : Series The variable to describe. Returns ------- Series The description of the variable as a Series with index being stats keys. """ leng = len(series) # number of observations in the Series count = series.count() # number of non-NaN observations in the Series n_infinite = count - series.count() # number of infinite observations (identically 0 here) results_data = {'count': count, 'p_missing': 1 - count * 1.0 / leng, 'n_missing': leng - count, 'p_infinite': n_infinite * 1.0 / leng, 'n_infinite': n_infinite, 'type': base.S_TYPE_UNSUPPORTED} try: # pandas 0.17 onwards results_data['memorysize'] = series.memory_usage() except: results_data['memorysize'] = 0 return pd.Series(results_data, name=series.name)
[ "def", "describe_unsupported", "(", "series", ",", "*", "*", "kwargs", ")", ":", "leng", "=", "len", "(", "series", ")", "# number of observations in the Series", "count", "=", "series", ".", "count", "(", ")", "# number of non-NaN observations in the Series", "n_infinite", "=", "count", "-", "series", ".", "count", "(", ")", "# number of infinte observations in the Series", "results_data", "=", "{", "'count'", ":", "count", ",", "'p_missing'", ":", "1", "-", "count", "*", "1.0", "/", "leng", ",", "'n_missing'", ":", "leng", "-", "count", ",", "'p_infinite'", ":", "n_infinite", "*", "1.0", "/", "leng", ",", "'n_infinite'", ":", "n_infinite", ",", "'type'", ":", "base", ".", "S_TYPE_UNSUPPORTED", "}", "try", ":", "# pandas 0.17 onwards", "results_data", "[", "'memorysize'", "]", "=", "series", ".", "memory_usage", "(", ")", "except", ":", "results_data", "[", "'memorysize'", "]", "=", "0", "return", "pd", ".", "Series", "(", "results_data", ",", "name", "=", "series", ".", "name", ")" ]
33.774194
21.935484
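The count/missing arithmetic used above, checked on a tiny Series:

import numpy as np
import pandas as pd

s = pd.Series([1.0, np.nan, 3.0, np.nan])
leng, count = len(s), s.count()
print(leng, count, leng - count, 1 - count * 1.0 / leng)
# 4 2 2 0.5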
def generate_repr(*members): """ Decorator that binds an auto-generated ``__repr__()`` function to a class. The generated ``__repr__()`` function prints in the following format: <ClassName object(field1=1, field2='A string', field3=[1, 2, 3]) at 0xAAAA> Note that this decorator modifies the given class in place! :param members: An iterable of member names to include into the representation-string. Providing no members leads to the inclusion of all member variables and properties in alphabetical order (except if they start with an underscore). To control the representation of each member, you can also pass a tuple where the first element contains the member to print and the second one the representation function (which defaults to the built-in ``repr()``). Using None as representation function is the same as using ``repr()``. Supported members are fields/variables, properties and getter-like functions (functions that accept no arguments). :raises ValueError: Raised when the passed (member, repr-function)-tuples do not have a length of 2. :raises AttributeError: Raised when a given member/attribute was not found in class. :raises TypeError: Raised when a provided member is a bound method that is not a getter-like function (means it must accept no parameters). :return: The class armed with an auto-generated __repr__ function. """ def decorator(cls): cls.__repr__ = __repr__ return cls if members: # Prepare members list. members_to_print = list(members) for i, member in enumerate(members_to_print): if isinstance(member, tuple): # Check tuple dimensions. length = len(member) if length == 2: members_to_print[i] = (member[0], member[1] if member[1] else repr) else: raise ValueError("Passed tuple " + repr(member) + " needs to be 2-dimensional, but has " + str(length) + " dimensions.") else: members_to_print[i] = (member, repr) def __repr__(self): return _construct_repr_string(self, members_to_print) else: def __repr__(self): # Need to fetch member variables every time since they are unknown # until class instantiation. members_to_print = get_public_members(self) member_repr_list = ((member, repr) for member in sorted(members_to_print, key=str.lower)) return _construct_repr_string(self, member_repr_list) return decorator
[ "def", "generate_repr", "(", "*", "members", ")", ":", "def", "decorator", "(", "cls", ")", ":", "cls", ".", "__repr__", "=", "__repr__", "return", "cls", "if", "members", ":", "# Prepare members list.", "members_to_print", "=", "list", "(", "members", ")", "for", "i", ",", "member", "in", "enumerate", "(", "members_to_print", ")", ":", "if", "isinstance", "(", "member", ",", "tuple", ")", ":", "# Check tuple dimensions.", "length", "=", "len", "(", "member", ")", "if", "length", "==", "2", ":", "members_to_print", "[", "i", "]", "=", "(", "member", "[", "0", "]", ",", "member", "[", "1", "]", "if", "member", "[", "1", "]", "else", "repr", ")", "else", ":", "raise", "ValueError", "(", "\"Passed tuple \"", "+", "repr", "(", "member", ")", "+", "\" needs to be 2-dimensional, but has \"", "+", "str", "(", "length", ")", "+", "\" dimensions.\"", ")", "else", ":", "members_to_print", "[", "i", "]", "=", "(", "member", ",", "repr", ")", "def", "__repr__", "(", "self", ")", ":", "return", "_construct_repr_string", "(", "self", ",", "members_to_print", ")", "else", ":", "def", "__repr__", "(", "self", ")", ":", "# Need to fetch member variables every time since they are unknown", "# until class instantation.", "members_to_print", "=", "get_public_members", "(", "self", ")", "member_repr_list", "=", "(", "(", "member", ",", "repr", ")", "for", "member", "in", "sorted", "(", "members_to_print", ",", "key", "=", "str", ".", "lower", ")", ")", "return", "_construct_repr_string", "(", "self", ",", "member_repr_list", ")", "return", "decorator" ]
45.408451
24.478873
def emg_var(X): ''' variance (assuming a mean of zero) for each variable in the segmented time series (equals abs_energy divided by (seg_size - 1)) ''' segment_width = X.shape[1] return np.sum(X * X, axis=1) / (segment_width - 1)
[ "def", "emg_var", "(", "X", ")", ":", "segment_width", "=", "X", ".", "shape", "[", "1", "]", "return", "np", ".", "sum", "(", "X", "*", "X", ",", "axis", "=", "1", ")", "/", "(", "segment_width", "-", "1", ")" ]
48.2
22.2
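A numpy cross-check of the claim in the docstring above: when the true mean is zero, sum(x*x)/(n-1) matches the usual unbiased variance estimator with the mean term dropped.

import numpy as np

X = np.array([[1.0, -1.0, 2.0, -2.0],
              [0.5, -0.5, 0.5, -0.5]])
w = X.shape[1]
print(np.sum(X * X, axis=1) / (w - 1))  # [3.33333333 0.33333333]
print(np.var(X, axis=1, ddof=1))        # same here, because each row mean is 0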
def _set_af_ipv4_unicast(self, v, load=False): """ Setter method for af_ipv4_unicast, mapped from YANG variable /routing_system/router/isis/router_isis_cmds_holder/address_family/ipv4/af_ipv4_unicast (container) If this variable is read-only (config: false) in the source YANG file, then _set_af_ipv4_unicast is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_af_ipv4_unicast() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=af_ipv4_unicast.af_ipv4_unicast, is_container='container', presence=True, yang_name="af-ipv4-unicast", rest_name="unicast", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'IPv4 unicast address Family', u'cli-full-no': None, u'callpoint': u'IsisAfIpv4Ucast', u'cli-add-mode': None, u'cli-full-command': None, u'alt-name': u'unicast', u'cli-mode-name': u'config-router-isis-ipv4u'}}, namespace='urn:brocade.com:mgmt:brocade-isis', defining_module='brocade-isis', yang_type='container', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """af_ipv4_unicast must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=af_ipv4_unicast.af_ipv4_unicast, is_container='container', presence=True, yang_name="af-ipv4-unicast", rest_name="unicast", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'IPv4 unicast address Family', u'cli-full-no': None, u'callpoint': u'IsisAfIpv4Ucast', u'cli-add-mode': None, u'cli-full-command': None, u'alt-name': u'unicast', u'cli-mode-name': u'config-router-isis-ipv4u'}}, namespace='urn:brocade.com:mgmt:brocade-isis', defining_module='brocade-isis', yang_type='container', is_config=True)""", }) self.__af_ipv4_unicast = t if hasattr(self, '_set'): self._set()
[ "def", "_set_af_ipv4_unicast", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "base", "=", "af_ipv4_unicast", ".", "af_ipv4_unicast", ",", "is_container", "=", "'container'", ",", "presence", "=", "True", ",", "yang_name", "=", "\"af-ipv4-unicast\"", ",", "rest_name", "=", "\"unicast\"", ",", "parent", "=", "self", ",", "path_helper", "=", "self", ".", "_path_helper", ",", "extmethods", "=", "self", ".", "_extmethods", ",", "register_paths", "=", "True", ",", "extensions", "=", "{", "u'tailf-common'", ":", "{", "u'info'", ":", "u'IPv4 unicast address Family'", ",", "u'cli-full-no'", ":", "None", ",", "u'callpoint'", ":", "u'IsisAfIpv4Ucast'", ",", "u'cli-add-mode'", ":", "None", ",", "u'cli-full-command'", ":", "None", ",", "u'alt-name'", ":", "u'unicast'", ",", "u'cli-mode-name'", ":", "u'config-router-isis-ipv4u'", "}", "}", ",", "namespace", "=", "'urn:brocade.com:mgmt:brocade-isis'", ",", "defining_module", "=", "'brocade-isis'", ",", "yang_type", "=", "'container'", ",", "is_config", "=", "True", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "ValueError", "(", "{", "'error-string'", ":", "\"\"\"af_ipv4_unicast must be of a type compatible with container\"\"\"", ",", "'defined-type'", ":", "\"container\"", ",", "'generated-type'", ":", "\"\"\"YANGDynClass(base=af_ipv4_unicast.af_ipv4_unicast, is_container='container', presence=True, yang_name=\"af-ipv4-unicast\", rest_name=\"unicast\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'IPv4 unicast address Family', u'cli-full-no': None, u'callpoint': u'IsisAfIpv4Ucast', u'cli-add-mode': None, u'cli-full-command': None, u'alt-name': u'unicast', u'cli-mode-name': u'config-router-isis-ipv4u'}}, namespace='urn:brocade.com:mgmt:brocade-isis', defining_module='brocade-isis', yang_type='container', is_config=True)\"\"\"", ",", "}", ")", "self", ".", "__af_ipv4_unicast", "=", "t", "if", "hasattr", "(", "self", ",", "'_set'", ")", ":", "self", ".", "_set", "(", ")" ]
93.818182
45.409091
def name(self, src=None):
    """Return a string representing the name of this type."""
    res = [_get_type_name(tt, src) for tt in self._types]
    if len(res) == 2 and "None" in res:
        res.remove("None")
        return "?" + res[0]
    else:
        return " | ".join(res)
[ "def", "name", "(", "self", ",", "src", "=", "None", ")", ":", "res", "=", "[", "_get_type_name", "(", "tt", ",", "src", ")", "for", "tt", "in", "self", ".", "_types", "]", "if", "len", "(", "res", ")", "==", "2", "and", "\"None\"", "in", "res", ":", "res", ".", "remove", "(", "\"None\"", ")", "return", "\"?\"", "+", "res", "[", "0", "]", "else", ":", "return", "\" | \"", ".", "join", "(", "res", ")" ]
37.5
11.375
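A self-contained sketch of the Optional-collapsing rule above (the helper name `union_name` is hypothetical; the real method delegates per-type naming to `_get_type_name`).

def union_name(type_names):
    res = list(type_names)
    if len(res) == 2 and "None" in res:  # exactly Optional[T]
        res.remove("None")
        return "?" + res[0]
    return " | ".join(res)

assert union_name(["int", "None"]) == "?int"
assert union_name(["int", "str", "None"]) == "int | str | None"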
def huffman_compute_decode_tree(cls): # type: () -> None """ huffman_compute_decode_tree initializes/builds the static_huffman_tree @return None @raise InvalidEncodingException if there is an encoding problem """ cls.static_huffman_tree = HuffmanNode(None, None) i = 0 for entry in cls.static_huffman_code: parent = cls.static_huffman_tree for idx in range(entry[1] - 1, -1, -1): b = (entry[0] >> idx) & 1 if isinstance(parent[b], bytes): raise InvalidEncodingException('Huffman unique prefix violation :/') # noqa: E501 if idx == 0: parent[b] = chb(i) if i < 256 else EOS() elif parent[b] is None: parent[b] = HuffmanNode(None, None) parent = parent[b] i += 1
[ "def", "huffman_compute_decode_tree", "(", "cls", ")", ":", "# type: () -> None", "cls", ".", "static_huffman_tree", "=", "HuffmanNode", "(", "None", ",", "None", ")", "i", "=", "0", "for", "entry", "in", "cls", ".", "static_huffman_code", ":", "parent", "=", "cls", ".", "static_huffman_tree", "for", "idx", "in", "range", "(", "entry", "[", "1", "]", "-", "1", ",", "-", "1", ",", "-", "1", ")", ":", "b", "=", "(", "entry", "[", "0", "]", ">>", "idx", ")", "&", "1", "if", "isinstance", "(", "parent", "[", "b", "]", ",", "bytes", ")", ":", "raise", "InvalidEncodingException", "(", "'Huffman unique prefix violation :/'", ")", "# noqa: E501", "if", "idx", "==", "0", ":", "parent", "[", "b", "]", "=", "chb", "(", "i", ")", "if", "i", "<", "256", "else", "EOS", "(", ")", "elif", "parent", "[", "b", "]", "is", "None", ":", "parent", "[", "b", "]", "=", "HuffmanNode", "(", "None", ",", "None", ")", "parent", "=", "parent", "[", "b", "]", "i", "+=", "1" ]
42
15.190476
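A decoding sketch against the tree built above, assuming (as the builder implies) that internal `HuffmanNode`s support index access by bit and that leaves are `bytes`; the function name is hypothetical.

def huffman_decode_bits(tree, bits):
    out = []
    node = tree
    for b in bits:                   # bits: iterable of 0/1
        node = node[b]
        if isinstance(node, bytes):  # leaf reached: emit byte, restart
            out.append(node)
            node = tree
    return b"".join(out)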
def decode_vlqs(s):
    """ Decode str `s` into a tuple of integers.
    """
    ints = []
    i = 0
    shift = 0
    for c in s:
        raw = B64_INT[c]
        cont = VLQ_CONT & raw
        i = ((VLQ_BASE_MASK & raw) << shift) | i
        shift += VLQ_SHIFT
        if not cont:
            sign = -1 if 1 & i else 1
            ints.append((i >> 1) * sign)
            i = 0
            shift = 0
    return tuple(ints)
[ "def", "decode_vlqs", "(", "s", ")", ":", "ints", "=", "[", "]", "i", "=", "0", "shift", "=", "0", "for", "c", "in", "s", ":", "raw", "=", "B64_INT", "[", "c", "]", "cont", "=", "VLQ_CONT", "&", "raw", "i", "=", "(", "(", "VLQ_BASE_MASK", "&", "raw", ")", "<<", "shift", ")", "|", "i", "shift", "+=", "VLQ_SHIFT", "if", "not", "cont", ":", "sign", "=", "-", "1", "if", "1", "&", "i", "else", "1", "ints", ".", "append", "(", "(", "i", ">>", "1", ")", "*", "sign", ")", "i", "=", "0", "shift", "=", "0", "return", "tuple", "(", "ints", ")" ]
19.52381
18.380952
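A worked example, assuming `B64_INT` maps the standard base64 digits and `VLQ_CONT`/`VLQ_SHIFT`/`VLQ_BASE_MASK` follow the source-map convention (32, 5, 31): each digit carries 5 payload bits, bit 6 marks continuation, and the lowest bit of the assembled value is the sign.

decode_vlqs("A")   # -> (0,)
decode_vlqs("C")   # -> (1,)    raw 2 = value 1, sign bit 0
decode_vlqs("D")   # -> (-1,)   raw 3 = value 1, sign bit 1
decode_vlqs("2H")  # -> (123,)  '2'=0b110110 (continue), 'H'=0b000111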
def _prepare_atoms(topology, compute_cycles=False): """Compute cycles and add white-/blacklists to atoms.""" atom1 = next(topology.atoms()) has_whitelists = hasattr(atom1, 'whitelist') has_cycles = hasattr(atom1, 'cycles') compute_cycles = compute_cycles and not has_cycles if compute_cycles or not has_whitelists: for atom in topology.atoms(): if compute_cycles: atom.cycles = set() if not has_whitelists: atom.whitelist = OrderedSet() atom.blacklist = OrderedSet() if compute_cycles: bond_graph = nx.Graph() bond_graph.add_nodes_from(topology.atoms()) bond_graph.add_edges_from(topology.bonds()) all_cycles = _find_chordless_cycles(bond_graph, max_cycle_size=8) for atom, cycles in zip(bond_graph.nodes, all_cycles): for cycle in cycles: atom.cycles.add(tuple(cycle))
[ "def", "_prepare_atoms", "(", "topology", ",", "compute_cycles", "=", "False", ")", ":", "atom1", "=", "next", "(", "topology", ".", "atoms", "(", ")", ")", "has_whitelists", "=", "hasattr", "(", "atom1", ",", "'whitelist'", ")", "has_cycles", "=", "hasattr", "(", "atom1", ",", "'cycles'", ")", "compute_cycles", "=", "compute_cycles", "and", "not", "has_cycles", "if", "compute_cycles", "or", "not", "has_whitelists", ":", "for", "atom", "in", "topology", ".", "atoms", "(", ")", ":", "if", "compute_cycles", ":", "atom", ".", "cycles", "=", "set", "(", ")", "if", "not", "has_whitelists", ":", "atom", ".", "whitelist", "=", "OrderedSet", "(", ")", "atom", ".", "blacklist", "=", "OrderedSet", "(", ")", "if", "compute_cycles", ":", "bond_graph", "=", "nx", ".", "Graph", "(", ")", "bond_graph", ".", "add_nodes_from", "(", "topology", ".", "atoms", "(", ")", ")", "bond_graph", ".", "add_edges_from", "(", "topology", ".", "bonds", "(", ")", ")", "all_cycles", "=", "_find_chordless_cycles", "(", "bond_graph", ",", "max_cycle_size", "=", "8", ")", "for", "atom", ",", "cycles", "in", "zip", "(", "bond_graph", ".", "nodes", ",", "all_cycles", ")", ":", "for", "cycle", "in", "cycles", ":", "atom", ".", "cycles", ".", "add", "(", "tuple", "(", "cycle", ")", ")" ]
40.217391
11.956522
def search_ap(self, mode, query):
    """Search access points.

    Look up the full configuration of the matching access points, including
    the configuration of every listening port.

    Args:
        - mode: search mode; one of domain, ip, or host
        - query: search text

    Returns:
        A tuple whose format is (<result>, <ResponseInfo>)
        - result: the search results on success, or {"error": "<errMsg string>"} on failure
        - ResponseInfo: the Response information for the request
    """
    url = '{0}/v3/aps/search?{1}={2}'.format(self.host, mode, query)
    return self.__get(url)
[ "def", "search_ap", "(", "self", ",", "mode", ",", "query", ")", ":", "url", "=", "'{0}/v3/aps/search?{1}={2}'", ".", "format", "(", "self", ".", "host", ",", "mode", ",", "query", ")", "return", "self", ".", "__get", "(", "url", ")" ]
29.25
18.1875
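A hypothetical usage sketch (the client construction below is made up; only the method call and return shape follow the docstring).

client = QiniuClient(host='https://api.example.com')  # hypothetical ctor
result, info = client.search_ap('domain', 'example.org')
if result is not None and 'error' in result:
    print('search failed:', result['error'])
else:
    print('matched access points:', result)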
def exportSite(self, location): """ This operation exports the portal site configuration to a location you specify. """ params = { "location" : location, "f" : "json" } url = self._url + "/exportSite" return self._post(url=url, param_dict=params)
[ "def", "exportSite", "(", "self", ",", "location", ")", ":", "params", "=", "{", "\"location\"", ":", "location", ",", "\"f\"", ":", "\"json\"", "}", "url", "=", "self", ".", "_url", "+", "\"/exportSite\"", "return", "self", ".", "_post", "(", "url", "=", "url", ",", "param_dict", "=", "params", ")" ]
29.454545
13.818182
def update_endpoint(self, updated_ed):
    """
    Update a previously advertised endpoint description.

    :param updated_ed: an instance of EndpointDescription to update.
    Must not be None.
    :return: True if the endpoint was updated, False if not (e.g. it has
    not been advertised yet)
    """
    endpoint_id = updated_ed.get_id()
    with self._published_endpoints_lock:
        if self.get_advertised_endpoint(endpoint_id) is None:
            return False

        advertise_result = self._update(updated_ed)
        if advertise_result:
            self._remove_advertised(endpoint_id)
            self._add_advertised(updated_ed, advertise_result)
            return True

        return False
[ "def", "update_endpoint", "(", "self", ",", "updated_ed", ")", ":", "endpoint_id", "=", "updated_ed", ".", "get_id", "(", ")", "with", "self", ".", "_published_endpoints_lock", ":", "if", "self", ".", "get_advertised_endpoint", "(", "endpoint_id", ")", "is", "None", ":", "return", "False", "advertise_result", "=", "self", ".", "_update", "(", "updated_ed", ")", "if", "advertise_result", ":", "self", ".", "_remove_advertised", "(", "endpoint_id", ")", "self", ".", "_add_advertised", "(", "updated_ed", ",", "advertise_result", ")", "return", "True", "return", "False" ]
35.857143
15.571429
def _ignore_path(cls, path, ignore_list=None, white_list=None):
    """Returns whether a path should be ignored."""
    ignore_list = ignore_list or []
    white_list = white_list or []
    return (cls._matches_patterns(path, ignore_list)
            and not cls._matches_patterns(path, white_list))
[ "def", "_ignore_path", "(", "cls", ",", "path", ",", "ignore_list", "=", "None", ",", "white_list", "=", "None", ")", ":", "ignore_list", "=", "ignore_list", "or", "[", "]", "white_list", "=", "white_list", "or", "[", "]", "return", "(", "cls", ".", "_matches_patterns", "(", "path", ",", "ignore_list", ")", "and", "not", "cls", ".", "_matches_patterns", "(", "path", ",", "white_list", ")", ")" ]
53.833333
11.166667
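A standalone sketch of the same ignore/whitelist rule using `fnmatch` (the real `_matches_patterns` helper is not shown here, so its glob semantics are an assumption).

import fnmatch

def matches(path, patterns):  # stand-in for _matches_patterns
    return any(fnmatch.fnmatch(path, p) for p in patterns)

ignore_list = ['*.pyc', 'build/*']
white_list = ['build/keep.txt']

print(matches('build/out.o', ignore_list)
      and not matches('build/out.o', white_list))     # True: ignored
print(matches('build/keep.txt', ignore_list)
      and not matches('build/keep.txt', white_list))  # False: whitelisted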
def datetime_to_str(self,format="%Y-%m-%dT%H:%M:%S%ZP"): """ Create a new SArray with all the values cast to str. The string format is specified by the 'format' parameter. Parameters ---------- format : str The format to output the string. Default format is "%Y-%m-%dT%H:%M:%S%ZP". Returns ------- out : SArray[str] The SArray converted to the type 'str'. Examples -------- >>> dt = datetime.datetime(2011, 10, 20, 9, 30, 10, tzinfo=GMT(-5)) >>> sa = turicreate.SArray([dt]) >>> sa.datetime_to_str("%e %b %Y %T %ZP") dtype: str Rows: 1 [20 Oct 2011 09:30:10 GMT-05:00] See Also ---------- str_to_datetime References ---------- [1] Boost date time from string conversion guide (http://www.boost.org/doc/libs/1_48_0/doc/html/date_time/date_time_io.html) """ if(self.dtype != datetime.datetime): raise TypeError("datetime_to_str expects SArray of datetime as input SArray") with cython_context(): return SArray(_proxy=self.__proxy__.datetime_to_str(format))
[ "def", "datetime_to_str", "(", "self", ",", "format", "=", "\"%Y-%m-%dT%H:%M:%S%ZP\"", ")", ":", "if", "(", "self", ".", "dtype", "!=", "datetime", ".", "datetime", ")", ":", "raise", "TypeError", "(", "\"datetime_to_str expects SArray of datetime as input SArray\"", ")", "with", "cython_context", "(", ")", ":", "return", "SArray", "(", "_proxy", "=", "self", ".", "__proxy__", ".", "datetime_to_str", "(", "format", ")", ")" ]
31.052632
25.263158
def xcoord(self): """The x coordinate :class:`xarray.Variable`""" return self.decoder.get_x(self.data, coords=self.data.coords)
[ "def", "xcoord", "(", "self", ")", ":", "return", "self", ".", "decoder", ".", "get_x", "(", "self", ".", "data", ",", "coords", "=", "self", ".", "data", ".", "coords", ")" ]
47
17.333333
def getfile(self): """Gets the full file path of the entered/selected file :returns: str -- the name of the data file to open/create """ current_file = str(self.selectedFiles()[0]) if os.path.isfile(current_file): print 'current_file', current_file if current_file.endswith('.raw') or current_file.endswith('.pst'): fmode = 'r' else: fmode = 'a' else: if not current_file.endswith('.hdf5') and not current_file.endswith('.h5'): current_file += '.hdf5' fmode = 'w-' return current_file, fmode
[ "def", "getfile", "(", "self", ")", ":", "current_file", "=", "str", "(", "self", ".", "selectedFiles", "(", ")", "[", "0", "]", ")", "if", "os", ".", "path", ".", "isfile", "(", "current_file", ")", ":", "print", "'current_file'", ",", "current_file", "if", "current_file", ".", "endswith", "(", "'.raw'", ")", "or", "current_file", ".", "endswith", "(", "'.pst'", ")", ":", "fmode", "=", "'r'", "else", ":", "fmode", "=", "'a'", "else", ":", "if", "not", "current_file", ".", "endswith", "(", "'.hdf5'", ")", "and", "not", "current_file", ".", "endswith", "(", "'.h5'", ")", ":", "current_file", "+=", "'.hdf5'", "fmode", "=", "'w-'", "return", "current_file", ",", "fmode" ]
37.647059
16.941176
def add_signal_handler(): """Adds a signal handler to handle KeyboardInterrupt.""" import signal def handler(sig, frame): if sig == signal.SIGINT: librtmp.RTMP_UserInterrupt() raise KeyboardInterrupt signal.signal(signal.SIGINT, handler)
[ "def", "add_signal_handler", "(", ")", ":", "import", "signal", "def", "handler", "(", "sig", ",", "frame", ")", ":", "if", "sig", "==", "signal", ".", "SIGINT", ":", "librtmp", ".", "RTMP_UserInterrupt", "(", ")", "raise", "KeyboardInterrupt", "signal", ".", "signal", "(", "signal", ".", "SIGINT", ",", "handler", ")" ]
27.8
14.4
def assemble(self, module, *modules, **kwargs): # type: (AbstractModule, *AbstractModule, **Any) -> SeqRecord """Assemble the provided modules into the vector. Arguments: module (`~moclo.base.modules.AbstractModule`): a module to insert in the vector. modules (`~moclo.base.modules.AbstractModule`, optional): additional modules to insert in the vector. The order of the parameters is not important, since modules will be sorted by their start overhang in the function. Returns: `~Bio.SeqRecord.SeqRecord`: the assembled sequence with sequence annotations inherited from the vector and the modules. Raises: `~moclo.errors.DuplicateModules`: when two different modules share the same start overhang, leading in possibly non-deterministic constructs. `~moclo.errors.MissingModule`: when a module has an end overhang that is not shared by any other module, leading to a partial construct only `~moclo.errors.InvalidSequence`: when one of the modules does not match the required module structure (missing site, wrong overhang, etc.). `~moclo.errors.UnusedModules`: when some modules were not used during the assembly (mostly caused by duplicate parts). """ mgr = AssemblyManager( vector=self, modules=[module] + list(modules), name=kwargs.get("name", "assembly"), id_=kwargs.get("id", "assembly"), ) return mgr.assemble()
[ "def", "assemble", "(", "self", ",", "module", ",", "*", "modules", ",", "*", "*", "kwargs", ")", ":", "# type: (AbstractModule, *AbstractModule, **Any) -> SeqRecord", "mgr", "=", "AssemblyManager", "(", "vector", "=", "self", ",", "modules", "=", "[", "module", "]", "+", "list", "(", "modules", ")", ",", "name", "=", "kwargs", ".", "get", "(", "\"name\"", ",", "\"assembly\"", ")", ",", "id_", "=", "kwargs", ".", "get", "(", "\"id\"", ",", "\"assembly\"", ")", ",", ")", "return", "mgr", ".", "assemble", "(", ")" ]
44.131579
24.447368
def Sample(self): """Takes a sample for profiling.""" self._profiling_sample += 1 if self._profiling_sample >= self._profiling_sample_rate: if self._heapy: heap = self._heapy.heap() heap.dump(self._sample_file) self._profiling_sample = 0
[ "def", "Sample", "(", "self", ")", ":", "self", ".", "_profiling_sample", "+=", "1", "if", "self", ".", "_profiling_sample", ">=", "self", ".", "_profiling_sample_rate", ":", "if", "self", ".", "_heapy", ":", "heap", "=", "self", ".", "_heapy", ".", "heap", "(", ")", "heap", ".", "dump", "(", "self", ".", "_sample_file", ")", "self", ".", "_profiling_sample", "=", "0" ]
27
17.1
def dict_given_run_array(samples, thread_min_max):
    """
    Converts an array of information about samples back into a nested
    sampling run dictionary (see data_processing module docstring for more
    details).

    N.B. the output dict only contains the following keys: 'logl',
    'thread_labels', 'thread_min_max', 'nlive_array', 'theta'. Any other
    keys giving additional information about the run output cannot be
    reproduced from the function arguments, and are therefore omitted.

    Parameters
    ----------
    samples: numpy array
        Numpy array containing columns
        [logl, thread label, change in nlive at sample, (thetas)]
        with each row representing a single sample.
    thread_min_max: numpy array
        2d array with a row for each thread containing the likelihoods at
        which it begins and ends.
        Needed to calculate nlive_array (otherwise this is set to None).

    Returns
    -------
    ns_run: dict
        Nested sampling run dict (see data_processing module docstring for
        more details).
    """
    ns_run = {'logl': samples[:, 0],
              'thread_labels': samples[:, 1],
              'thread_min_max': thread_min_max,
              'theta': samples[:, 3:]}
    if np.all(~np.isnan(ns_run['thread_labels'])):
        ns_run['thread_labels'] = ns_run['thread_labels'].astype(int)
        assert np.array_equal(samples[:, 1], ns_run['thread_labels']), ((
            'Casting thread labels from samples array to int has changed '
            'their values!\nsamples[:, 1]={}\nthread_labels={}').format(
                samples[:, 1], ns_run['thread_labels']))
    nlive_0 = (thread_min_max[:, 0] <= ns_run['logl'].min()).sum()
    assert nlive_0 > 0, 'nlive_0={}'.format(nlive_0)
    nlive_array = np.zeros(samples.shape[0]) + nlive_0
    nlive_array[1:] += np.cumsum(samples[:-1, 2])
    # Check if there are multiple threads starting on the first logl point
    dup_th_starts = (thread_min_max[:, 0] == ns_run['logl'].min()).sum()
    if dup_th_starts > 1:
        # In this case we approximate the true nlive (which we dont really
        # know) by making sure the array's final point is 1 and setting all
        # points with logl = logl.min() to have the same nlive
        nlive_array += (1 - nlive_array[-1])
        n_logl_min = (ns_run['logl'] == ns_run['logl'].min()).sum()
        nlive_array[:n_logl_min] = nlive_0
        warnings.warn((
            'duplicate starting logls: {} threads start at logl.min()={}, '
            'and {} points have logl=logl.min(). nlive_array may only be '
            'approximately correct.').format(
                dup_th_starts, ns_run['logl'].min(), n_logl_min), UserWarning)
    assert nlive_array.min() > 0, ((
        'nlive contains 0s or negative values. nlive_0={}'
        '\nnlive_array = {}\nthread_min_max={}').format(
            nlive_0, nlive_array, thread_min_max))
    assert nlive_array[-1] == 1, (
        'final point in nlive_array != 1.\nnlive_array = ' + str(nlive_array))
    ns_run['nlive_array'] = nlive_array
    return ns_run
[ "def", "dict_given_run_array", "(", "samples", ",", "thread_min_max", ")", ":", "ns_run", "=", "{", "'logl'", ":", "samples", "[", ":", ",", "0", "]", ",", "'thread_labels'", ":", "samples", "[", ":", ",", "1", "]", ",", "'thread_min_max'", ":", "thread_min_max", ",", "'theta'", ":", "samples", "[", ":", ",", "3", ":", "]", "}", "if", "np", ".", "all", "(", "~", "np", ".", "isnan", "(", "ns_run", "[", "'thread_labels'", "]", ")", ")", ":", "ns_run", "[", "'thread_labels'", "]", "=", "ns_run", "[", "'thread_labels'", "]", ".", "astype", "(", "int", ")", "assert", "np", ".", "array_equal", "(", "samples", "[", ":", ",", "1", "]", ",", "ns_run", "[", "'thread_labels'", "]", ")", ",", "(", "(", "'Casting thread labels from samples array to int has changed '", "'their values!\\nsamples[:, 1]={}\\nthread_labels={}'", ")", ".", "format", "(", "samples", "[", ":", ",", "1", "]", ",", "ns_run", "[", "'thread_labels'", "]", ")", ")", "nlive_0", "=", "(", "thread_min_max", "[", ":", ",", "0", "]", "<=", "ns_run", "[", "'logl'", "]", ".", "min", "(", ")", ")", ".", "sum", "(", ")", "assert", "nlive_0", ">", "0", ",", "'nlive_0={}'", ".", "format", "(", "nlive_0", ")", "nlive_array", "=", "np", ".", "zeros", "(", "samples", ".", "shape", "[", "0", "]", ")", "+", "nlive_0", "nlive_array", "[", "1", ":", "]", "+=", "np", ".", "cumsum", "(", "samples", "[", ":", "-", "1", ",", "2", "]", ")", "# Check if there are multiple threads starting on the first logl point", "dup_th_starts", "=", "(", "thread_min_max", "[", ":", ",", "0", "]", "==", "ns_run", "[", "'logl'", "]", ".", "min", "(", ")", ")", ".", "sum", "(", ")", "if", "dup_th_starts", ">", "1", ":", "# In this case we approximate the true nlive (which we dont really", "# know) by making sure the array's final point is 1 and setting all", "# points with logl = logl.min() to have the same nlive", "nlive_array", "+=", "(", "1", "-", "nlive_array", "[", "-", "1", "]", ")", "n_logl_min", "=", "(", "ns_run", "[", "'logl'", "]", "==", "ns_run", "[", "'logl'", "]", ".", "min", "(", ")", ")", ".", "sum", "(", ")", "nlive_array", "[", ":", "n_logl_min", "]", "=", "nlive_0", "warnings", ".", "warn", "(", "(", "'duplicate starting logls: {} threads start at logl.min()={}, '", "'and {} points have logl=logl.min(). nlive_array may only be '", "'approximately correct.'", ")", ".", "format", "(", "dup_th_starts", ",", "ns_run", "[", "'logl'", "]", ".", "min", "(", ")", ",", "n_logl_min", ")", ",", "UserWarning", ")", "assert", "nlive_array", ".", "min", "(", ")", ">", "0", ",", "(", "(", "'nlive contains 0s or negative values. nlive_0={}'", "'\\nnlive_array = {}\\nthread_min_max={}'", ")", ".", "format", "(", "nlive_0", ",", "nlive_array", ",", "thread_min_max", ")", ")", "assert", "nlive_array", "[", "-", "1", "]", "==", "1", ",", "(", "'final point in nlive_array != 1.\\nnlive_array = '", "+", "str", "(", "nlive_array", ")", ")", "ns_run", "[", "'nlive_array'", "]", "=", "nlive_array", "return", "ns_run" ]
47.857143
20.968254
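A minimal round-trip sketch with a single-thread run (all values made up); the third column is the change in nlive at each sample, and the checks in the function require the final nlive to be 1.

import numpy as np

samples = np.array([[-3.0, 0, 0, 0.1],    # columns: logl, thread label,
                    [-2.0, 0, 0, 0.2],    # delta nlive, theta
                    [-1.0, 0, -1, 0.3]])
thread_min_max = np.array([[-np.inf, -1.0]])  # thread spans the whole run
run = dict_given_run_array(samples, thread_min_max)
print(run['nlive_array'])  # [1. 1. 1.]: one live point throughout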
def process(self, sched, coro): """Add the timeout in the scheduler, check for defaults.""" super(TimedOperation, self).process(sched, coro) if sched.default_timeout and not self.timeout: self.set_timeout(sched.default_timeout) if self.timeout and self.timeout != -1: self.coro = coro if self.weak_timeout: self.last_checkpoint = getnow() self.delta = self.timeout - self.last_checkpoint else: self.last_checkpoint = self.delta = None heapq.heappush(sched.timeouts, self)
[ "def", "process", "(", "self", ",", "sched", ",", "coro", ")", ":", "super", "(", "TimedOperation", ",", "self", ")", ".", "process", "(", "sched", ",", "coro", ")", "if", "sched", ".", "default_timeout", "and", "not", "self", ".", "timeout", ":", "self", ".", "set_timeout", "(", "sched", ".", "default_timeout", ")", "if", "self", ".", "timeout", "and", "self", ".", "timeout", "!=", "-", "1", ":", "self", ".", "coro", "=", "coro", "if", "self", ".", "weak_timeout", ":", "self", ".", "last_checkpoint", "=", "getnow", "(", ")", "self", ".", "delta", "=", "self", ".", "timeout", "-", "self", ".", "last_checkpoint", "else", ":", "self", ".", "last_checkpoint", "=", "self", ".", "delta", "=", "None", "heapq", ".", "heappush", "(", "sched", ".", "timeouts", ",", "self", ")" ]
38.375
17.125
def remove_api_keys_from_group(self, group_id, body, **kwargs): # noqa: E501 """Remove API keys from a group. # noqa: E501 An endpoint for removing API keys from groups. **Example usage:** `curl -X DELETE https://api.us-east-1.mbedcloud.com/v3/policy-groups/{group-id}/api-keys -d '[0162056a9a1586f30242590700000000,0117056a9a1586f30242590700000000]' -H 'content-type: application/json' -H 'Authorization: Bearer API_KEY'` # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass asynchronous=True >>> thread = api.remove_api_keys_from_group(group_id, body, asynchronous=True) >>> result = thread.get() :param asynchronous bool :param str group_id: The ID of the group whose API keys are removed. (required) :param SubjectList body: A list of API keys to be removed from the group. (required) :return: UpdatedResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('asynchronous'): return self.remove_api_keys_from_group_with_http_info(group_id, body, **kwargs) # noqa: E501 else: (data) = self.remove_api_keys_from_group_with_http_info(group_id, body, **kwargs) # noqa: E501 return data
[ "def", "remove_api_keys_from_group", "(", "self", ",", "group_id", ",", "body", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'asynchronous'", ")", ":", "return", "self", ".", "remove_api_keys_from_group_with_http_info", "(", "group_id", ",", "body", ",", "*", "*", "kwargs", ")", "# noqa: E501", "else", ":", "(", "data", ")", "=", "self", ".", "remove_api_keys_from_group_with_http_info", "(", "group_id", ",", "body", ",", "*", "*", "kwargs", ")", "# noqa: E501", "return", "data" ]
63.590909
37.818182
def send_token(self, token): """ Used by `Client.connect()`. After connecting to an official server and sending the handshake packets, the client has to send the token acquired through `utils.find_server()`, otherwise the server will drop the connection when receiving any other packet. """ self.send_struct('<B%iB' % len(token), 80, *map(ord, token)) self.server_token = token
[ "def", "send_token", "(", "self", ",", "token", ")", ":", "self", ".", "send_struct", "(", "'<B%iB'", "%", "len", "(", "token", ")", ",", "80", ",", "*", "map", "(", "ord", ",", "token", ")", ")", "self", ".", "server_token", "=", "token" ]
40
16.909091
def _encode(values, salt, min_length, alphabet, separators, guards): """Helper function that does the hash building without argument checks.""" len_alphabet = len(alphabet) len_separators = len(separators) values_hash = sum(x % (i + 100) for i, x in enumerate(values)) encoded = lottery = alphabet[values_hash % len(alphabet)] for i, value in enumerate(values): alphabet_salt = (lottery + salt + alphabet)[:len_alphabet] alphabet = _reorder(alphabet, alphabet_salt) last = _hash(value, alphabet) encoded += last value %= ord(last[0]) + i encoded += separators[value % len_separators] encoded = encoded[:-1] # cut off last separator return (encoded if len(encoded) >= min_length else _ensure_length(encoded, min_length, alphabet, guards, values_hash))
[ "def", "_encode", "(", "values", ",", "salt", ",", "min_length", ",", "alphabet", ",", "separators", ",", "guards", ")", ":", "len_alphabet", "=", "len", "(", "alphabet", ")", "len_separators", "=", "len", "(", "separators", ")", "values_hash", "=", "sum", "(", "x", "%", "(", "i", "+", "100", ")", "for", "i", ",", "x", "in", "enumerate", "(", "values", ")", ")", "encoded", "=", "lottery", "=", "alphabet", "[", "values_hash", "%", "len", "(", "alphabet", ")", "]", "for", "i", ",", "value", "in", "enumerate", "(", "values", ")", ":", "alphabet_salt", "=", "(", "lottery", "+", "salt", "+", "alphabet", ")", "[", ":", "len_alphabet", "]", "alphabet", "=", "_reorder", "(", "alphabet", ",", "alphabet_salt", ")", "last", "=", "_hash", "(", "value", ",", "alphabet", ")", "encoded", "+=", "last", "value", "%=", "ord", "(", "last", "[", "0", "]", ")", "+", "i", "encoded", "+=", "separators", "[", "value", "%", "len_separators", "]", "encoded", "=", "encoded", "[", ":", "-", "1", "]", "# cut off last separator", "return", "(", "encoded", "if", "len", "(", "encoded", ")", ">=", "min_length", "else", "_ensure_length", "(", "encoded", ",", "min_length", ",", "alphabet", ",", "guards", ",", "values_hash", ")", ")" ]
41.4
19.6
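This helper is internal to the hashids package; typical use goes through the public `Hashids` API (the exact hash string depends on the salt, so the value shown is only indicative).

from hashids import Hashids

hashids = Hashids(salt='this is my salt', min_length=8)
hashid = hashids.encode(123, 456)  # e.g. 'AOyPxbaB' (salt-dependent)
print(hashids.decode(hashid))      # (123, 456)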
def _format_field_name(self, field_name) -> str: """Formats a field's name for usage in SQL. Arguments: field_name: The field name to format. Returns: The specified field name formatted for usage in SQL. """ field = self._get_model_field(field_name) return self.qn(field.column)
[ "def", "_format_field_name", "(", "self", ",", "field_name", ")", "->", "str", ":", "field", "=", "self", ".", "_get_model_field", "(", "field_name", ")", "return", "self", ".", "qn", "(", "field", ".", "column", ")" ]
26.285714
16.428571
def verify_dataset(X, y):
    """Verifies that a dataset is valid for use, i.e. in scikit-learn format

    Verifies a dataset by returning its shape and basic statistics. This
    also provides a quick and dirty check on the capability of the host
    machine to process the data.

    Args:
        X (array-like): Features array
        y (array-like): Label array

    Returns:
        dict: Contains `features_shape`, the shape of `X`, and
            `labels_shape`, the shape of `y`

    Raises:
        UserError: `X` must be a 2-dimensional array and `y` must be a
            1-dimensional array. `X` must have the same number of elements
            as `y` i.e. X_shape[0] == y_shape[0]. If any of these
            conditions are not met, a UserError is raised.
    """
    X_shape, y_shape = np.array(X).shape, np.array(y).shape
    if len(X_shape) != 2:
        raise exceptions.UserError("X must be 2-dimensional array")
    if len(y_shape) != 1:
        raise exceptions.UserError("y must be 1-dimensional array")
    if X_shape[0] != y_shape[0]:
        raise exceptions.UserError("X must have same number of elements as y")
    return dict(
        features_shape=X_shape,
        labels_shape=y_shape
    )
[ "def", "verify_dataset", "(", "X", ",", "y", ")", ":", "X_shape", ",", "y_shape", "=", "np", ".", "array", "(", "X", ")", ".", "shape", ",", "np", ".", "array", "(", "y", ")", ".", "shape", "if", "len", "(", "X_shape", ")", "!=", "2", ":", "raise", "exceptions", ".", "UserError", "(", "\"X must be 2-dimensional array\"", ")", "if", "len", "(", "y_shape", ")", "!=", "1", ":", "raise", "exceptions", ".", "UserError", "(", "\"y must be 1-dimensional array\"", ")", "if", "X_shape", "[", "0", "]", "!=", "y_shape", "[", "0", "]", ":", "raise", "exceptions", ".", "UserError", "(", "\"X must have same number of elements as y\"", ")", "return", "dict", "(", "features_shape", "=", "X_shape", ",", "labels_shape", "=", "y_shape", ")" ]
35.411765
23.029412
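A quick usage sketch with made-up data.

import numpy as np

X = np.random.rand(100, 5)             # 100 samples, 5 features
y = np.random.randint(0, 2, size=100)
print(verify_dataset(X, y))
# {'features_shape': (100, 5), 'labels_shape': (100,)}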
def XCHG(cpu, dest, src): """ Exchanges register/memory with register. Exchanges the contents of the destination (first) and source (second) operands. The operands can be two general-purpose registers or a register and a memory location. If a memory operand is referenced, the processor's locking protocol is automatically implemented for the duration of the exchange operation, regardless of the presence or absence of the LOCK prefix or of the value of the IOPL. This instruction is useful for implementing semaphores or similar data structures for process synchronization. The XCHG instruction can also be used instead of the BSWAP instruction for 16-bit operands:: TEMP = DEST DEST = SRC SRC = TEMP :param cpu: current CPU. :param dest: destination operand. :param src: source operand. """ temp = dest.read() dest.write(src.read()) src.write(temp)
[ "def", "XCHG", "(", "cpu", ",", "dest", ",", "src", ")", ":", "temp", "=", "dest", ".", "read", "(", ")", "dest", ".", "write", "(", "src", ".", "read", "(", ")", ")", "src", ".", "write", "(", "temp", ")" ]
39.807692
20.115385
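A tiny self-contained sketch of the operand contract the swap relies on, assuming XCHG is callable as the plain function shown (the `Register` class here is hypothetical, not the emulator's real operand type).

class Register:
    def __init__(self, value):
        self.value = value
    def read(self):
        return self.value
    def write(self, value):
        self.value = value

dest, src = Register(0xAA), Register(0x55)
XCHG(None, dest, src)  # the cpu argument is unused by the swap itself
print(hex(dest.read()), hex(src.read()))  # 0x55 0xaa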
def replace(self, pattern, replacement): """ Replace all instances of a pattern with a replacement. Args: pattern (str): Pattern to replace replacement (str): Text to insert """ for i, line in enumerate(self): if pattern in line: self[i] = line.replace(pattern, replacement)
[ "def", "replace", "(", "self", ",", "pattern", ",", "replacement", ")", ":", "for", "i", ",", "line", "in", "enumerate", "(", "self", ")", ":", "if", "pattern", "in", "line", ":", "self", "[", "i", "]", "=", "line", ".", "replace", "(", "pattern", ",", "replacement", ")" ]
32.454545
11.727273
def list_sets(self, project, **filters): """List the articlesets in a project""" url = URL.articlesets.format(**locals()) return self.get_pages(url, **filters)
[ "def", "list_sets", "(", "self", ",", "project", ",", "*", "*", "filters", ")", ":", "url", "=", "URL", ".", "articlesets", ".", "format", "(", "*", "*", "locals", "(", ")", ")", "return", "self", ".", "get_pages", "(", "url", ",", "*", "*", "filters", ")" ]
45
3.25
def _merge_user_attrs(self, attrs_backend, attrs_out, backend_name):
    """ Merge attributes from one backend's search results into the output attributes dict """
    for attr in attrs_backend:
        if attr in self.attributes.backend_attributes[backend_name]:
            attrid = self.attributes.backend_attributes[backend_name][attr]
            if attrid not in attrs_out:
                attrs_out[attrid] = attrs_backend[attr]
[ "def", "_merge_user_attrs", "(", "self", ",", "attrs_backend", ",", "attrs_out", ",", "backend_name", ")", ":", "for", "attr", "in", "attrs_backend", ":", "if", "attr", "in", "self", ".", "attributes", ".", "backend_attributes", "[", "backend_name", "]", ":", "attrid", "=", "self", ".", "attributes", ".", "backend_attributes", "[", "backend_name", "]", "[", "attr", "]", "if", "attrid", "not", "in", "attrs_out", ":", "attrs_out", "[", "attrid", "]", "=", "attrs_backend", "[", "attr", "]" ]
45.5
19.3
def permissions(self, actor, inherited=None): """ Permissions for this model, plus permissions inherited from the parent. """ if inherited is not None: return inherited | super(BaseScopedNameMixin, self).permissions(actor) elif self.parent is not None and isinstance(self.parent, PermissionMixin): return self.parent.permissions(actor) | super(BaseScopedNameMixin, self).permissions(actor) else: return super(BaseScopedNameMixin, self).permissions(actor)
[ "def", "permissions", "(", "self", ",", "actor", ",", "inherited", "=", "None", ")", ":", "if", "inherited", "is", "not", "None", ":", "return", "inherited", "|", "super", "(", "BaseScopedNameMixin", ",", "self", ")", ".", "permissions", "(", "actor", ")", "elif", "self", ".", "parent", "is", "not", "None", "and", "isinstance", "(", "self", ".", "parent", ",", "PermissionMixin", ")", ":", "return", "self", ".", "parent", ".", "permissions", "(", "actor", ")", "|", "super", "(", "BaseScopedNameMixin", ",", "self", ")", ".", "permissions", "(", "actor", ")", "else", ":", "return", "super", "(", "BaseScopedNameMixin", ",", "self", ")", ".", "permissions", "(", "actor", ")" ]
52.9
25.5
def write(self, data): """Write data to serial port.""" for chunk in chunks(data, 512): self.wait_to_write() self.comport.write(chunk) self.comport.flush()
[ "def", "write", "(", "self", ",", "data", ")", ":", "for", "chunk", "in", "chunks", "(", "data", ",", "512", ")", ":", "self", ".", "wait_to_write", "(", ")", "self", ".", "comport", ".", "write", "(", "chunk", ")", "self", ".", "comport", ".", "flush", "(", ")" ]
33
7
def respond_list_directory(self, dir_path, query=None): """ Respond to the client with an HTML page listing the contents of the specified directory. :param str dir_path: The path of the directory to list the contents of. """ del query try: dir_contents = os.listdir(dir_path) except os.error: self.respond_not_found() return if os.path.normpath(dir_path) != self.__config['serve_files_root']: dir_contents.append('..') dir_contents.sort(key=lambda a: a.lower()) displaypath = html.escape(urllib.parse.unquote(self.path), quote=True) f = io.BytesIO() encoding = sys.getfilesystemencoding() f.write(b'<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">\n') f.write(b'<html>\n<title>Directory listing for ' + displaypath.encode(encoding) + b'</title>\n') f.write(b'<body>\n<h2>Directory listing for ' + displaypath.encode(encoding) + b'</h2>\n') f.write(b'<hr>\n<ul>\n') for name in dir_contents: fullname = os.path.join(dir_path, name) displayname = linkname = name # Append / for directories or @ for symbolic links if os.path.isdir(fullname): displayname = name + "/" linkname = name + "/" if os.path.islink(fullname): displayname = name + "@" # Note: a link to a directory displays with @ and links with / f.write(('<li><a href="' + urllib.parse.quote(linkname) + '">' + html.escape(displayname, quote=True) + '</a>\n').encode(encoding)) f.write(b'</ul>\n<hr>\n</body>\n</html>\n') length = f.tell() f.seek(0) self.send_response(200) self.send_header('Content-Type', 'text/html; charset=' + encoding) self.send_header('Content-Length', length) self.end_headers() shutil.copyfileobj(f, self.wfile) f.close() return
[ "def", "respond_list_directory", "(", "self", ",", "dir_path", ",", "query", "=", "None", ")", ":", "del", "query", "try", ":", "dir_contents", "=", "os", ".", "listdir", "(", "dir_path", ")", "except", "os", ".", "error", ":", "self", ".", "respond_not_found", "(", ")", "return", "if", "os", ".", "path", ".", "normpath", "(", "dir_path", ")", "!=", "self", ".", "__config", "[", "'serve_files_root'", "]", ":", "dir_contents", ".", "append", "(", "'..'", ")", "dir_contents", ".", "sort", "(", "key", "=", "lambda", "a", ":", "a", ".", "lower", "(", ")", ")", "displaypath", "=", "html", ".", "escape", "(", "urllib", ".", "parse", ".", "unquote", "(", "self", ".", "path", ")", ",", "quote", "=", "True", ")", "f", "=", "io", ".", "BytesIO", "(", ")", "encoding", "=", "sys", ".", "getfilesystemencoding", "(", ")", "f", ".", "write", "(", "b'<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 3.2 Final//EN\">\\n'", ")", "f", ".", "write", "(", "b'<html>\\n<title>Directory listing for '", "+", "displaypath", ".", "encode", "(", "encoding", ")", "+", "b'</title>\\n'", ")", "f", ".", "write", "(", "b'<body>\\n<h2>Directory listing for '", "+", "displaypath", ".", "encode", "(", "encoding", ")", "+", "b'</h2>\\n'", ")", "f", ".", "write", "(", "b'<hr>\\n<ul>\\n'", ")", "for", "name", "in", "dir_contents", ":", "fullname", "=", "os", ".", "path", ".", "join", "(", "dir_path", ",", "name", ")", "displayname", "=", "linkname", "=", "name", "# Append / for directories or @ for symbolic links", "if", "os", ".", "path", ".", "isdir", "(", "fullname", ")", ":", "displayname", "=", "name", "+", "\"/\"", "linkname", "=", "name", "+", "\"/\"", "if", "os", ".", "path", ".", "islink", "(", "fullname", ")", ":", "displayname", "=", "name", "+", "\"@\"", "# Note: a link to a directory displays with @ and links with /", "f", ".", "write", "(", "(", "'<li><a href=\"'", "+", "urllib", ".", "parse", ".", "quote", "(", "linkname", ")", "+", "'\">'", "+", "html", ".", "escape", "(", "displayname", ",", "quote", "=", "True", ")", "+", "'</a>\\n'", ")", ".", "encode", "(", "encoding", ")", ")", "f", ".", "write", "(", "b'</ul>\\n<hr>\\n</body>\\n</html>\\n'", ")", "length", "=", "f", ".", "tell", "(", ")", "f", ".", "seek", "(", "0", ")", "self", ".", "send_response", "(", "200", ")", "self", ".", "send_header", "(", "'Content-Type'", ",", "'text/html; charset='", "+", "encoding", ")", "self", ".", "send_header", "(", "'Content-Length'", ",", "length", ")", "self", ".", "end_headers", "(", ")", "shutil", ".", "copyfileobj", "(", "f", ",", "self", ".", "wfile", ")", "f", ".", "close", "(", ")", "return" ]
36.478261
21.608696
def provider_parser(subparser): """Specify arguments for AWS Route 53 Lexicon Provider.""" subparser.add_argument("--auth-access-key", help="specify ACCESS_KEY for authentication") subparser.add_argument("--auth-access-secret", help="specify ACCESS_SECRET for authentication") subparser.add_argument( "--private-zone", help=("indicates what kind of hosted zone to use. If true, use " "only private zones. If false, use only public zones")) # TODO: these are only required for testing, we should figure out # a way to remove them & update the integration tests # to dynamically populate the auth credentials that are required. subparser.add_argument( "--auth-username", help="alternative way to specify the ACCESS_KEY for authentication") subparser.add_argument( "--auth-token", help="alternative way to specify the ACCESS_SECRET for authentication")
[ "def", "provider_parser", "(", "subparser", ")", ":", "subparser", ".", "add_argument", "(", "\"--auth-access-key\"", ",", "help", "=", "\"specify ACCESS_KEY for authentication\"", ")", "subparser", ".", "add_argument", "(", "\"--auth-access-secret\"", ",", "help", "=", "\"specify ACCESS_SECRET for authentication\"", ")", "subparser", ".", "add_argument", "(", "\"--private-zone\"", ",", "help", "=", "(", "\"indicates what kind of hosted zone to use. If true, use \"", "\"only private zones. If false, use only public zones\"", ")", ")", "# TODO: these are only required for testing, we should figure out", "# a way to remove them & update the integration tests", "# to dynamically populate the auth credentials that are required.", "subparser", ".", "add_argument", "(", "\"--auth-username\"", ",", "help", "=", "\"alternative way to specify the ACCESS_KEY for authentication\"", ")", "subparser", ".", "add_argument", "(", "\"--auth-token\"", ",", "help", "=", "\"alternative way to specify the ACCESS_SECRET for authentication\"", ")" ]
53.833333
24.055556
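A sketch of how a CLI could wire this parser up with argparse (the subcommand name and overall layout are illustrative, not Lexicon's exact wiring; the key values are placeholders).

import argparse

parser = argparse.ArgumentParser(prog='lexicon')
subparsers = parser.add_subparsers(dest='provider')
provider_parser(subparsers.add_parser('route53'))

args = parser.parse_args(['route53',
                          '--auth-access-key', 'EXAMPLEKEY',
                          '--auth-access-secret', 'examplesecret'])
print(args.auth_access_key)  # EXAMPLEKEY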
def copy_submission_to_destination(self, src_filename, dst_subdir, submission_id): """Copies submission to target directory. Args: src_filename: source filename of the submission dst_subdir: subdirectory of the target directory where submission should be copied to submission_id: ID of the submission, will be used as a new submission filename (before extension) """ extension = [e for e in ALLOWED_EXTENSIONS if src_filename.endswith(e)] if len(extension) != 1: logging.error('Invalid submission extension: %s', src_filename) return dst_filename = os.path.join(self.target_dir, dst_subdir, submission_id + extension[0]) cmd = ['gsutil', 'cp', src_filename, dst_filename] if subprocess.call(cmd) != 0: logging.error('Can\'t copy submission to destination') else: logging.info('Submission copied to: %s', dst_filename)
[ "def", "copy_submission_to_destination", "(", "self", ",", "src_filename", ",", "dst_subdir", ",", "submission_id", ")", ":", "extension", "=", "[", "e", "for", "e", "in", "ALLOWED_EXTENSIONS", "if", "src_filename", ".", "endswith", "(", "e", ")", "]", "if", "len", "(", "extension", ")", "!=", "1", ":", "logging", ".", "error", "(", "'Invalid submission extension: %s'", ",", "src_filename", ")", "return", "dst_filename", "=", "os", ".", "path", ".", "join", "(", "self", ".", "target_dir", ",", "dst_subdir", ",", "submission_id", "+", "extension", "[", "0", "]", ")", "cmd", "=", "[", "'gsutil'", ",", "'cp'", ",", "src_filename", ",", "dst_filename", "]", "if", "subprocess", ".", "call", "(", "cmd", ")", "!=", "0", ":", "logging", ".", "error", "(", "'Can\\'t copy submission to destination'", ")", "else", ":", "logging", ".", "info", "(", "'Submission copied to: %s'", ",", "dst_filename", ")" ]
41.73913
21.217391
def get_CV_prediction(self): """ Returns: np.ndarray: Predictions on the hold-out folds (unseen data, corresponds to :py:attr:`y`). """ # TODO: get it from the test_prediction ... # test_id, prediction # sort by test_id predict_vec = np.zeros((self._n_rows, self._concise_model._num_tasks)) for fold, train, test in self._kf: acc = self._cv_model[fold].get_accuracy() predict_vec[test, :] = acc["y_test_prediction"] return predict_vec
[ "def", "get_CV_prediction", "(", "self", ")", ":", "# TODO: get it from the test_prediction ...", "# test_id, prediction", "# sort by test_id", "predict_vec", "=", "np", ".", "zeros", "(", "(", "self", ".", "_n_rows", ",", "self", ".", "_concise_model", ".", "_num_tasks", ")", ")", "for", "fold", ",", "train", ",", "test", "in", "self", ".", "_kf", ":", "acc", "=", "self", ".", "_cv_model", "[", "fold", "]", ".", "get_accuracy", "(", ")", "predict_vec", "[", "test", ",", ":", "]", "=", "acc", "[", "\"y_test_prediction\"", "]", "return", "predict_vec" ]
40.769231
16.923077
def degrees_of_freedom(self): """ Returns the number of degrees of freedom. """ if len(self._set_xdata)==0 or len(self._set_ydata)==0: return None # Temporary hack: get the studentized residuals, which uses the massaged data # This should later be changed to get_massaged_data() r = self.studentized_residuals() # Happens if data / functions not defined if r == None: return # calculate the number of points N = 0.0 for i in range(len(r)): N += len(r[i]) return N-len(self._pnames)
[ "def", "degrees_of_freedom", "(", "self", ")", ":", "if", "len", "(", "self", ".", "_set_xdata", ")", "==", "0", "or", "len", "(", "self", ".", "_set_ydata", ")", "==", "0", ":", "return", "None", "# Temporary hack: get the studentized residuals, which uses the massaged data", "# This should later be changed to get_massaged_data()", "r", "=", "self", ".", "studentized_residuals", "(", ")", "# Happens if data / functions not defined", "if", "r", "==", "None", ":", "return", "# calculate the number of points", "N", "=", "0.0", "for", "i", "in", "range", "(", "len", "(", "r", ")", ")", ":", "N", "+=", "len", "(", "r", "[", "i", "]", ")", "return", "N", "-", "len", "(", "self", ".", "_pnames", ")" ]
33.555556
17
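For reference, the quantity computed above is the usual chi-squared bookkeeping: the total number of fitted points minus the number of free parameters. A made-up numeric check:

N = 120 + 80  # e.g. two datasets of 120 and 80 points
p = 3         # three free parameters in self._pnames
print(N - p)  # 197 degrees of freedom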
def _poll_slapd(self, timeout=DEFAULT_STARTUP_DELAY): """Poll slapd port until available.""" begin = time.time() time.sleep(0.5) while time.time() < begin + timeout: if self._process.poll() is not None: raise RuntimeError("LDAP server has exited before starting listen.") s = socket.socket() try: s.connect(('localhost', self.port)) except socket.error: # Not ready yet, sleep time.sleep(0.5) else: return finally: s.close() raise RuntimeError("LDAP server not responding within %s seconds." % timeout)
[ "def", "_poll_slapd", "(", "self", ",", "timeout", "=", "DEFAULT_STARTUP_DELAY", ")", ":", "begin", "=", "time", ".", "time", "(", ")", "time", ".", "sleep", "(", "0.5", ")", "while", "time", ".", "time", "(", ")", "<", "begin", "+", "timeout", ":", "if", "self", ".", "_process", ".", "poll", "(", ")", "is", "not", "None", ":", "raise", "RuntimeError", "(", "\"LDAP server has exited before starting listen.\"", ")", "s", "=", "socket", ".", "socket", "(", ")", "try", ":", "s", ".", "connect", "(", "(", "'localhost'", ",", "self", ".", "port", ")", ")", "except", "socket", ".", "error", ":", "# Not ready yet, sleep", "time", ".", "sleep", "(", "0.5", ")", "else", ":", "return", "finally", ":", "s", ".", "close", "(", ")", "raise", "RuntimeError", "(", "\"LDAP server not responding within %s seconds.\"", "%", "timeout", ")" ]
33
19.190476
def reset(self): """Remove all the information from previous dataset before loading a new dataset. """ # store current dataset max_dataset_history = self.value('max_dataset_history') keep_recent_datasets(max_dataset_history, self.info) # reset all the widgets self.labels.reset() self.channels.reset() self.info.reset() self.notes.reset() self.overview.reset() self.spectrum.reset() self.traces.reset()
[ "def", "reset", "(", "self", ")", ":", "# store current dataset", "max_dataset_history", "=", "self", ".", "value", "(", "'max_dataset_history'", ")", "keep_recent_datasets", "(", "max_dataset_history", ",", "self", ".", "info", ")", "# reset all the widgets", "self", ".", "labels", ".", "reset", "(", ")", "self", ".", "channels", ".", "reset", "(", ")", "self", ".", "info", ".", "reset", "(", ")", "self", ".", "notes", ".", "reset", "(", ")", "self", ".", "overview", ".", "reset", "(", ")", "self", ".", "spectrum", ".", "reset", "(", ")", "self", ".", "traces", ".", "reset", "(", ")" ]
29.411765
16.058824
def video_pos(self): """ Returns: (int, int, int, int): Video spatial position (x1, y1, x2, y2) where (x1, y1) is top left, and (x2, y2) is bottom right. All values in px. """ position_string = self._player_interface.VideoPos(ObjectPath('/not/used')) return list(map(int, position_string.split(" ")))
[ "def", "video_pos", "(", "self", ")", ":", "position_string", "=", "self", ".", "_player_interface", ".", "VideoPos", "(", "ObjectPath", "(", "'/not/used'", ")", ")", "return", "list", "(", "map", "(", "int", ",", "position_string", ".", "split", "(", "\" \"", ")", ")", ")" ]
47.375
25.625