Dataset columns:
  text_prompt   string, lengths 100 to 17.7k
  code_prompt   string, lengths 7 to 9.86k
<SYSTEM_TASK:> Add a battery level reward <END_TASK> <USER_TASK:> Description: def reward_battery(self): """ Add a battery level reward """
if 'battery' not in self.mode:
    return
mode = self.mode['battery']
if mode and self.__test_cond(mode):
    self.logger.debug('Battery out')
    self.player.stats['reward'] += mode['reward']
    self.player.game_over = self.player.game_over or mode['terminal']
<SYSTEM_TASK:> Add a food collision reward <END_TASK> <USER_TASK:> Description: def reward_item(self, item_type): """ Add a food collision reward """
assert isinstance(item_type, str)
if 'items' not in self.mode:
    return
mode = self.mode['items']
if mode and mode[item_type] and self.__test_cond(mode[item_type]):
    self.logger.debug("{item_type} consumed".format(item_type=item_type))
    self.player.stats['reward'] += mode[item_type]['reward']
    self.player.stats['score'] += mode[item_type]['reward']
    self.player.game_over = self.player.game_over or mode[item_type]['terminal']
<SYSTEM_TASK:> Add a wall collision reward <END_TASK> <USER_TASK:> Description: def reward_wall(self): """ Add a wall collision reward """
if 'wall' not in self.mode:
    return
mode = self.mode['wall']
if mode and self.__test_cond(mode):
    self.logger.debug("Wall {x}/{y}".format(x=self.bumped_x, y=self.bumped_y))
    self.player.stats['reward'] += mode['reward']
    self.player.game_over = self.player.game_over or mode['terminal']
<SYSTEM_TASK:> Add an exploration reward <END_TASK> <USER_TASK:> Description: def reward_explore(self): """ Add an exploration reward """
if 'explore' not in self.mode:
    return
mode = self.mode['explore']
if mode and mode['reward'] and self.__test_cond(mode):
    self.player.stats['reward'] += mode['reward']
    self.player.stats['score'] += mode['reward']
    self.player.game_over = self.player.game_over or mode['terminal']
<SYSTEM_TASK:> Add an end goal reward <END_TASK> <USER_TASK:> Description: def reward_goal(self): """ Add an end goal reward """
if 'goal' not in self.mode:
    return
mode = self.mode['goal']
if mode and mode['reward'] and self.__test_cond(mode):
    if mode['reward'] > 0:
        self.logger.info("Escaped!!")
    self.player.stats['reward'] += mode['reward']
    self.player.stats['score'] += mode['reward']
    self.player.game_over = self.player.game_over or mode['terminal']
<SYSTEM_TASK:> Add a wall proximity reward <END_TASK> <USER_TASK:> Description: def reward_proximity(self): """ Add a wall proximity reward """
if 'proximity' not in self.mode:
    return
mode = self.mode['proximity']

# Calculate proximity reward
reward = 0
for sensor in self.player.sensors:
    if sensor.sensed_type == 'wall':
        reward += sensor.proximity_norm()
    else:
        reward += 1
reward /= len(self.player.sensors)
#reward = min(1.0, reward * 2)
reward = min(1.0, reward * reward)

# TODO: Configurable bonus reward threshold. Pass extra args to `__test_cond`?
#if mode and reward > 0.75 and self.__test_cond(mode):
if mode and self.__test_cond(mode):
    # Apply bonus
    reward *= mode['reward']
self.player.stats['reward'] += reward
<SYSTEM_TASK:> Return the sitemap URI based on maps or explicit settings. <END_TASK> <USER_TASK:> Description: def sitemap(self): """Return the sitemap URI based on maps or explicit settings."""
if (self.sitemap_name is not None):
    return(self.sitemap_name)
return(self.sitemap_uri(self.resource_list_name))
<SYSTEM_TASK:> Read resource list from specified URI else raise exception. <END_TASK> <USER_TASK:> Description: def read_resource_list(self, uri): """Read resource list from specified URI else raise exception."""
self.logger.info("Reading resource list %s" % (uri)) try: resource_list = ResourceList(allow_multifile=self.allow_multifile, mapper=self.mapper) resource_list.read(uri=uri) except Exception as e: raise ClientError("Can't read source resource list from %s (%s)" % (uri, str(e))) self.logger.debug("Finished reading resource list") return(resource_list)
<SYSTEM_TASK:> Read source description to find resource list. <END_TASK> <USER_TASK:> Description: def find_resource_list_from_source_description(self, uri): """Read source description to find resource list. Raises a ClientError in cases where the client might look for a source description in another location, but a ClientFatalError if a source description is found but there is some problem using it. """
self.logger.info("Reading source description %s" % (uri)) try: sd = SourceDescription() sd.read(uri=uri) except Exception as e: raise ClientError( "Can't read source description from %s (%s)" % (uri, str(e))) if (len(sd) == 0): raise ClientFatalError( "Source description %s has no sources" % (uri)) elif (len(sd) > 1): raise ClientFatalError( "Source description %s has multiple sources, specify one " "with --capabilitylist" % (uri)) self.logger.info("Finished reading source description") cluri = sd.resources.first().uri uri = urljoin(uri, cluri) # FIXME - Should relative URI handling be elsewhere? return(self.find_resource_list_from_capability_list(uri))
<SYSTEM_TASK:> Read capability list to find resource list. <END_TASK> <USER_TASK:> Description: def find_resource_list_from_capability_list(self, uri): """Read capability list to find resource list. Raises a ClientError in cases where the client might look for a capability list in another location, but a ClientFatalError if a capability list is found but there is some problem using it. """
self.logger.info("Reading capability list %s" % (uri)) try: cl = CapabilityList() cl.read(uri=uri) except Exception as e: raise ClientError( "Can't read capability list from %s (%s)" % (uri, str(e))) if (not cl.has_capability('resourcelist')): raise ClientFatalError( "Capability list %s does not describe a resource list" % (uri)) rluri = cl.capability_info('resourcelist').uri return(urljoin(uri, rluri))
<SYSTEM_TASK:> Find resource list by heuristics, returns ResourceList object. <END_TASK> <USER_TASK:> Description: def find_resource_list(self):
    """Find resource list by heuristics, returns ResourceList object.

    1. Use explicitly specified self.sitemap_name (and fail if that doesn't work)
    2. Use explicitly specified self.capability_list_uri (and fail if that doesn't work)
    3. Look for base_url/.well-known/resourcesync (then look for capability, look for resourcelist)
    4. Look for host/.well-known/resourcesync (then look for capability, look for resourcelist)
    5. Look for base_url/resourcelist.xml
    6. Look for base_url/sitemap.xml
    7. Look for host/sitemap.xml
    """
# 1 & 2 if (self.sitemap_name is not None): return(self.read_resource_list(self.sitemap_name)) if (self.capability_list_uri is not None): rluri = self.find_resource_list_from_capability_list(self.capability_list_uri) return(self.read_resource_list(rluri)) # 3 & 4 parts = urlsplit(self.sitemap) uri_host = urlunsplit([parts[0], parts[1], '', '', '']) errors = [] for uri in [urljoin(self.sitemap, '.well-known/resourcesync'), urljoin(uri_host, '.well-known/resourcesync')]: uri = uri.lstrip('file:///') # urljoin adds this for local files try: rluri = self.find_resource_list_from_source_description(uri) return(self.read_resource_list(rluri)) except ClientError as e: errors.append(str(e)) # 5, 6 & 7 for uri in [urljoin(self.sitemap, 'resourcelist.xml'), urljoin(self.sitemap, 'sitemap.xml'), urljoin(uri_host, 'sitemap.xml')]: uri = uri.lstrip('file:///') # urljoin adds this for local files try: return(self.read_resource_list(uri)) except ClientError as e: errors.append(str(e)) raise ClientFatalError( "Failed to find source resource list from common patterns (%s)" % ". ".join(errors))
<SYSTEM_TASK:> Return a resource list for files on local disk. <END_TASK> <USER_TASK:> Description: def build_resource_list(self, paths=None, set_path=False):
    """Return a resource list for files on local disk.

    The set of files is taken by disk scan from the paths specified or
    else defaults to the paths specified in the current mappings.

    paths - override paths from mappings if specified

    set_path - set True to set the path information for each resource
        included. This is used to build a resource list as the basis
        for creating a dump.

    Return ResourceList. Uses existing self.mapper settings.
    """
# 0. Sanity checks, parse paths if specified
if (len(self.mapper) < 1):
    raise ClientFatalError(
        "No source to destination mapping specified")
if (paths is not None):
    # Expect comma separated list of paths
    paths = paths.split(',')
# 1. Build from disk
rlb = ResourceListBuilder(set_hashes=self.hashes, mapper=self.mapper)
rlb.set_path = set_path
try:
    rlb.add_exclude_files(self.exclude_patterns)
    rl = rlb.from_disk(paths=paths)
except ValueError as e:
    raise ClientFatalError(str(e))
# 2. Set defaults and overrides
rl.allow_multifile = self.allow_multifile
rl.pretty_xml = self.pretty_xml
rl.mapper = self.mapper
if (self.max_sitemap_entries is not None):
    rl.max_sitemap_entries = self.max_sitemap_entries
return(rl)
<SYSTEM_TASK:> Update resource from uri to filename on local system. <END_TASK> <USER_TASK:> Description: def update_resource(self, resource, filename, change=None):
    """Update resource from uri to filename on local system.

    Update means three things:
    1. GET the resource
    2. set mtime in local time to be equal to the timestamp in UTC
       (should perhaps warn if this differs from, or is earlier than,
       the Last-Modified of the GET response or the lastmod we expected
       from the resource list)
    3. check that the resource matches the expected information

    Also update self.last_timestamp if the timestamp (in source frame)
    of this resource is later than the current value.

    Returns the number of resources updated/created (0 or 1).
    """
path = os.path.dirname(filename)
distutils.dir_util.mkpath(path)
num_updated = 0
if (self.dryrun):
    self.logger.info(
        "dryrun: would GET %s --> %s" % (resource.uri, filename))
else:
    # 1. GET
    for try_i in range(1, self.tries + 1):
        try:
            r = requests.get(resource.uri, timeout=self.timeout,
                             stream=True)
            # Fail on 4xx or 5xx
            r.raise_for_status()
            with open(filename, 'wb') as fd:
                for chunk in r.iter_content(chunk_size=1024):
                    fd.write(chunk)
            num_updated += 1
            break
        except requests.Timeout as e:
            if try_i < self.tries:
                msg = 'Download timed out, retrying...'
                self.logger.info(msg)
                # Continue loop
            else:
                # No more tries left, so fail
                msg = "Failed to GET %s after %s tries -- %s" % (
                    resource.uri, self.tries, str(e))
                if (self.ignore_failures):
                    self.logger.warning(msg)
                    return(num_updated)
                else:
                    raise ClientFatalError(msg)
        except (requests.RequestException, IOError) as e:
            msg = "Failed to GET %s -- %s" % (resource.uri, str(e))
            if (self.ignore_failures):
                self.logger.warning(msg)
                return(num_updated)
            else:
                raise ClientFatalError(msg)
    # 2. set timestamp if we have one
    if (resource.timestamp is not None):
        unixtime = int(resource.timestamp)  # no fractional
        os.utime(filename, (unixtime, unixtime))
        if (resource.timestamp > self.last_timestamp):
            self.last_timestamp = resource.timestamp
    self.log_event(Resource(resource=resource, change=change))
    # 3. sanity check
    length = os.stat(filename).st_size
    if (resource.length is not None and resource.length != length):
        self.logger.info(
            "Downloaded size for %s of %d bytes does not match expected %d bytes" %
            (resource.uri, length, resource.length))
    if (len(self.hashes) > 0):
        self.check_hashes(filename, resource)
return(num_updated)
<SYSTEM_TASK:> Check all hashes present in self.hashes _and_ resource object. <END_TASK> <USER_TASK:> Description: def check_hashes(self, filename, resource): """Check all hashes present in self.hashes _and_ resource object. Simply shows warning for mismatch, does not raise exception or otherwise stop process. """
# which hashes to calculate?
hashes = []
if ('md5' in self.hashes and resource.md5 is not None):
    hashes.append('md5')
if ('sha-1' in self.hashes and resource.sha1 is not None):
    hashes.append('sha-1')
if ('sha-256' in self.hashes and resource.sha256 is not None):
    hashes.append('sha-256')
# calculate
hasher = Hashes(hashes, filename)
# check and report
if ('md5' in hashes and resource.md5 != hasher.md5):
    self.logger.info(
        "MD5 mismatch for %s, got %s but expected %s" %
        (resource.uri, hasher.md5, resource.md5))
if ('sha-1' in hashes and resource.sha1 != hasher.sha1):
    self.logger.info(
        "SHA-1 mismatch for %s, got %s but expected %s" %
        (resource.uri, hasher.sha1, resource.sha1))
if ('sha-256' in hashes and resource.sha256 != hasher.sha256):
    self.logger.info(
        "SHA-256 mismatch for %s, got %s but expected %s" %
        (resource.uri, hasher.sha256, resource.sha256))
<SYSTEM_TASK:> Delete copy of resource in filename on local system. <END_TASK> <USER_TASK:> Description: def delete_resource(self, resource, filename, allow_deletion=False): """Delete copy of resource in filename on local system. Will only actually do the deletion if allow_deletion is True. Regardless of whether the deletion occurs, self.last_timestamp will be updated if the resource.timestamp is later than the current value. Returns the number of files actually deleted (0 or 1). """
num_deleted = 0
uri = resource.uri
if (resource.timestamp is not None and
        resource.timestamp > self.last_timestamp):
    self.last_timestamp = resource.timestamp
if (allow_deletion):
    if (self.dryrun):
        self.logger.info(
            "dryrun: would delete %s -> %s" % (uri, filename))
    else:
        try:
            os.unlink(filename)
            num_deleted += 1
            self.logger.info("deleted: %s -> %s" % (uri, filename))
            self.log_event(
                Resource(resource=resource, change="deleted"))
        except OSError as e:
            msg = "Failed to DELETE %s -> %s : %s" % (
                uri, filename, str(e))
            # if (self.ignore_failures):
            self.logger.warning(msg)
            # return
            # else:
            #     raise ClientFatalError(msg)
else:
    self.logger.info(
        "nodelete: would delete %s (--delete to enable)" % uri)
return(num_deleted)
<SYSTEM_TASK:> Parse any ResourceSync document and show information. <END_TASK> <USER_TASK:> Description: def parse_document(self): """Parse any ResourceSync document and show information. Will use sitemap URI taken either from explicit self.sitemap_name or derived from the mappings supplied. """
s = Sitemap()
self.logger.info("Reading sitemap(s) from %s ..." % (self.sitemap))
try:
    list = s.parse_xml(url_or_file_open(self.sitemap))
except IOError as e:
    raise ClientFatalError("Cannot read document (%s)" % str(e))
num_entries = len(list.resources)
capability = '(unknown capability)'
if ('capability' in list.md):
    capability = list.md['capability']
print("Parsed %s document with %d entries" % (capability, num_entries))
if (self.verbose):
    to_show = 100
    override_str = ' (override with --max-sitemap-entries)'
    if (self.max_sitemap_entries):
        to_show = self.max_sitemap_entries
        override_str = ''
    if (num_entries > to_show):
        print(
            "Showing first %d entries sorted by URI%s..." %
            (to_show, override_str))
    n = 0
    for resource in list:
        print('[%d] %s' % (n, str(resource)))
        n += 1
        if (n >= to_show):
            break
<SYSTEM_TASK:> Write a Resource List or a Resource Dump for files on local disk. <END_TASK> <USER_TASK:> Description: def write_resource_list( self, paths=None, outfile=None, links=None, dump=None): """Write a Resource List or a Resource Dump for files on local disk. Set of resources included is based on paths setting or else the mappings. Optionally links can be added. Output will be to stdout unless outfile is specified. If dump is true then a Resource Dump is written instead of a Resource List. If outfile is not set then self.default_resource_dump will be used. """
rl = self.build_resource_list(paths=paths, set_path=dump)
if (links is not None):
    rl.ln = links
if (dump):
    if (outfile is None):
        outfile = self.default_resource_dump
    self.logger.info("Writing resource dump to %s..." % (outfile))
    d = Dump(resources=rl, format=self.dump_format)
    d.write(basename=outfile)
else:
    if (outfile is None):
        try:
            print(rl.as_xml())
        except ListBaseIndexError as e:
            raise ClientFatalError(
                "%s. Use --output option to specify base name for output files." %
                str(e))
    else:
        rl.write(basename=outfile)
<SYSTEM_TASK:> Write a change list. <END_TASK> <USER_TASK:> Description: def write_change_list(self, paths=None, outfile=None, ref_sitemap=None, newref_sitemap=None, empty=None, links=None, dump=None):
    """Write a change list.

    Unless both ref_sitemap and newref_sitemap are specified, the
    Change List is calculated between the reference and the current
    state of files on disk. The files on disk are scanned based either
    on the paths setting or else on the mappings.
    """
cl = ChangeList(ln=links)
if (not empty):
    # 1. Get and parse reference sitemap
    old_rl = self.read_reference_resource_list(ref_sitemap)
    # 2. Depending on whether a newref_sitemap was specified, either
    # read that or build resource list from files on disk
    if (newref_sitemap is None):
        # Get resource list from disk
        new_rl = self.build_resource_list(paths=paths, set_path=dump)
    else:
        new_rl = self.read_reference_resource_list(
            newref_sitemap, name='new reference')
    # 3. Calculate change list
    (same, updated, deleted, created) = old_rl.compare(new_rl)
    cl.add_changed_resources(updated, change='updated')
    cl.add_changed_resources(deleted, change='deleted')
    cl.add_changed_resources(created, change='created')
# 4. Write out change list
cl.mapper = self.mapper
cl.pretty_xml = self.pretty_xml
if (self.max_sitemap_entries is not None):
    cl.max_sitemap_entries = self.max_sitemap_entries
if (outfile is None):
    print(cl.as_xml())
else:
    cl.write(basename=outfile)
self.write_dump_if_requested(cl, dump)
<SYSTEM_TASK:> Write a Capability List to outfile or STDOUT. <END_TASK> <USER_TASK:> Description: def write_capability_list(self, capabilities=None, outfile=None, links=None): """Write a Capability List to outfile or STDOUT."""
capl = CapabilityList(ln=links)
capl.pretty_xml = self.pretty_xml
if (capabilities is not None):
    for name in capabilities.keys():
        capl.add_capability(name=name, uri=capabilities[name])
if (outfile is None):
    print(capl.as_xml())
else:
    capl.write(basename=outfile)
<SYSTEM_TASK:> Read reference resource list and return the ResourceList object. <END_TASK> <USER_TASK:> Description: def read_reference_resource_list(self, ref_sitemap, name='reference'): """Read reference resource list and return the ResourceList object. The name parameter is used just in output messages to say what type of resource list is being read. """
rl = ResourceList()
self.logger.info(
    "Reading %s resource list from %s ..." % (name, ref_sitemap))
rl.mapper = self.mapper
rl.read(uri=ref_sitemap, index_only=(not self.allow_multifile))
num_entries = len(rl.resources)
self.logger.info(
    "Read %s resource list with %d entries in %d sitemaps" %
    (name, num_entries, rl.num_files))
if (self.verbose):
    to_show = 100
    override_str = ' (override with --max-sitemap-entries)'
    if (self.max_sitemap_entries):
        to_show = self.max_sitemap_entries
        override_str = ''
    if (num_entries > to_show):
        print(
            "Showing first %d entries sorted by URI%s..." %
            (to_show, override_str))
    n = 0
    for r in rl.resources:
        print(r)
        n += 1
        if (n >= to_show):
            break
return(rl)
<SYSTEM_TASK:> Prune any hashes not in source resource or change list. <END_TASK> <USER_TASK:> Description: def prune_hashes(self, hashes, list_type): """Prune any hashes not in source resource or change list."""
discarded = []
for hash in hashes:
    if (hash in self.hashes):
        self.hashes.discard(hash)
        discarded.append(hash)
self.logger.info("Not calculating %s hash(es) on destination as not present "
                 "in source %s list" %
                 (', '.join(sorted(discarded)), list_type))
<SYSTEM_TASK:> Write log message regarding status in standard form. <END_TASK> <USER_TASK:> Description: def log_status(self, in_sync=True, incremental=False, audit=False, same=None, created=0, updated=0, deleted=0, to_delete=0): """Write log message regarding status in standard form. Split this off so all messages from baseline/audit/incremental are written in a consistent form. """
if (audit):
    words = {'created': 'to create',
             'updated': 'to update',
             'deleted': 'to delete'}
else:
    words = {'created': 'created',
             'updated': 'updated',
             'deleted': 'deleted'}
if in_sync:
    # status rather than action
    status = "NO CHANGES" if incremental else "IN SYNC"
else:
    if audit:
        status = "NOT IN SYNC"
    elif (to_delete > deleted):
        # will need --delete
        status = "PART APPLIED" if incremental else "PART SYNCED"
        words['deleted'] = 'to delete (--delete)'
        deleted = to_delete
    else:
        status = "CHANGES APPLIED" if incremental else "SYNCED"
same = "" if (same is None) else ("same=%d, " % same)
self.logger.warning("Status: %15s (%s%s=%d, %s=%d, %s=%d)" %
                    (status, same, words['created'], created,
                     words['updated'], updated, words['deleted'], deleted))
<SYSTEM_TASK:> Coerce the day of the month to an internal value that may or <END_TASK> <USER_TASK:> Description: def _normalize_day(year, month, day):
    """ Coerce the day of the month to an internal value that may or
    may not match the "public" value.

    With the exception of the last three days of every month, all days
    are stored as-is. The last three days are instead stored as -1 (the
    last), -2 (first from last) and -3 (second from last).

    Therefore, for a 28-day month, the last week is as follows:

        Day   | 22 23 24 25 26 27 28
        Value | 22 23 24 25 -3 -2 -1

    For a 29-day month, the last week is as follows:

        Day   | 23 24 25 26 27 28 29
        Value | 23 24 25 26 -3 -2 -1

    For a 30-day month, the last week is as follows:

        Day   | 24 25 26 27 28 29 30
        Value | 24 25 26 27 -3 -2 -1

    For a 31-day month, the last week is as follows:

        Day   | 25 26 27 28 29 30 31
        Value | 25 26 27 28 -3 -2 -1

    This slightly unintuitive system makes some temporal arithmetic
    produce a more desirable outcome.

    :param year:
    :param month:
    :param day:
    :return:
    """
if year < MIN_YEAR or year > MAX_YEAR:
    raise ValueError("Year out of range (%d..%d)" % (MIN_YEAR, MAX_YEAR))
if month < 1 or month > 12:
    raise ValueError("Month out of range (1..12)")
days_in_month = DAYS_IN_MONTH[(year, month)]
if day in (days_in_month, -1):
    return year, month, -1
if day in (days_in_month - 1, -2):
    return year, month, -2
if day in (days_in_month - 2, -3):
    return year, month, -3
if 1 <= day <= days_in_month - 3:
    return year, month, int(day)
# TODO improve this error message
raise ValueError("Day %d out of range (1..%d, -1, -2, -3)" %
                 (day, days_in_month))
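A hedged walk-through of the scheme above for a 30-day month (DAYS_IN_MONTH below is a toy stand-in for the module's real lookup):

# Toy illustration only; the real DAYS_IN_MONTH covers all (year, month) pairs.
DAYS_IN_MONTH = {(2018, 9): 30}

def normalize_day(year, month, day):
    # simplified copy of the branch logic above
    days_in_month = DAYS_IN_MONTH[(year, month)]
    if day in (days_in_month, -1):
        return year, month, -1
    if day in (days_in_month - 1, -2):
        return year, month, -2
    if day in (days_in_month - 2, -3):
        return year, month, -3
    return year, month, int(day)

assert normalize_day(2018, 9, 30) == (2018, 9, -1)   # last day
assert normalize_day(2018, 9, 29) == (2018, 9, -2)   # first from last
assert normalize_day(2018, 9, 28) == (2018, 9, -3)   # second from last
assert normalize_day(2018, 9, 27) == (2018, 9, 27)   # stored as-is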
<SYSTEM_TASK:> Convert from a ClockTime relative to a given epoch. <END_TASK> <USER_TASK:> Description: def from_clock_time(cls, clock_time, epoch): """ Convert from a ClockTime relative to a given epoch. """
try:
    clock_time = ClockTime(*clock_time)
except (TypeError, ValueError):
    raise ValueError("Clock time must be a 2-tuple of (s, ns)")
else:
    ordinal = clock_time.seconds // 86400
    return Date.from_ordinal(ordinal + epoch.date().to_ordinal())
<SYSTEM_TASK:> Convert from a native Python `datetime.time` value. <END_TASK> <USER_TASK:> Description: def from_native(cls, t): """ Convert from a native Python `datetime.time` value. """
second = (1000000 * t.second + t.microsecond) / 1000000
return Time(t.hour, t.minute, second, t.tzinfo)
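A small sketch of the fractional-second arithmetic, assuming a Time class like the one above that accepts a float second:

from datetime import time as native_time

t = native_time(14, 30, 15, 500000)                      # 14:30:15.5
second = (1000000 * t.second + t.microsecond) / 1000000
assert second == 15.5                                    # half second kept as a fraction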
<SYSTEM_TASK:> Convert from a `.ClockTime` relative to a given epoch. <END_TASK> <USER_TASK:> Description: def from_clock_time(cls, clock_time, epoch): """ Convert from a `.ClockTime` relative to a given epoch. """
clock_time = ClockTime(*clock_time)
ts = clock_time.seconds % 86400
nanoseconds = int(1000000000 * ts + clock_time.nanoseconds)
return Time.from_ticks(epoch.time().ticks + nanoseconds / 1000000000)
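A hedged worked example of the seconds-within-day reduction above (numbers chosen for illustration):

seconds, nanoseconds = 90061, 500000000   # 1 day + 01:01:01.5
ts = seconds % 86400                      # 3661 -> 01:01:01 within the day
total_ns = int(1000000000 * ts + nanoseconds)
assert total_ns / 1000000000 == 3661.5    # seconds offset added to the epoch's ticks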
<SYSTEM_TASK:> Convert to a native Python `datetime.time` value. <END_TASK> <USER_TASK:> Description: def to_native(self): """ Convert to a native Python `datetime.time` value. """
h, m, s = self.hour_minute_second
s, ns = nano_divmod(s, 1)
ms = int(nano_mul(ns, 1000000))
return time(h, m, s, ms)
<SYSTEM_TASK:> Create a personal OAuth token. <END_TASK> <USER_TASK:> Description: def tokens_create(name, user, scopes, internal): """Create a personal OAuth token."""
token = Token.create_personal(
    name, user.id, scopes=scopes, is_internal=internal)
db.session.commit()
click.secho(token.access_token, fg='blue')
<SYSTEM_TASK:> Normalize residuals by the level of the variable. <END_TASK> <USER_TASK:> Description: def normalize_residuals(self, points): """Normalize residuals by the level of the variable."""
residuals = self.evaluate_residual(points)
solutions = self.evaluate_solution(points)
return [resid / soln for resid, soln in zip(residuals, solutions)]
<SYSTEM_TASK:> Convert UUID. <END_TASK> <USER_TASK:> Description: def apply(self, query, value, alias): """Convert UUID. :param query: SQLAlchemy query object. :param value: UUID value. :param alias: Alias of the column. :returns: Filtered query matching the UUID value. """
try:
    value = uuid.UUID(value)
    return query.filter(self.column == value)
except ValueError:
    return query
<SYSTEM_TASK:> Verify a provided OTP. <END_TASK> <USER_TASK:> Description: def verify(self, otp, timestamp=False, sl=None, timeout=None, return_response=False):
    """
    Verify a provided OTP.

    :param otp: OTP to verify.
    :type otp: ``str``

    :param timestamp: True to include request timestamp and session counter
                      in the response. Defaults to False.
    :type timestamp: ``bool``

    :param sl: A value indicating percentage of syncing required by client.
    :type sl: ``int`` or ``str``

    :param timeout: Number of seconds to wait for sync responses.
    :type timeout: ``int``

    :param return_response: True to return a response object instead of the
                            status code. Defaults to False.
    :type return_response: ``bool``

    :return: True if the provided OTP is valid, False if the REPLAYED_OTP
             status value is returned or the response message signature
             verification failed and None for the rest of the status values.
    """
ca_bundle_path = self._get_ca_bundle_path()
otp = OTP(otp, self.translate_otp)
rand_str = b(os.urandom(30))
nonce = base64.b64encode(rand_str, b('xz'))[:25].decode('utf-8')
query_string = self.generate_query_string(otp.otp, nonce, timestamp,
                                          sl, timeout)
threads = []
timeout = timeout or DEFAULT_TIMEOUT
for url in self.api_urls:
    thread = URLThread('%s?%s' % (url, query_string), timeout,
                       self.verify_cert, ca_bundle_path)
    thread.start()
    threads.append(thread)
# Wait for a first positive or negative response
start_time = time.time()
while threads and (start_time + timeout) > time.time():
    for thread in threads:
        if not thread.is_alive():
            if thread.exception:
                raise thread.exception
            elif thread.response:
                status = self.verify_response(thread.response,
                                              otp.otp, nonce,
                                              return_response)
                if status:
                    if return_response:
                        return status
                    else:
                        return True
            threads.remove(thread)
    time.sleep(0.1)
# Timeout or no valid response received
raise Exception('NO_VALID_ANSWERS')
<SYSTEM_TASK:> Verify a provided list of OTPs. <END_TASK> <USER_TASK:> Description: def verify_multi(self, otp_list, max_time_window=DEFAULT_MAX_TIME_WINDOW, sl=None, timeout=None): """ Verify a provided list of OTPs. :param max_time_window: Maximum number of seconds which can pass between the first and last OTP generation for the OTP to still be considered valid. :type max_time_window: ``int`` """
# Create the OTP objects
otps = []
for otp in otp_list:
    otps.append(OTP(otp, self.translate_otp))
if len(otp_list) < 2:
    raise ValueError('otp_list needs to contain at least two OTPs')
device_ids = set()
for otp in otps:
    device_ids.add(otp.device_id)
# Check that all the OTPs contain same device id
if len(device_ids) != 1:
    raise Exception('OTPs contain different device ids')
# Now we verify the OTPs and save the server response for each OTP.
# We need the server response, to retrieve the timestamp.
# It's possible to retrieve this value locally, without querying the
# server but in this case, user would need to provide his AES key.
for otp in otps:
    response = self.verify(otp.otp, True, sl, timeout,
                           return_response=True)
    if not response:
        return False
    otp.timestamp = int(response['timestamp'])
count = len(otps)
delta = otps[count - 1].timestamp - otps[0].timestamp
# OTPs have an 8Hz timestamp counter so we need to divide it to get
# seconds
delta = delta / 8
if delta < 0:
    raise Exception('delta is smaller than zero. First OTP appears to '
                    'be older than the last one')
if delta > max_time_window:
    raise Exception(('More than %s seconds have passed between '
                     'generating the first and the last OTP.') %
                    (max_time_window))
return True
<SYSTEM_TASK:> Returns a query string which is sent to the validation servers. <END_TASK> <USER_TASK:> Description: def generate_query_string(self, otp, nonce, timestamp=False, sl=None, timeout=None): """ Returns a query string which is sent to the validation servers. """
data = [('id', self.client_id),
        ('otp', otp),
        ('nonce', nonce)]
if timestamp:
    data.append(('timestamp', '1'))
if sl is not None:
    if sl not in range(0, 101) and sl not in ['fast', 'secure']:
        raise Exception('sl parameter value must be between 0 and '
                        '100 or string "fast" or "secure"')
    data.append(('sl', sl))
if timeout:
    data.append(('timeout', timeout))
query_string = urlencode(data)
if self.key:
    hmac_signature = self.generate_message_signature(query_string)
    query_string += '&h=%s' % (hmac_signature.replace('+', '%2B'))
return query_string
<SYSTEM_TASK:> Returns a response signature and query string generated from the <END_TASK> <USER_TASK:> Description: def parse_parameters_from_response(self, response): """ Returns a response signature and query string generated from the server response. 'h' aka signature argument is stripped from the returned query string. """
lines = response.splitlines()
pairs = [line.strip().split('=', 1) for line in lines if '=' in line]
pairs = sorted(pairs)
signature = ([unquote(v) for k, v in pairs if k == 'h'] or [None])[0]
# values are already quoted
query_string = '&'.join([k + '=' + v for k, v in pairs if k != 'h'])
return (signature, query_string)
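A sketch of that parsing on a made-up Yubico-style response body (field values are illustrative, and unquote is omitted for brevity):

response = 'h=SGVsbG8=\nt=2019-01-01T00:00:00Z\notp=ccccccexample\nstatus=OK\n'
pairs = sorted(line.strip().split('=', 1)
               for line in response.splitlines() if '=' in line)
signature = ([v for k, v in pairs if k == 'h'] or [None])[0]
query_string = '&'.join(k + '=' + v for k, v in pairs if k != 'h')
assert signature == 'SGVsbG8='
assert query_string == 'otp=ccccccexample&status=OK&t=2019-01-01T00:00:00Z'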
<SYSTEM_TASK:> Returns query string parameters as a dictionary. <END_TASK> <USER_TASK:> Description: def get_parameters_as_dictionary(self, query_string): """ Returns query string parameters as a dictionary. """
pairs = (x.split('=', 1) for x in query_string.split('&'))
return dict((k, unquote(v)) for k, v in pairs)
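For instance (a minimal sketch; the keys and values are invented, and unquote lives in urllib.parse on Python 3):

from urllib.parse import unquote

query_string = 'otp=cccccc%2Bexample&status=OK'
pairs = (x.split('=', 1) for x in query_string.split('&'))
assert dict((k, unquote(v)) for k, v in pairs) == \
    {'otp': 'cccccc+example', 'status': 'OK'}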
<SYSTEM_TASK:> Return a path to the CA bundle which is used for verifying the hosts <END_TASK> <USER_TASK:> Description: def _get_ca_bundle_path(self): """ Return a path to the CA bundle which is used for verifying the hosts SSL certificate. """
if self.ca_certs_bundle_path:
    # User provided a custom path
    return self.ca_certs_bundle_path

# Return first bundle which is available
for file_path in COMMON_CA_LOCATIONS:
    if self._is_valid_ca_bundle_file(file_path=file_path):
        return file_path

return None
<SYSTEM_TASK:> Write one or more dump files to complete this dump. <END_TASK> <USER_TASK:> Description: def write(self, basename=None, write_separate_manifests=True): """Write one or more dump files to complete this dump. Returns the number of dump/archive files written. """
self.check_files()
n = 0
for manifest in self.partition_dumps():
    dumpbase = "%s%05d" % (basename, n)
    dumpfile = "%s.%s" % (dumpbase, self.format)
    if (write_separate_manifests):
        manifest.write(basename=dumpbase + '.xml')
    if (self.format == 'zip'):
        self.write_zip(manifest.resources, dumpfile)
    elif (self.format == 'warc'):
        self.write_warc(manifest.resources, dumpfile)
    else:
        raise DumpError(
            "Unknown dump format requested (%s)" % (self.format))
    n += 1
self.logger.info("Wrote %d dump files" % (n))
return(n)
<SYSTEM_TASK:> Write a ZIP format dump file. <END_TASK> <USER_TASK:> Description: def write_zip(self, resources=None, dumpfile=None): """Write a ZIP format dump file. Writes a ZIP file containing the resources in the iterable resources along with a manifest file manifest.xml (written first). No checks on the size of files or total size are performed, this is expected to have been done beforehand. """
compression = (ZIP_DEFLATED if self.compress else ZIP_STORED)
zf = ZipFile(
    dumpfile, mode="w", compression=compression, allowZip64=True)
# Build and write the manifest first
rdm = ResourceDumpManifest(resources=resources)
real_path = {}
for resource in resources:
    archive_path = self.archive_path(resource.path)
    real_path[archive_path] = resource.path
    resource.path = archive_path
zf.writestr('manifest.xml', rdm.as_xml())
# Add all files in the resources
for resource in resources:
    zf.write(real_path[resource.path], arcname=resource.path)
zf.close()
zipsize = os.path.getsize(dumpfile)
self.logger.info(
    "Wrote ZIP file dump %s with size %d bytes" % (dumpfile, zipsize))
<SYSTEM_TASK:> Write a WARC dump file. <END_TASK> <USER_TASK:> Description: def write_warc(self, resources=None, dumpfile=None): """Write a WARC dump file. WARC support is not part of ResourceSync v1.0 (Z39.99 2014) but is left in this library for experimentation. """
# Load library late as we want to be able to run rest of code
# without this installed
try:
    from warc import WARCFile, WARCHeader, WARCRecord
except ImportError:
    raise DumpError("Failed to load WARC library")
wf = WARCFile(dumpfile, mode="w", compress=self.compress)
# Add all files in the resources
for resource in resources:
    wh = WARCHeader({})
    wh.url = resource.uri
    wh.ip_address = None
    wh.date = resource.lastmod
    wh.content_type = 'text/plain'
    wh.result_code = 200
    wh.checksum = 'aabbcc'
    wh.location = self.archive_path(resource.path)
    wf.write_record(WARCRecord(header=wh, payload=resource.path))
wf.close()
warcsize = os.path.getsize(dumpfile)
self.logger.info(
    "Wrote WARC file dump %s with size %d bytes" % (dumpfile, warcsize))
<SYSTEM_TASK:> Check all files in self.resources, find longest common prefix. <END_TASK> <USER_TASK:> Description: def check_files(self, set_length=True, check_length=True):
    """Check all files in self.resources, find longest common prefix.

    Go through and check all files in self.resources, add up their
    sizes, and find the longest common path that can be used when
    writing the dump file. Saved in self.path_prefix.

    The parameters set_length and check_length control whether the
    length attribute should be set from the file size if not specified,
    and whether any length specified should be checked. By default both
    are True. In any event, the total size calculated is the size of
    files on disk.
    """
total_size = 0  # total size of all files in bytes
path_prefix = None
for resource in self.resources:
    if (resource.path is None):
        # explicit test because exception raised by getsize otherwise
        # confusing
        raise DumpError(
            "No file path defined for resource %s" % resource.uri)
    if (path_prefix is None):
        path_prefix = os.path.dirname(resource.path)
    else:
        path_prefix = os.path.commonprefix(
            [path_prefix, os.path.dirname(resource.path)])
    size = os.path.getsize(resource.path)
    if (resource.length is not None):
        if (check_length and resource.length != size):
            raise DumpError("Size of resource %s is %d on disk, not %d as specified" %
                            (resource.uri, size, resource.length))
    elif (set_length):
        resource.length = size
    if (size > self.max_size):
        raise DumpError(
            "Size of file (%s, %d) exceeds maximum (%d) dump size" %
            (resource.path, size, self.max_size))
    total_size += size
self.path_prefix = path_prefix
self.total_size = total_size
self.logger.info(
    "Total size of files to include in dump %d bytes" % (total_size))
return True
<SYSTEM_TASK:> Yield a set of manifest objects that partition the dumps. <END_TASK> <USER_TASK:> Description: def partition_dumps(self):
    """Yield a set of manifest objects that partition the dumps.

    Simply adds resources/files to a manifest until either the correct
    number of files is reached or the size limit is exceeded, then
    yields that manifest.
    """
manifest = self.manifest_class()
manifest_size = 0
manifest_files = 0
for resource in self.resources:
    manifest.add(resource)
    manifest_size += resource.length
    manifest_files += 1
    if (manifest_size >= self.max_size or
            manifest_files >= self.max_files):
        yield(manifest)
        # Need to start a new manifest
        manifest = self.manifest_class()
        manifest_size = 0
        manifest_files = 0
if (manifest_files > 0):
    yield(manifest)
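A hedged toy version of the same partitioning rule, with plain lists standing in for the manifest and resource classes:

def partition(lengths, max_size=100, max_files=3):
    group, size = [], 0
    for length in lengths:
        group.append(length)
        size += length
        if size >= max_size or len(group) >= max_files:
            yield group
            group, size = [], 0
    if group:
        yield group

# 60+50 reaches max_size, so a new group starts; then the file cap splits again
assert list(partition([60, 50, 10, 10, 10, 10])) == [[60, 50], [10, 10, 10], [10]]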
<SYSTEM_TASK:> Return the archive path for file with real_path. <END_TASK> <USER_TASK:> Description: def archive_path(self, real_path): """Return the archive path for file with real_path. Mapping is based on removal of self.path_prefix which is determined by self.check_files(). """
if (not self.path_prefix):
    return(real_path)
else:
    return(os.path.relpath(real_path, self.path_prefix))
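For example (a sketch assuming POSIX paths, with the prefix as check_files() would compute it):

import os

path_prefix = '/data/files'
real_path = '/data/files/sub/dir/resource1'
assert os.path.relpath(real_path, path_prefix) == 'sub/dir/resource1'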
<SYSTEM_TASK:> Attach a new engine to director <END_TASK> <USER_TASK:> Description: def reset(self): """ Attach a new engine to director """
self.scene = cocos.scene.Scene()
self.z = 0
palette = config.settings['view']['palette']
#Player.palette = palette
r, g, b = palette['bg']
self.scene.add(cocos.layer.ColorLayer(r, g, b, 255), z=self.z)
self.z += 1
message_layer = MessageLayer()
self.scene.add(message_layer, z=self.z)
self.z += 1
self.world_layer = WorldLayer(self.mode_id,
                              fn_show_message=message_layer.show_message)
self.scene.add(self.world_layer, z=self.z)
self.z += 1
self.director._set_scene(self.scene)
# Step once to refresh before `act`
self.step()
# TODO: Reset to `ones`?
return self.world_layer.get_state()
<SYSTEM_TASK:> Take one action for one step <END_TASK> <USER_TASK:> Description: def act(self, action): """ Take one action for one step """
# FIXME: Hack to change in return type
action = int(action)
assert isinstance(action, int)
assert action < self.actions_num, "%r (%s) invalid" % (action, type(action))
# Reset buttons
for k in self.world_layer.buttons:
    self.world_layer.buttons[k] = 0
# Apply each button defined in action config
for key in self.world_layer.player.controls[action]:
    if key in self.world_layer.buttons:
        self.world_layer.buttons[key] = 1
# Act in the environment
self.step()
observation = self.world_layer.get_state()
reward = self.world_layer.player.get_reward()
terminal = self.world_layer.player.game_over
info = {}
return observation, reward, terminal, info
<SYSTEM_TASK:> Step the engine one tick <END_TASK> <USER_TASK:> Description: def step(self): """ Step the engine one tick """
self.director.window.switch_to()
self.director.window.dispatch_events()
self.director.window.dispatch_event('on_draw')
self.director.window.flip()
# Ticking before events caused glitches.
pyglet.clock.tick()
<SYSTEM_TASK:> Class decorator that declares dependencies <END_TASK> <USER_TASK:> Description: def has(cls): """Class decorator that declares dependencies"""
deps = {}
for i in dir(cls):
    if i.startswith('__') and i.endswith('__'):
        continue
    val = getattr(cls, i, None)
    if isinstance(val, Dependency):
        deps[i] = val
        if val.name is None:
            val.name = i
cls.__injections__ = deps
return cls
<SYSTEM_TASK:> Injects dependencies and propagates dependency injector <END_TASK> <USER_TASK:> Description: def inject(self, inst, **renames): """Injects dependencies and propagates dependency injector"""
if renames:
    di = self.clone(**renames)
else:
    di = self
pro = di._provides
inst.__injections_source__ = di
deps = getattr(inst, '__injections__', None)
if deps:
    for attr, dep in deps.items():
        val = pro.get(dep.name)
        if val is None:
            raise MissingDependencyError(dep.name)
        if not isinstance(val, dep.type):
            raise TypeError("Wrong provider for {!r}".format(val))
        setattr(inst, attr, val)
meth = getattr(inst, '__injected__', None)
if meth is not None:
    meth()
return inst
<SYSTEM_TASK:> Propagate dependencies for provided instances <END_TASK> <USER_TASK:> Description: def interconnect_all(self): """Propagate dependencies for provided instances"""
for dep in topologically_sorted(self._provides):
    if (hasattr(dep, '__injections__') and
            not hasattr(dep, '__injections_source__')):
        self.inject(dep)
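A hedged usage sketch of the has/inject pattern (Container, Database and the item-assignment registration are assumptions for illustration; has and Dependency are from the snippets above):

@has
class Handler(object):
    db = Dependency(Database)       # dependency named 'db' (defaults to the attribute name)

c = Container()                     # assumed injector exposing inject()
c['db'] = Database('sqlite://')     # hypothetical provider registration
handler = c.inject(Handler())       # sets handler.db, then calls __injected__ if defined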
<SYSTEM_TASK:> Given some coefficients, return the derivative of a certain kind of <END_TASK> <USER_TASK:> Description: def derivatives_factory(cls, coef, domain, kind, **kwargs):
    """
    Given some coefficients, return the derivative of a certain kind of
    orthogonal polynomial defined over a specific domain.
    """
basis_polynomial = cls._basis_polynomial_factory(kind)
return basis_polynomial(coef, domain).deriv()
<SYSTEM_TASK:> Given some coefficients, return a certain kind of orthogonal polynomial <END_TASK> <USER_TASK:> Description: def functions_factory(cls, coef, domain, kind, **kwargs): """ Given some coefficients, return a certain kind of orthogonal polynomial defined over a specific domain. """
basis_polynomial = cls._basis_polynomial_factory(kind)
return basis_polynomial(coef, domain)
<SYSTEM_TASK:> Return optimal collocation nodes for some orthogonal polynomial. <END_TASK> <USER_TASK:> Description: def roots(cls, degree, domain, kind): """Return optimal collocation nodes for some orthogonal polynomial."""
basis_coefs = cls._basis_monomial_coefs(degree)
basis_poly = cls.functions_factory(basis_coefs, domain, kind)
return basis_poly.roots()
<SYSTEM_TASK:> Add a capability list. <END_TASK> <USER_TASK:> Description: def add_capability_list(self, capability_list=None):
    """Add a capability list.

    Adds either a CapabilityList object specified in capability_list
    or else creates a Resource with the URI given in capability_list
    and adds that to the Source Description.
    """
if (hasattr(capability_list, 'uri')):
    r = Resource(uri=capability_list.uri,
                 capability=capability_list.capability_name)
    if (capability_list.describedby is not None):
        r.link_set(rel='describedby', href=capability_list.describedby)
else:
    r = Resource(uri=capability_list, capability='capabilitylist')
self.add(r)
<SYSTEM_TASK:> Verify OAuth token and set current user on request stack. <END_TASK> <USER_TASK:> Description: def verify_oauth_token_and_set_current_user(): """Verify OAuth token and set current user on request stack. This function should be used **only** on REST application. .. code-block:: python app.before_request(verify_oauth_token_and_set_current_user) """
for func in oauth2._before_request_funcs:
    func()
if not hasattr(request, 'oauth') or not request.oauth:
    scopes = []
    try:
        valid, req = oauth2.verify_request(scopes)
    except ValueError:
        abort(400, 'Error trying to decode a non urlencoded string.')
    for func in oauth2._after_request_funcs:
        valid, req = func(valid, req)
    if valid:
        request.oauth = req
<SYSTEM_TASK:> Return list of scope choices. <END_TASK> <USER_TASK:> Description: def scope_choices(self, exclude_internal=True): """Return list of scope choices. :param exclude_internal: Exclude internal scopes or not. (Default: ``True``) :returns: A list of tuples (id, scope). """
return [
    (k, scope) for k, scope in sorted(self.scopes.items())
    if not exclude_internal or not scope.is_internal
]
<SYSTEM_TASK:> Register a scope. <END_TASK> <USER_TASK:> Description: def register_scope(self, scope): """Register a scope. :param scope: A :class:`invenio_oauth2server.models.Scope` instance. """
if not isinstance(scope, Scope):
    raise TypeError("Invalid scope type.")
assert scope.id not in self.scopes
self.scopes[scope.id] = scope
<SYSTEM_TASK:> Monkeypatch OAuthlib set of "URL encoded"-safe characters. <END_TASK> <USER_TASK:> Description: def monkeypatch_oauthlib_urlencode_chars(chars):
    """Monkeypatch OAuthlib set of "URL encoded"-safe characters.

    .. note::

        OAuthlib keeps a set of characters that it considers as valid
        inside an URL-encoded query-string during parsing of requests.
        The issue is that this set of characters wasn't designed to be
        configurable since it should technically follow various RFC
        specifications about URIs, like for example `RFC3986
        <https://www.ietf.org/rfc/rfc3986.txt>`_. Many online services
        and frameworks though have designed their APIs in ways that aim
        at keeping things practical and readable to the API consumer,
        making use of special characters to mark or separate
        query-string arguments. Such an example is the usage of embedded
        JSON strings inside query-string arguments, which of course have
        to contain the "colon" character (:) for key/value pair
        definitions.

        Users of the OAuthlib library, in order to integrate with these
        services and frameworks, end up either circumventing these
        "static" restrictions of OAuthlib by pre-processing
        query-strings, or -in search of a more permanent solution-
        directly make Pull Requests to OAuthlib to include additional
        characters in the set, and explain the logic behind their
        decision (one can witness these efforts inside the git history
        of the source file that includes this set of characters `here
        <https://github.com/idan/oauthlib/commits/master/oauthlib/common.py>`_).
        This kind of tactic leads easily to misconceptions about the
        ability one has over the usage of specific features of services
        and frameworks.

        In order to tackle this issue in Invenio-OAuth2Server, we are
        monkey-patching this set of characters using a configuration
        variable, so that usage of any special characters is a conscious
        decision of the package user.
    """
modified_chars = set(chars)
always_safe = set(oauthlib_commmon.always_safe)
original_special_chars = oauthlib_commmon.urlencoded - always_safe
if modified_chars != original_special_chars:
    warnings.warn(
        'You are overriding the default OAuthlib "URL encoded" set of '
        'valid characters. Make sure that the characters defined in '
        'oauthlib.common.urlencoded are indeed limiting your needs.',
        RuntimeWarning
    )
    oauthlib_commmon.urlencoded = always_safe | modified_chars
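A typical call site would be application setup; a hedged example (both the character set and the config key are illustrative, not the extension's documented defaults):

# e.g. in an app factory: allow ':' and ',' so embedded JSON survives parsing
app.config.setdefault('OAUTH2SERVER_ALLOWED_URLENCODE_CHARACTERS',
                      '=&;:%+~,*@!()/?')
monkeypatch_oauthlib_urlencode_chars(
    app.config['OAUTH2SERVER_ALLOWED_URLENCODE_CHARACTERS'])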
<SYSTEM_TASK:> Compute the score of a text by using the frequencies of ngrams. <END_TASK> <USER_TASK:> Description: def NgramScorer(frequency_map): """Compute the score of a text by using the frequencies of ngrams. Example: >>> fitness = NgramScorer(english.unigrams) >>> fitness("ABC") -4.3622319742618245 Args: frequency_map (dict): ngram to frequency mapping """
# Calculate the log probability
length = len(next(iter(frequency_map)))
# TODO: 0.01 is a magic number. Needs to be better than that.
floor = math.log10(0.01 / sum(frequency_map.values()))
ngrams = frequency.frequency_to_probability(frequency_map,
                                            decorator=math.log10)

def inner(text):
    # I dont like this, it is only for the .upper() to work,
    # But I feel as though this can be removed in later refactoring
    text = ''.join(text)
    text = remove(text.upper(), string.whitespace + string.punctuation)
    return sum(ngrams.get(ngram, floor)
               for ngram in iterate_ngrams(text, length))

return inner
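The score is a sum of log probabilities with a floor for unseen ngrams; a self-contained sketch of the same idea with toy unigram counts (not the library's tables):

import math

counts = {'E': 12, 'T': 9, 'A': 8}               # toy corpus counts
total = sum(counts.values())
floor = math.log10(0.01 / total)                 # penalty for unseen ngrams
logprobs = {k: math.log10(v / total) for k, v in counts.items()}

def score(text):
    return sum(logprobs.get(c, floor) for c in text)

assert score('EAT') > score('QQQ')               # plausible text scores higher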
<SYSTEM_TASK:> Specific add function for capabilities. <END_TASK> <USER_TASK:> Description: def add_capability(self, capability=None, uri=None, name=None): """Specific add function for capabilities. Takes either: - a capability object (derived from ListBase) as the first argument from which the capability name is extracted, and the URI if given - or a plain name string and - the URI of the capability """
if (capability is not None):
    name = capability.capability_name
    if (capability.uri is not None):
        uri = capability.uri
self.add(Resource(uri=uri, capability=name))
<SYSTEM_TASK:> Return information about the requested capability from this list. <END_TASK> <USER_TASK:> Description: def capability_info(self, name=None): """Return information about the requested capability from this list. Will return None if there is no information about the requested capability. """
for r in self.resources:
    if (r.capability == name):
        return(r)
return(None)
<SYSTEM_TASK:> Create the base UI in command mode. <END_TASK> <USER_TASK:> Description: def ui_main(fmt_table, node_dict): """Create the base UI in command mode."""
cmd_funct = {"quit": False, "run": node_cmd, "stop": node_cmd, "connect": node_cmd, "details": node_cmd, "update": True} ui_print("\033[?25l") # cursor off print("{}\n".format(fmt_table)) sys.stdout.flush() # refresh_main values: # None = loop main-cmd, True = refresh-list, False = exit-program refresh_main = None while refresh_main is None: cmd_name = get_user_cmd(node_dict) if callable(cmd_funct[cmd_name]): refresh_main = cmd_funct[cmd_name](cmd_name, node_dict) else: refresh_main = cmd_funct[cmd_name] if cmd_name != "connect" and refresh_main: ui_clear(len(node_dict) + 2) return refresh_main
<SYSTEM_TASK:> Process commands that target specific nodes. <END_TASK> <USER_TASK:> Description: def node_cmd(cmd_name, node_dict): """Process commands that target specific nodes."""
sc = {"run": cmd_startstop, "stop": cmd_startstop, "connect": cmd_connect, "details": cmd_details} node_num = node_selection(cmd_name, len(node_dict)) refresh_main = None if node_num != 0: (node_valid, node_info) = node_validate(node_dict, node_num, cmd_name) if node_valid: sub_cmd = sc[cmd_name] # get sub-command refresh_main = sub_cmd(node_dict[node_num], cmd_name, node_info) else: # invalid target ui_print_suffix(node_info, C_ERR) sleep(1.5) else: # '0' entered - exit command but not program ui_print(" - Exit Command") sleep(0.5) return refresh_main
<SYSTEM_TASK:> Validate that command can be performed on target node. <END_TASK> <USER_TASK:> Description: def node_validate(node_dict, node_num, cmd_name): """Validate that command can be performed on target node."""
# cmd: [required-state, error-statement]
req_lu = {"run": ["stopped", "Already Running"],
          "stop": ["running", "Already Stopped"],
          "connect": ["running", "Can't Connect, Node Not Running"],
          "details": [node_dict[node_num].state, ""]}
tm = {True: ("Node {1}{2}{0} ({5}{3}{0} on {1}{4}{0})".
             format(C_NORM, C_WARN, node_num,
                    node_dict[node_num].name,
                    node_dict[node_num].cloud_disp, C_TI)),
      False: req_lu[cmd_name][1]}
node_valid = bool(req_lu[cmd_name][0] == node_dict[node_num].state)
node_info = tm[node_valid]
return node_valid, node_info
<SYSTEM_TASK:> Confirm command and execute it. <END_TASK> <USER_TASK:> Description: def cmd_startstop(node, cmd_name, node_info): """Confirm command and execute it."""
cmd_lu = {"run": ["ex_start_node", "wait_until_running", "RUNNING"], "stop": ["ex_stop_node", "", "STOPPING"]} # specific delay & message {provider: {command: [delay, message]}} cld_lu = {"azure": {"stop": [6, "Initiated"]}, "aws": {"stop": [6, "Initiated"]}} conf_mess = ("\r{0}{1}{2} {3} - Confirm [y/N]: ". format(C_STAT[cmd_name.upper()], cmd_name.upper(), C_NORM, node_info)) cmd_result = None if input_yn(conf_mess): exec_mess = ("\r{0}{1}{2} {3}: ". format(C_STAT[cmd_name.upper()], cmd_lu[cmd_name][2], C_NORM, node_info)) ui_erase_ln() ui_print(exec_mess) busy_obj = busy_disp_on() # busy indicator ON node_drv = getattr(node, "driver") main_cmd = getattr(node_drv, cmd_lu[cmd_name][0]) response = main_cmd(node) # noqa cmd_wait = cmd_lu[cmd_name][1] if cmd_wait: seccmd = getattr(node_drv, cmd_wait) response = seccmd([node]) # noqa delay, cmd_end = cld_lu.get(node.cloud, {}).get(cmd_name, [0, "Successful"]) sleep(delay) busy_disp_off(busy_obj) # busy indicator OFF ui_print("\033[D") # remove extra space cmd_result = True ui_print_suffix("{0} {1}".format(cmd_name.title(), cmd_end), C_GOOD) sleep(1.5) else: ui_print_suffix("Command Aborted") sleep(0.75) return cmd_result
<SYSTEM_TASK:> Connect to node. <END_TASK> <USER_TASK:> Description: def cmd_connect(node, cmd_name, node_info): """Connect to node."""
# FUTURE: call function to check for custom connection-info
conn_info = "Defaults"
conf_mess = ("\r{0}{1} TO{2} {3} using {5}{4}{2} - Confirm [y/N]: ".
             format(C_STAT[cmd_name.upper()], cmd_name.upper(),
                    C_NORM, node_info, conn_info, C_HEAD2))
cmd_result = None
if input_yn(conf_mess):
    exec_mess = ("\r{0}CONNECTING TO{1} {2} using {4}{3}{1}: ".
                 format(C_STAT[cmd_name.upper()], C_NORM, node_info,
                        conn_info, C_HEAD2))
    ui_erase_ln()
    ui_print(exec_mess)
    (ssh_user, ssh_key) = ssh_get_info(node)
    if ssh_user:
        ssh_cmd = "ssh {0}{1}@{2}".format(ssh_key, ssh_user,
                                          node.public_ips)
    else:
        ssh_cmd = "ssh {0}{1}".format(ssh_key, node.public_ips)
    print("\n")
    ui_print("\033[?25h")  # cursor on
    subprocess.call(ssh_cmd, shell=True)
    ui_print("\033[?25l")  # cursor off
    print()
    cmd_result = True
else:
    ui_print_suffix("Command Aborted")
    sleep(0.75)
return cmd_result
<SYSTEM_TASK:> Determine ssh-user and ssh-key for node. <END_TASK> <USER_TASK:> Description: def ssh_get_info(node): """Determine ssh-user and ssh-key for node."""
ssh_key = "" if node.cloud == "aws": raw_key = node.extra['key_name'] ssh_key = "-i {0}{1}.pem ".format(CONFIG_DIR, raw_key) ssh_user = ssh_calc_aws(node) elif node.cloud == "azure": ssh_user = node.extra['properties']['osProfile']['adminUsername'] elif node.cloud == "gcp": items = node.extra['metadata'].get('items', [{}]) keyname = items['key' == 'ssh-keys'].get('value', "") pos = keyname.find(":") ssh_user = keyname[0:pos] elif node.cloud == "alicloud": ssh_user = "" return ssh_user, ssh_key
<SYSTEM_TASK:> Flush the input buffer on posix and windows. <END_TASK> <USER_TASK:> Description: def input_flush(): """Flush the input buffer on posix and windows."""
try:
    import sys, termios  # noqa
    termios.tcflush(sys.stdin, termios.TCIFLUSH)
except ImportError:
    import msvcrt
    while msvcrt.kbhit():
        msvcrt.getch()
<SYSTEM_TASK:> Get user for grant type password. <END_TASK> <USER_TASK:> Description: def get_user(email, password, *args, **kwargs): """Get user for grant type password. Needed for grant type 'password'. Note, grant type password is by default disabled. :param email: User email. :param password: Password. :returns: The user instance or ``None``. """
user = datastore.find_user(email=email)
if user and user.active and verify_password(password, user.password):
    return user
<SYSTEM_TASK:> Load an access token. <END_TASK> <USER_TASK:> Description: def get_token(access_token=None, refresh_token=None): """Load an access token. Add support for personal access tokens compared to flask-oauthlib. If the access token is ``None``, it looks for the refresh token. :param access_token: The access token. (Default: ``None``) :param refresh_token: The refresh token. (Default: ``None``) :returns: The token instance or ``None``. """
if access_token:
    t = Token.query.filter_by(access_token=access_token).first()
    if t and t.is_personal and t.user.active:
        t.expires = datetime.utcnow() + timedelta(
            seconds=int(current_app.config.get(
                'OAUTH2_PROVIDER_TOKEN_EXPIRES_IN'
            ))
        )
elif refresh_token:
    t = Token.query.join(Token.client).filter(
        Token.refresh_token == refresh_token,
        Token.is_personal == False,  # noqa
        Client.is_confidential == True,  # noqa
    ).first()
else:
    return None
return t if t and t.user.active else None
<SYSTEM_TASK:> Load the client. <END_TASK> <USER_TASK:> Description: def get_client(client_id): """Load the client. Needed for grant_type client_credentials. Add support for OAuth client_credentials access type, with user inactivation support. :param client_id: The client ID. :returns: The client instance or ``None``. """
client = Client.query.get(client_id)
if client and client.user.active:
    return client
<SYSTEM_TASK:> Token persistence. <END_TASK> <USER_TASK:> Description: def save_token(token, request, *args, **kwargs): """Token persistence. :param token: A dictionary with the token data. :param request: The request instance. :returns: A :class:`invenio_oauth2server.models.Token` instance. """
# Exclude the personal access tokens, which don't expire.
user = request.user if request.user else current_user

# Add user information in token endpoint response.
# Currently, this is the only way to have access to the user of the
# token as well as the token response.
token.update(user={'id': user.get_id()})

# Add email if scope granted.
if email_scope.id in token.scopes:
    token['user'].update(
        email=user.email,
        email_verified=user.confirmed_at is not None,
    )

tokens = Token.query.filter_by(
    client_id=request.client.client_id,
    user_id=user.id,
    is_personal=False,
)
# Make sure that every client has only one token connected to a user.
if tokens:
    for tk in tokens:
        db.session.delete(tk)
    db.session.commit()

expires_in = token.get('expires_in')
expires = datetime.utcnow() + timedelta(seconds=int(expires_in))

tok = Token(
    access_token=token['access_token'],
    refresh_token=token.get('refresh_token'),
    token_type=token['token_type'],
    _scopes=token['scope'],
    expires=expires,
    client_id=request.client.client_id,
    user_id=user.id,
    is_personal=False,
)
db.session.add(tok)
db.session.commit()
return tok
<SYSTEM_TASK:> Log in a user after having been verified. <END_TASK> <USER_TASK:> Description: def login_oauth2_user(valid, oauth): """Log in a user after having been verified."""
if valid:
    oauth.user.login_via_oauth2 = True
    _request_ctx_stack.top.user = oauth.user
    identity_changed.send(current_app._get_current_object(),
                          identity=Identity(oauth.user.id))
return valid, oauth
<SYSTEM_TASK:> Verify the JWT token. <END_TASK> <USER_TASK:> Description: def jwt_verify_token(headers): """Verify the JWT token. :param dict headers: The request headers. :returns: The token data. :rtype: dict """
# Get the token from headers
token = headers.get(
    current_app.config['OAUTH2SERVER_JWT_AUTH_HEADER']
)
if token is None:
    raise JWTInvalidHeaderError

# Get authentication type
authentication_type = \
    current_app.config['OAUTH2SERVER_JWT_AUTH_HEADER_TYPE']
# Check if the type should be checked
if authentication_type is not None:
    # Get the prefix and the token
    prefix, token = token.split()
    # Check if the type matches
    if prefix != authentication_type:
        raise JWTInvalidHeaderError

try:
    # Get the token data
    decode = jwt_decode_token(token)
    # Check the integrity of the user
    if current_user.get_id() != decode.get('sub'):
        raise JWTInvalidIssuer
    return decode
except _JWTDecodeError as exc:
    raise_from(JWTDecodeError(), exc)
except _JWTExpiredToken as exc:
    raise_from(JWTExpiredToken(), exc)
<SYSTEM_TASK:> View for rendering authorization request. <END_TASK> <USER_TASK:> Description: def authorize(*args, **kwargs): """View for rendering authorization request."""
if request.method == 'GET': client = Client.query.filter_by( client_id=kwargs.get('client_id') ).first() if not client: abort(404) scopes = current_oauth2server.scopes ctx = dict( client=client, oauth_request=kwargs.get('request'), scopes=[scopes[x] for x in kwargs.get('scopes', [])], ) return render_template('invenio_oauth2server/authorize.html', **ctx) confirm = request.form.get('confirm', 'no') return confirm == 'yes'
<SYSTEM_TASK:> Test to verify that you have been authenticated. <END_TASK> <USER_TASK:> Description: def info(): """Test to verify that you have been authenticated."""
if current_app.testing or current_app.debug: return jsonify(dict( user=request.oauth.user.id, client=request.oauth.client.client_id, scopes=list(request.oauth.scopes) )) else: abort(404)
<SYSTEM_TASK:>
Rank all key periods for ``ciphertext`` up to and including ``max_key_period``
<END_TASK>
<USER_TASK:>
Description:
def key_periods(ciphertext, max_key_period):
    """Rank all key periods for ``ciphertext`` up to and including ``max_key_period``.

    Example:
        >>> key_periods(ciphertext, 30)
        [2, 4, 8, 3, ...]

    Args:
        ciphertext (str): The text to analyze
        max_key_period (int): The maximum period the key could be

    Returns:
        Sorted list of key periods, most likely first

    Raises:
        ValueError: If max_key_period is less than or equal to 0
    """
if max_key_period <= 0: raise ValueError("max_key_period must be a positive integer") key_scores = [] for period in range(1, min(max_key_period, len(ciphertext)) + 1): score = abs(ENGLISH_IC - index_of_coincidence(*split_columns(ciphertext, period))) key_scores.append((period, score)) return [p[0] for p in sorted(key_scores, key=lambda x: x[1])]
<SYSTEM_TASK:>
INTERACTIVE exploration of source capabilities.
<END_TASK>
<USER_TASK:>
Description:
def explore(self):
    """INTERACTIVE exploration of source capabilities.

    Will use sitemap URI taken either from explicit self.sitemap_name
    or derived from the mappings supplied.
    """
# Where do we start? Build options in starts, which has entries that
# are pairs of the uri and a list of acceptable capabilities.
starts = []
if (self.sitemap_name is not None):
    print("Starting from explicit --sitemap %s" % (self.sitemap_name))
    starts.append(XResource(self.sitemap_name))
elif (len(self.mapper) > 0):
    uri = self.mapper.default_src_uri()
    (scheme, netloc, path, params, query, fragment) = urlparse(uri)
    if (not scheme and not netloc):
        if (os.path.isdir(path)):
            # have a dir, look for 'likely' file names
            print(
                "Looking for capability documents in local directory %s" %
                (path))
            for name in ['resourcesync', 'capabilities.xml',
                         'resourcelist.xml', 'changelist.xml']:
                file = os.path.join(path, name)
                if (os.path.isfile(file)):
                    starts.append(XResource(file))
            if (len(starts) == 0):
                raise ClientFatalError(
                    "No likely capability files found in local directory %s" %
                    (path))
        else:
            # local file, might be anything (or not exist)
            print("Starting from local file %s" % (path))
            starts.append(XResource(path))
    else:
        # remote, can't tell whether we have a sitemap or a server name
        # or something else; build list of options depending on whether
        # there is a path and whether there is an extension/name
        well_known = urlunparse(
            [scheme, netloc, '/.well-known/resourcesync', '', '', ''])
        if (not path):
            # root, just look for .well-known
            starts.append(
                XResource(
                    well_known, [
                        'capabilitylist', 'capabilitylistindex']))
        else:
            starts.append(XResource(uri))
            starts.append(
                XResource(
                    well_known, [
                        'capabilitylist', 'capabilitylistindex']))
        print("Looking for discovery information based on mappings")
else:
    raise ClientFatalError(
        "No source information (server base uri or capability uri) specified, use -h for help")
#
# Have a list of one or more possible starting points; try them in turn.
try:
    for start in starts:
        # For each starting point we create a fresh history
        history = [start]
        while (len(history) > 0):
            print()
            xr = history.pop()
            new_xr = self.explore_uri(xr, len(history) > 0)
            if (new_xr):
                # Add current and new to history
                history.append(xr)
                history.append(new_xr)
except ExplorerQuit:
    pass  # expected way to exit
print("\nresync-explorer done, bye...\n")
<SYSTEM_TASK:>
Do HEAD on uri and show information.
<END_TASK>
<USER_TASK:>
Description:
def explore_show_head(self, uri, check_headers=None):
    """Do HEAD on uri and show information.

    Will also check headers against any values specified in
    check_headers.
    """
print("HEAD %s" % (uri)) if (re.match(r'^\w+:', uri)): # Looks like a URI response = requests.head(uri) else: # Mock up response if we have a local file response = self.head_on_file(uri) print(" status: %s" % (response.status_code)) if (response.status_code == '200'): # print some of the headers for header in ['content-length', 'last-modified', 'lastmod', 'content-type', 'etag']: if header in response.headers: check_str = '' if (check_headers is not None and header in check_headers): if (response.headers[header] == check_headers[header]): check_str = ' MATCHES EXPECTED VALUE' else: check_str = ' EXPECTED %s' % ( check_headers[header]) print( " %s: %s%s" % (header, response.headers[header], check_str))
<SYSTEM_TASK:>
Return list of allowed entries for given capability document.
<END_TASK>
<USER_TASK:>
Description:
def allowed_entries(self, capability):
    """Return list of allowed entries for given capability document.

    Includes handling of capability = *index documents, where the only
    acceptable entries are the corresponding * documents (e.g. a
    resourcelistindex may reference only resourcelists).
    """
index = re.match(r'(.+)index$', capability) archive = re.match(r'(.+)\-archive$', capability) if (capability == 'capabilitylistindex'): return([]) # not allowed so no valid references elif (index): return([index.group(1)]) # name without index ending elif (archive): return([archive.group(1)]) # name without -archive ending elif (capability == 'description'): return(['capabilitylist']) elif (capability == 'capabilitylist'): return(['resourcelist', 'resourcedump', 'changelist', 'changedump', 'resourcelist-archive', 'resourcedump-archive', 'changelist-archive', 'changedump-archive']) return([])
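For illustration, assuming an explorer instance named ``explorer``, the dispatch above yields:

    explorer.allowed_entries('resourcelistindex')    # ['resourcelist']
    explorer.allowed_entries('changedump-archive')   # ['changedump']
    explorer.allowed_entries('capabilitylistindex')  # [] (not allowed)
    explorer.allowed_entries('description')          # ['capabilitylist']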
<SYSTEM_TASK:> If uri is relative then expand in context. <END_TASK> <USER_TASK:> Description: def expand_relative_uri(self, context, uri): """If uri is relative then expand in context. Prints warning if expansion happens. """
full_uri = urljoin(context, uri) if (full_uri != uri): print(" WARNING - expanded relative URI to %s" % (full_uri)) uri = full_uri return(uri)
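The expansion itself is plain ``urljoin``; a standalone illustration:

    from urllib.parse import urljoin  # six.moves.urllib.parse on Python 2

    urljoin('http://example.org/sync/capabilitylist.xml', 'changelist.xml')
    # -> 'http://example.org/sync/changelist.xml' (relative, expanded)
    urljoin('http://example.org/sync/capabilitylist.xml',
            'http://example.org/other.xml')
    # -> 'http://example.org/other.xml' (already absolute, unchanged)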
<SYSTEM_TASK:>
Modular hill climbing algorithm.
<END_TASK>
<USER_TASK:>
Description:
def hill_climb(nsteps, start_node, get_next_node):
    """Modular hill climbing algorithm.

    Example:
        >>> def get_next_node(node):
        ...     a, b = random.sample(range(len(node)), 2)
        ...     node[a], node[b] = node[b], node[a]
        ...     plaintext = decrypt(node, ciphertext)
        ...     score = lantern.score(plaintext, *fitness_functions)
        ...     return node, score, Decryption(plaintext, ''.join(node), score)
        >>> final_node, best_score, outputs = hill_climb(10, "ABC", get_next_node)

    Args:
        nsteps (int): The number of neighbours to visit
        start_node: The starting node
        get_next_node (function): Function to return the next node,
            the score of the current node, and any optional output
            from the current node

    Returns:
        The best node found, the score of this node, and the outputs
        from the improving nodes along the way
    """
    outputs = []
    best_score = -float('inf')

    for step in range(nsteps):
        next_node, score, output = get_next_node(copy.deepcopy(start_node))

        # Track the best score; an improving neighbour becomes the new
        # start node for subsequent steps.
        if score > best_score:
            start_node = copy.deepcopy(next_node)
            best_score = score
            outputs.append(output)

    return start_node, best_score, outputs
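A self-contained toy run with a hypothetical objective that rewards coordinates close to 7; only the node shape and the (node, score, output) return contract matter:

    import random

    def get_next_node(node):
        # Perturb one coordinate; score is negative distance from target.
        i = random.randrange(len(node))
        node[i] += random.choice([-1, 1])
        score = -sum(abs(x - 7) for x in node)
        return node, score, list(node)

    final_node, best_score, trail = hill_climb(500, [0, 0, 0], get_next_node)
    # final_node approaches [7, 7, 7]; trail records each improving node.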
<SYSTEM_TASK:> Validate and store redirect URIs for client. <END_TASK> <USER_TASK:> Description: def redirect_uris(self, value): """Validate and store redirect URIs for client."""
if isinstance(value, six.text_type): value = value.split("\n") value = [v.strip() for v in value] for v in value: validate_redirect_uri(v) self._redirect_uris = "\n".join(value) or ""
<SYSTEM_TASK:> Set scopes. <END_TASK> <USER_TASK:> Description: def scopes(self, scopes): """Set scopes. :param scopes: The list of scopes. """
validate_scopes(scopes) self._scopes = " ".join(set(scopes)) if scopes else ""
<SYSTEM_TASK:> Get list of non-internal scopes for token. <END_TASK> <USER_TASK:> Description: def get_visible_scopes(self): """Get list of non-internal scopes for token. :returns: A list of scopes. """
return [k for k, s in current_oauth2server.scope_choices() if k in self.scopes]
<SYSTEM_TASK:>
Create a personal access token.
<END_TASK>
<USER_TASK:>
Description:
def create_personal(cls, name, user_id, scopes=None, is_internal=False):
    """Create a personal access token.

    A token that is bound to a specific user and which doesn't expire,
    i.e. similar to the concept of an API key.

    :param name: Client name.
    :param user_id: User ID.
    :param scopes: The list of permitted scopes. (Default: ``None``)
    :param is_internal: If ``True`` it's an internal access token.
        (Default: ``False``)
    :returns: A new access token.
    """
with db.session.begin_nested(): scopes = " ".join(scopes) if scopes else "" c = Client( name=name, user_id=user_id, is_internal=True, is_confidential=False, _default_scopes=scopes ) c.gen_salt() t = Token( client_id=c.client_id, user_id=user_id, access_token=gen_salt( current_app.config.get( 'OAUTH2SERVER_TOKEN_PERSONAL_SALT_LEN') ), expires=None, _scopes=scopes, is_personal=True, is_internal=is_internal, ) db.session.add(c) db.session.add(t) return t
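Typical use from a script; the client name and scope below are illustrative only, and the outer commit is needed because ``create_personal`` only opens a nested transaction:

    token = Token.create_personal('my-cli', user_id=1, scopes=['test:scope'])
    db.session.commit()
    print(token.access_token)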
<SYSTEM_TASK:> Validate a redirect URI. <END_TASK> <USER_TASK:> Description: def validate_redirect_uri(value): """Validate a redirect URI. Redirect URIs must be a valid URL and use https unless the host is localhost for which http is accepted. :param value: The redirect URI. """
sch, netloc, path, par, query, fra = urlparse(value) if not (sch and netloc): raise InvalidRedirectURIError() if sch != 'https': if ':' in netloc: netloc, port = netloc.split(':', 1) if not (netloc in ('localhost', '127.0.0.1') and sch == 'http'): raise InsecureTransportError()
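The rules in practice; only the two local hosts escape the HTTPS requirement:

    validate_redirect_uri('https://example.org/callback')    # OK
    validate_redirect_uri('http://localhost:5000/callback')  # OK, local exemption
    validate_redirect_uri('http://127.0.0.1/callback')       # OK, local exemption

    try:
        validate_redirect_uri('http://example.org/callback')
    except InsecureTransportError:
        pass  # plain HTTP on a non-local host is rejected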
<SYSTEM_TASK:> Validate if each element in a list is a registered scope. <END_TASK> <USER_TASK:> Description: def validate_scopes(value_list): """Validate if each element in a list is a registered scope. :param value_list: The list of scopes. :raises invenio_oauth2server.errors.ScopeDoesNotExists: The exception is raised if a scope is not registered. :returns: ``True`` if it's successfully validated. """
for value in value_list: if value not in current_oauth2server.scopes: raise ScopeDoesNotExists(value) return True
<SYSTEM_TASK:>
Return modhex interpretation of the provided OTP.
<END_TASK>
<USER_TASK:>
Description:
def get_otp_modehex_interpretation(self, otp):
    """
    Return modhex interpretation of the provided OTP.

    If there are multiple interpretations available, the first one is
    used; if the OTP uses all 16 characters in its alphabet, there is
    only one possible interpretation of that OTP.

    :return: Modhex interpretation of the OTP.
    :rtype: ``str``
    """
    try:
        interpretations = translate(u(otp))
    except Exception:
        return otp

    if len(interpretations) == 0:
        return otp
    elif len(interpretations) > 1:
        # If there are multiple interpretations, first try to use the same
        # translation as the input OTP. If one is not found, fall through
        # and use an arbitrary interpretation.
        if u(otp) in interpretations:
            return otp

    return interpretations.pop()
<SYSTEM_TASK:> Construct a shift function from an alphabet. <END_TASK> <USER_TASK:> Description: def make_shift_function(alphabet): """Construct a shift function from an alphabet. Examples: Shift cases independently >>> make_shift_function([string.ascii_uppercase, string.ascii_lowercase]) <function make_shift_function.<locals>.shift_case_sensitive> Additionally shift punctuation characters >>> make_shift_function([string.ascii_uppercase, string.ascii_lowercase, string.punctuation]) <function make_shift_function.<locals>.shift_case_sensitive> Shift entire ASCII range, overflowing cases >>> make_shift_function([''.join(chr(x) for x in range(32, 127))]) <function make_shift_function.<locals>.shift_case_sensitive> Args: alphabet (iterable): Ordered iterable of strings representing separate cases of an alphabet Returns: Function (shift, symbol) """
def shift_case_sensitive(shift, symbol): case = [case for case in alphabet if symbol in case] if not case: return symbol case = case[0] index = case.index(symbol) return case[(index - shift) % len(case)] return shift_case_sensitive
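Building and applying a case-preserving shift; symbols outside every case (here the comma and exclamation mark) pass through untouched:

    import string

    shift = make_shift_function([string.ascii_uppercase, string.ascii_lowercase])
    ''.join(shift(3, symbol) for symbol in 'Khoor')    # -> 'Hello'
    ''.join(shift(3, symbol) for symbol in 'Kh,oor!')  # -> 'He,llo!'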
<SYSTEM_TASK:>
Break ``ciphertext`` by enumerating keys between ``min_key`` and ``max_key``.
<END_TASK>
<USER_TASK:>
Description:
def crack(ciphertext, *fitness_functions, min_key=0, max_key=26, shift_function=shift_case_english):
    """Break ``ciphertext`` by enumerating keys between ``min_key`` and ``max_key``.

    Example:
        >>> decryptions = crack("KHOOR", fitness.english.quadgrams)
        >>> print(''.join(decryptions[0].plaintext))
        HELLO

    Args:
        ciphertext (iterable): The symbols to decrypt
        *fitness_functions (variable length argument list): Functions to score decryption with

    Keyword Args:
        min_key (int): Key to start with
        max_key (int): Key to stop at (exclusive)
        shift_function (function(shift, symbol)): Shift function to use

    Returns:
        Sorted list of decryptions

    Raises:
        ValueError: If min_key is not less than max_key
        ValueError: If no fitness_functions are given
    """
if min_key >= max_key: raise ValueError("min_key cannot exceed max_key") decryptions = [] for key in range(min_key, max_key): plaintext = decrypt(key, ciphertext, shift_function=shift_function) decryptions.append(Decryption(plaintext, key, score(plaintext, *fitness_functions))) return sorted(decryptions, reverse=True)
<SYSTEM_TASK:> Decrypt Shift enciphered ``ciphertext`` using ``key``. <END_TASK> <USER_TASK:> Description: def decrypt(key, ciphertext, shift_function=shift_case_english): """Decrypt Shift enciphered ``ciphertext`` using ``key``. Examples: >>> ''.join(decrypt(3, "KHOOR")) HELLO >> decrypt(15, [0xcf, 0x9e, 0xaf, 0xe0], shift_bytes) [0xde, 0xad, 0xbe, 0xef] Args: key (int): The shift to use ciphertext (iterable): The symbols to decrypt shift_function (function (shift, symbol)): Shift function to apply to symbols in the ciphertext Returns: Decrypted ciphertext, list of plaintext symbols """
return [shift_function(key, symbol) for symbol in ciphertext]
<SYSTEM_TASK:> Attempts to decode an HTML string into unicode. <END_TASK> <USER_TASK:> Description: def unicode_from_html(content): """Attempts to decode an HTML string into unicode. If unsuccessful, the original content is returned. """
encodings = get_encodings_from_content(content) for encoding in encodings: try: return unicode(content, encoding) except (UnicodeError, TypeError): pass return content
<SYSTEM_TASK:> Load your libspotify application key file. <END_TASK> <USER_TASK:> Description: def load_application_key_file(self, filename=b'spotify_appkey.key'): """Load your libspotify application key file. If called without arguments, it tries to read ``spotify_appkey.key`` from the current working directory. This is an alternative to setting :attr:`application_key` yourself. The file must be a binary key file, not the C code key file that can be compiled into an application. """
    with open(filename, 'rb') as fh:
        # The docstring documents this as the `application_key` attribute,
        # so store it under that name rather than an abbreviated one.
        self.application_key = fh.read()
<SYSTEM_TASK:> Lays the text out into separate lines and calculates their <END_TASK> <USER_TASK:> Description: def _do_layout(self, data): """ Lays the text out into separate lines and calculates their total height. """
c = data['output'] word_space = c.text_width( ' ', font_name=self.font_name, font_size=self.font_size) # Arrange the text as words on lines self._layout = [[]] x = self.font_size if self.paragraph_indent else 0 for word in self.text.split(): ww = c.text_width( word, font_name=self.font_name, font_size=self.font_size) if x + ww > self.width: # Newline x = 0 self._layout.append([]) self._layout[-1].append(word) x += ww + word_space # Work out the height we need num_lines = len(self._layout) self.height = ( num_lines * self.font_size + (num_lines-1)*(self.font_size * (self.leading - 1.0)) )
<SYSTEM_TASK:> Iterates over the response data. This avoids reading the content <END_TASK> <USER_TASK:> Description: def iter_content(self, chunk_size=10 * 1024, decode_unicode=None): """Iterates over the response data. This avoids reading the content at once into memory for large responses. The chunk size is the number of bytes it should read into memory. This is not necessarily the length of each item returned as decoding can take place. """
if self._content_consumed: raise RuntimeError( 'The content for this response was already consumed' ) def generate(): while 1: chunk = self.raw.read(chunk_size) if not chunk: break yield chunk self._content_consumed = True gen = generate() if 'gzip' in self.headers.get('content-encoding', ''): gen = stream_decode_gzip(gen) if decode_unicode is None: decode_unicode = self.config.get('decode_unicode') if decode_unicode: gen = stream_decode_response_unicode(gen, self) return gen
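A sketch of streaming a large body to disk, assuming a requests version of this era where ``prefetch=False`` defers reading the body (newer releases spell this ``stream=True``); the URL and filename are placeholders:

    r = requests.get('http://example.org/large-file.bin', prefetch=False)
    with open('large-file.bin', 'wb') as fh:
        for chunk in r.iter_content(chunk_size=10 * 1024):
            fh.write(chunk)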
<SYSTEM_TASK:> Get a descriptor out of a file object. <END_TASK> <USER_TASK:> Description: def _get_fd(fileobj): """ Get a descriptor out of a file object. :param fileobj: An integer (existing descriptor) or any object having the `fileno()` method. :raises ValueError: if the descriptor cannot be obtained or if the descriptor is invalid :returns: file descriptor number """
if isinstance(fileobj, int): fd = fileobj else: try: fd = fileobj.fileno() except AttributeError: fd = None if fd is None or fd < 0: raise ValueError("invalid fileobj: {!r}".format(fileobj)) return fd
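Both accepted forms, as a quick illustration:

    import socket

    sock = socket.socket()
    assert _get_fd(sock) == sock.fileno()  # file-like object with fileno()
    assert _get_fd(5) == 5                 # raw descriptor passed through
    _get_fd(object())                      # raises ValueError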
<SYSTEM_TASK:> Remove interest in IO events from the specified fileobj <END_TASK> <USER_TASK:> Description: def unregister(self, fileobj): """ Remove interest in IO events from the specified fileobj :param fileobj: Any existing file-like object that has a fileno() method and was previously registered with :meth:`register()` :raises ValueError: if `fileobj` is invalid or not supported :raises KeyError: if the descriptor associated with `fileobj` is not registered. :returns: A :class:`SelectorKey` associated with the passed arguments """
fd = _get_fd(fileobj) key = self._fd_map[fd] try: self._epoll.unregister(fd) except OSError: pass del self._fd_map[fd] return key
<SYSTEM_TASK:> Modify interest in specified IO events on the specified file object <END_TASK> <USER_TASK:> Description: def modify(self, fileobj, events, data=None): """ Modify interest in specified IO events on the specified file object :param fileobj: Any existing file-like object that has a fileno() method :param events: A bitmask composed of EVENT_READ and EVENT_WRITE :param data: (optional) Arbitrary data :raises ValueError: if `fileobj` is invalid or not supported :raises KeyError: if the descriptor associated with `fileobj` is not registered. :returns: The new :class:`SelectorKey` associated with the passed arguments """
fd = _get_fd(fileobj) epoll_events = _EpollSelectorEvents(events).get_epoll_events() if fd not in self._fd_map: raise KeyError("{!r} is not registered".format(fileobj)) key = SelectorKey(fileobj, fd, events, data) self._fd_map[fd] = key self._epoll.modify(fd, epoll_events) return key
<SYSTEM_TASK:> Wait until one or more of the registered file objects becomes ready <END_TASK> <USER_TASK:> Description: def select(self, timeout=None): """ Wait until one or more of the registered file objects becomes ready or until the timeout expires. :param timeout: maximum wait time, in seconds (see below for special meaning) :returns: A list of pairs (key, events) for each ready file object. Note that the list may be empty if non-blocking behavior is selected or if the blocking wait is interrupted by a signal. The timeout argument has two additional special cases: 1) If timeout is None then the call will block indefinitely 2) If timeout <= 0 the call will never block """
        if timeout is None:
            epoll_timeout = -1
        elif timeout <= 0:
            epoll_timeout = 0
        else:
            epoll_timeout = timeout
        max_events = len(self._fd_map) or -1
        result = []
        for fd, epoll_events in self._epoll.poll(epoll_timeout, max_events):
            key = self._fd_map.get(fd)
            # Skip descriptors that were unregistered in the meantime; the
            # original dereferenced key.events before checking for None.
            if key:
                events = _EpollSelectorEvents.from_epoll_events(epoll_events)
                # Only report events the caller registered interest in.
                events &= key.events
                result.append((key, _EpollSelectorEvents(events)))
        return result
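A minimal polling loop, assuming the enclosing class is named ``EpollSelector`` and exposes the usual ``register()`` counterpart to ``unregister()``; the server socket and callback are hypothetical:

    sel = EpollSelector()
    sel.register(server_sock, EVENT_READ, data=accept_connection)

    while True:
        # select() returns only (key, events) pairs that are ready.
        for key, events in sel.select(timeout=1.0):
            callback = key.data
            callback(key.fileobj, events)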
<SYSTEM_TASK:> Authenticate to Spotify's servers. <END_TASK> <USER_TASK:> Description: def login(self, username, password=None, blob=None, zeroconf=None): """Authenticate to Spotify's servers. You can login with one of three combinations: - ``username`` and ``password`` - ``username`` and ``blob`` - ``username`` and ``zeroconf`` To get the ``blob`` string, you must once log in with ``username`` and ``password``. You'll then get the ``blob`` string passed to the :attr:`~ConnectionCallbacks.new_credentials` callback. """
username = utils.to_char(username) if password is not None: password = utils.to_char(password) spotifyconnect.Error.maybe_raise( lib.SpConnectionLoginPassword( username, password)) elif blob is not None: blob = utils.to_char(blob) spotifyconnect.Error.maybe_raise( lib.SpConnectionLoginBlob(username, blob)) elif zeroconf is not None: spotifyconnect.Error.maybe_raise( lib.SpConnectionLoginZeroConf( username, *zeroconf)) else: raise AttributeError( "Must specify a login method (password, blob or zeroconf)")
<SYSTEM_TASK:> Turn on the alsa_sink sink. <END_TASK> <USER_TASK:> Description: def on(self): """Turn on the alsa_sink sink. This is done automatically when the sink is instantiated, so you'll only need to call this method if you ever call :meth:`off` and want to turn the sink back on. """
assert spotifyconnect._session_instance.player.num_listeners( spotifyconnect.PlayerEvent.MUSIC_DELIVERY) == 0 spotifyconnect._session_instance.player.on( spotifyconnect.PlayerEvent.MUSIC_DELIVERY, self._on_music_delivery)