Dataset columns:
  docstring            string   lengths 52 to 499
  function             string   lengths 67 to 35.2k
  __index_level_0__    int64    values 52.6k to 1.16M
returns Markdown text of selected fragment. Args: endpoint: URL, file, or HTML string stype: { 'css' | 'xpath' } selector: CSS selector or XPath expression Returns: Markdown text Options: clean: cleans fragment (lxml.html.clean defaults) raw: returns raw HTML fragment verbose: show http status, encoding, headers
def frag2text(endpoint, stype, selector, clean=False, raw=False, verbose=False):
    try:
        return main(endpoint, stype, selector, clean, raw, verbose)
    except StandardError as err:
        return err
927,488
Returns unpacked Python struct array. Args: offset (int): offset to byte array within structure length (int): how many bytes to unpack format (str): Python struct format string for unpacking See Also: https://docs.python.org/2/library/struct.html#format-characters
def get_field(self, offset, length, format):
    return struct.unpack(format, self.data[offset:offset + length])[0]
928,086
Returns string (length bytes) Args: offset (int): string offset in byte array length (int): string length
def get_string(self, offset, length):
    return struct.unpack(str(length) + "s", self.data[offset:offset + length])[0]
928,087
Exports byte array to specified destination Args: filename (str): destination to output file offset (int): byte offset (default: 0)
def export(self, filename, offset=0, length=None):
    self.__validate_offset(filename=filename, offset=offset, length=length)
    with open(filename, 'w') as f:
        if length is None:
            length = len(self.data) - offset
        # Slice `length` bytes starting at `offset`; using `length` directly as
        # the end index would drop trailing bytes whenever offset > 0.
        output = self.data[offset:offset + length]
        f.write(output)
928,088
Print out a help message and exit the program. Args: command: If a command value is supplied then print the help message for the command module if available. If the command is '-a' or '--all', then print the standard help message but with a full list of available commands. Raises: ValueError: Raised if the help message is requested for an invalid command or an unrecognized option is passed to help.
def get_help_usage(command):
    # type: (str) -> None
    if not command:
        doc = get_primary_command_usage()
    elif command in ('-a', '--all'):
        subcommands = [k for k in settings.subcommands if k is not None]
        available_commands = subcommands + ['help']
        command_doc = '\nAvailable commands:\n{}\n'.format(
            '\n'.join(' {}'.format(c) for c in sorted(available_commands)))
        doc = get_primary_command_usage(command_doc)
    elif command.startswith('-'):
        raise ValueError("Unrecognized option '{}'.".format(command))
    elif command in settings.subcommands:
        subcommand = settings.subcommands[command]
        doc = format_usage(subcommand.__doc__)
    docopt.docopt(doc, argv=('--help',))
928,319
Format the docstring for display to the user. Args: doc: The docstring to reformat for display. Returns: The docstring formatted to parse and display to the user. This includes dedenting, rewrapping, and translating the docstring if necessary.
def format_usage(doc, width=None):
    # type: (str, Optional[int]) -> str
    sections = doc.replace('\r', '').split('\n\n')
    width = width or get_terminal_size().columns or 80
    return '\n\n'.join(_wrap_section(s.strip(), width) for s in sections)
928,320
Parse a docopt-style string for commands and subcommands. Args: docstring: A docopt-style string to parse. If the string is not a valid docopt-style string, it will not yield any values. Yields: All tuples of commands and subcommands found in the docopt docstring.
def parse_commands(docstring):
    # type: (str) -> Generator[Tuple[List[str], List[str]], None, None]
    try:
        docopt.docopt(docstring, argv=())
    except (TypeError, docopt.DocoptLanguageError):
        return
    except docopt.DocoptExit:
        pass
    for command in _parse_section('usage', docstring):
        args = command.split()
        commands = []
        i = 0
        for i, arg in enumerate(args):
            if arg[0].isalpha() and not arg[0].isupper():
                commands.append(arg)
            else:
                break
        yield commands, args[i:]
928,321
Merge two usage strings together. Args: original: The source of headers and initial section lines. to_merge: The source for the additional section lines to append. Returns: A new usage string that contains information from both usage strings.
def _merge_doc(original, to_merge):
    # type: (str, str) -> str
    if not original:
        return to_merge or ''
    if not to_merge:
        return original or ''
    sections = []
    for name in ('usage', 'arguments', 'options'):
        sections.append(_merge_section(
            _get_section(name, original),
            _get_section(name, to_merge)
        ))
    return format_usage('\n\n'.join(s for s in sections).rstrip())
928,322
Merge two sections together. Args: original: The source of header and initial section lines. to_merge: The source for the additional section lines to append. Returns: A new section string that uses the header of the original argument and the section lines from both.
def _merge_section(original, to_merge):
    # type: (str, str) -> str
    if not original:
        return to_merge or ''
    if not to_merge:
        return original or ''
    try:
        index = original.index(':') + 1
    except ValueError:
        index = original.index('\n')
    name = original[:index].strip()
    section = '\n '.join(
        (original[index + 1:].lstrip(), to_merge[index + 1:].lstrip())
    ).rstrip()
    return '{name}\n {section}'.format(name=name, section=section)
928,323
Extract the named section from the source. Args: name: The name of the section to extract (e.g. "Usage"). source: The usage string to parse. Returns: A string containing only the requested section. If the section appears multiple times, each instance will be merged into a single section.
def _get_section(name, source):
    # type: (str, str) -> Optional[str]
    pattern = re.compile(
        '^([^\n]*{name}[^\n]*\n?(?:[ \t].*?(?:\n|$))*)'.format(name=name),
        re.IGNORECASE | re.MULTILINE)
    usage = None
    for section in pattern.findall(source):
        usage = _merge_section(usage, section.strip())
    return usage
928,324
Wrap the given section string to the current terminal size. Intelligently wraps the section string to the given width. When wrapping section lines, it auto-adjusts the spacing between terms and definitions. It also adjusts commands to fit the correct length for the arguments. Args: source: The section string to wrap. Returns: The wrapped section string.
def _wrap_section(source, width):
    # type: (str, int) -> str
    if _get_section('usage', source):
        return _wrap_usage_section(source, width)
    if _is_definition_section(source):
        return _wrap_definition_section(source, width)
    lines = inspect.cleandoc(source).splitlines()
    paragraphs = (textwrap.wrap(line, width, replace_whitespace=False)
                  for line in lines)
    return '\n'.join(line for paragraph in paragraphs for line in paragraph)
928,325
Determine if the source is a definition section. Args: source: The usage string source that may be a section. Returns: True if the source describes a definition section; otherwise, False.
def _is_definition_section(source):
    try:
        definitions = textwrap.dedent(source).split('\n', 1)[1].splitlines()
        return all(
            re.match(r'\s\s+((?!\s\s).+)\s\s+.+', s) for s in definitions)
    except IndexError:
        return False
928,326
Wrap the given usage section string to the current terminal size. Note: Commands arguments are wrapped to the column that the arguments began on the first line of the command. Args: source: The section string to wrap. Returns: The wrapped section string.
def _wrap_usage_section(source, width):
    # type: (str, int) -> str
    if not any(len(line) > width for line in source.splitlines()):
        return source
    section_header = source[:source.index(':') + 1].strip()
    lines = [section_header]
    for commands, args in parse_commands(source):
        command = ' {} '.format(' '.join(commands))
        max_len = width - len(command)
        sep = '\n' + ' ' * len(command)
        wrapped_args = sep.join(textwrap.wrap(' '.join(args), max_len))
        full_command = command + wrapped_args
        lines += full_command.splitlines()
    return '\n'.join(lines)
928,327
Wrap the given definition section string to the current terminal size. Note: Auto-adjusts the spacing between terms and definitions. Args: source: The section string to wrap. Returns: The wrapped section string.
def _wrap_definition_section(source, width):
    # type: (str, int) -> str
    index = source.index('\n') + 1
    definitions, max_len = _get_definitions(source[index:])
    sep = '\n' + ' ' * (max_len + 4)
    lines = [source[:index].strip()]
    for arg, desc in six.iteritems(definitions):
        wrapped_desc = sep.join(textwrap.wrap(desc, width - max_len - 4))
        lines.append(' {arg:{size}} {desc}'.format(
            arg=arg,
            size=str(max_len),
            desc=wrapped_desc
        ))
    return '\n'.join(lines)
928,328
Extract a dictionary of arguments and definitions. Args: source: The source for a section of a usage string that contains definitions. Returns: A two-tuple containing a dictionary of all arguments and definitions as well as the length of the longest argument.
def _get_definitions(source):
    # type: (str) -> Tuple[Dict[str, str], int]
    max_len = 0
    descs = collections.OrderedDict()  # type: Dict[str, str]
    lines = (s.strip() for s in source.splitlines())
    non_empty_lines = (s for s in lines if s)
    for line in non_empty_lines:
        if line:
            arg, desc = re.split(r'\s\s+', line.strip())
            arg_len = len(arg)
            if arg_len > max_len:
                max_len = arg_len
            descs[arg] = desc
    return descs, max_len
928,329
Convert Half-width (Hankaku) Katakana to Full-width (Zenkaku) Katakana Params: <unicode> text <unicode> ignore Return: <unicode> converted_text
def h2z(text, ignore='', kana=True, ascii=False, digit=False):
    def _conv_dakuten(text):
        # The first argument of each replace is the half-width form (base kana
        # plus a separate dakuten/handakuten code point); the second is the
        # single full-width code point.
        text = text.replace(u"ｶﾞ", u"ガ").replace(u"ｷﾞ", u"ギ")
        text = text.replace(u"ｸﾞ", u"グ").replace(u"ｹﾞ", u"ゲ")
        text = text.replace(u"ｺﾞ", u"ゴ").replace(u"ｻﾞ", u"ザ")
        text = text.replace(u"ｼﾞ", u"ジ").replace(u"ｽﾞ", u"ズ")
        text = text.replace(u"ｾﾞ", u"ゼ").replace(u"ｿﾞ", u"ゾ")
        text = text.replace(u"ﾀﾞ", u"ダ").replace(u"ﾁﾞ", u"ヂ")
        text = text.replace(u"ﾂﾞ", u"ヅ").replace(u"ﾃﾞ", u"デ")
        text = text.replace(u"ﾄﾞ", u"ド").replace(u"ﾊﾞ", u"バ")
        text = text.replace(u"ﾋﾞ", u"ビ").replace(u"ﾌﾞ", u"ブ")
        text = text.replace(u"ﾍﾞ", u"ベ").replace(u"ﾎﾞ", u"ボ")
        text = text.replace(u"ﾊﾟ", u"パ").replace(u"ﾋﾟ", u"ピ")
        text = text.replace(u"ﾌﾟ", u"プ").replace(u"ﾍﾟ", u"ペ")
        return text.replace(u"ﾎﾟ", u"ポ").replace(u"ｳﾞ", u"ヴ")
    if ascii:
        if digit:
            if kana:
                h2z_map = H2Z_ALL
            else:
                h2z_map = H2Z_AD
        elif kana:
            h2z_map = H2Z_AK
        else:
            h2z_map = H2Z_A
    elif digit:
        if kana:
            h2z_map = H2Z_DK
        else:
            h2z_map = H2Z_D
    else:
        h2z_map = H2Z_K
    if kana:
        text = _conv_dakuten(text)
    if ignore:
        h2z_map = _exclude_ignorechar(ignore, h2z_map.copy())
    return _convert(text, h2z_map)
928,683
Convert Half-width (Hankaku) Katakana to Full-width (Zenkaku) Katakana, Full-width (Zenkaku) ASCII and DIGIT to Half-width (Hankaku) ASCII and DIGIT. Additionally, Full-width wave dash (〜) etc. are normalized Params: <unicode> text <unicode> ignore Return: <unicode> converted_text
def normalize(text, mode='NFKC', ignore=''):
    text = text.replace(u'〜', u'ー').replace(u'~', u'ー')
    text = text.replace(u"’", "'").replace(u'”', '"').replace(u'“', '``')
    text = text.replace(u'―', '-').replace(u'‐', u'-')
    return unicodedata.normalize(mode, text)
928,684
This method parses a UUID protobuf message type from its component 'high' and 'low' longs into a standard formatted UUID string Args: x (dict): containing keys, 'low' and 'high' corresponding to the UUID protobuf message type Returns: str: UUID formatted string
def get_uuid_string(low=None, high=None, **x):
    if low is None or high is None:
        return None
    x = ''.join([parse_part(low), parse_part(high)])
    return '-'.join([x[:8], x[8:12], x[12:16], x[16:20], x[20:32]])
929,268
Send a JSON RPC notification to the client. Args: method (str): The method name of the notification to send params (any): The payload of the notification
def notify(self, method, params=None):
    log.debug('Sending notification: %s %s', method, params)
    message = {
        'jsonrpc': JSONRPC_VERSION,
        'method': method,
    }
    if params is not None:
        message['params'] = params
    self._consumer(message)
930,623
Send a JSON RPC request to the client. Args: method (str): The method name of the message to send params (any): The payload of the message Returns: Future that will resolve once a response has been received
def request(self, method, params=None):
    msg_id = self._id_generator()
    log.debug('Sending request with id %s: %s %s', msg_id, method, params)
    message = {
        'jsonrpc': JSONRPC_VERSION,
        'id': msg_id,
        'method': method,
    }
    if params is not None:
        message['params'] = params
    request_future = futures.Future()
    request_future.add_done_callback(self._cancel_callback(msg_id))
    self._server_request_futures[msg_id] = request_future
    self._consumer(message)
    return request_future
930,624
Consume a JSON RPC message from the client. Args: message (dict): The JSON RPC message sent by the client
def consume(self, message):
    if 'jsonrpc' not in message or message['jsonrpc'] != JSONRPC_VERSION:
        log.warn("Unknown message type %s", message)
        return
    if 'id' not in message:
        log.debug("Handling notification from client %s", message)
        self._handle_notification(message['method'], message.get('params'))
    elif 'method' not in message:
        log.debug("Handling response from client %s", message)
        self._handle_response(message['id'], message.get('result'),
                              message.get('error'))
    else:
        try:
            log.debug("Handling request from client %s", message)
            self._handle_request(message['id'], message['method'],
                                 message.get('params'))
        except JsonRpcException as e:
            log.exception("Failed to handle request %s", message['id'])
            self._consumer({
                'jsonrpc': JSONRPC_VERSION,
                'id': message['id'],
                'error': e.to_dict()
            })
        except Exception:  # pylint: disable=broad-except
            log.exception("Failed to handle request %s", message['id'])
            self._consumer({
                'jsonrpc': JSONRPC_VERSION,
                'id': message['id'],
                'error': JsonRpcInternalError.of(sys.exc_info()).to_dict()
            })
930,626
Blocking call to listen for messages on the rfile. Args: message_consumer (fn): function that is passed each message as it is read off the socket.
def listen(self, message_consumer):
    while not self._rfile.closed:
        request_str = self._read_message()
        if request_str is None:
            break
        try:
            message_consumer(json.loads(request_str.decode('utf-8')))
        except ValueError:
            log.exception("Failed to parse JSON message %s", request_str)
            continue
930,634
Tries to find the class that defined the specified method. Will not work for nested classes (locals). Args: fun: Function / Method Returns: Returns the class which defines the given method / function.
def get_class_that_defined_method(fun):
    if inspect.ismethod(fun):
        for cls in inspect.getmro(fun.__self__.__class__):
            if cls.__dict__.get(fun.__name__) is fun:
                return cls
        fun = fun.__func__  # fallback to __qualname__ parsing
    if inspect.isfunction(fun):
        cls = getattr(
            inspect.getmodule(fun),
            fun.__qualname__.split('.<locals>', 1)[0].rsplit('.', 1)[0],
            None)
        if isinstance(cls, type):
            return cls
    return getattr(fun, '__objclass__', None)
930,966
Interface to PKCS5_PBKDF2_HMAC function Parameters: @param password - password to derive key from @param salt - random salt to use for key derivation @param outlen - number of bytes to derive @param digesttype - name of digest to use (default sha1) @param iterations - number of iterations to use @returns outlen bytes of key material derived from password and salt
def pbkdf2(password, salt, outlen, digesttype="sha1", iterations=2000):
    dgst = DigestType(digesttype)
    out = create_string_buffer(outlen)
    if isinstance(password, chartype):
        pwd = password.encode("utf-8")
    else:
        pwd = password
    res = libcrypto.PKCS5_PBKDF2_HMAC(pwd, len(pwd), salt, len(salt),
                                      iterations, dgst.digest, outlen, out)
    if res <= 0:
        raise LibCryptoError("error computing PBKDF2")
    return out.raw
931,525
Creates and returns a function that takes a slot and checks if it matches the wanted item. Args: wanted: function(Slot) or Slot or itemID or (itemID, metadata)
def make_slot_check(wanted):
    if isinstance(wanted, types.FunctionType):
        return wanted  # just forward the slot check function
    if isinstance(wanted, int):
        item, meta = wanted, None
    elif isinstance(wanted, Slot):
        item, meta = wanted.item_id, wanted.damage  # TODO compare NBT
    elif isinstance(wanted, (Item, Block)):
        item, meta = wanted.id, wanted.metadata
    elif isinstance(wanted, str):
        item_or_block = get_item_or_block(wanted, init=True)
        item, meta = item_or_block.id, item_or_block.metadata
    else:  # wanted is (id, meta)
        try:
            item, meta = wanted
        except TypeError:
            raise ValueError('Illegal args for make_slot_check(): %s' % wanted)
    return lambda slot: item == slot.item_id and meta in (None, slot.damage)
932,401
Called when the click was successful and should be applied to the inventory. Args: inv_plugin (InventoryPlugin): inventory plugin instance emit_set_slot (func): function to signal a slot change, should be InventoryPlugin().emit_set_slot
def on_success(self, inv_plugin, emit_set_slot):
    self.dirty = set()
    self.apply(inv_plugin)
    for changed_slot in self.dirty:
        emit_set_slot(changed_slot)
932,412
Calculates the total number of items of that type in the current window or given slot range. Args: wanted: function(Slot) or Slot or itemID or (itemID, metadata)
def total_stored(self, wanted, slots=None):
    if slots is None:
        slots = self.window.slots
    wanted = make_slot_check(wanted)
    return sum(slot.amount for slot in slots if wanted(slot))
932,435
Searches the given slots or, if not given, active hotbar slot, hotbar, inventory, open window in this order. Args: wanted: function(Slot) or Slot or itemID or (itemID, metadata) Returns: Optional[Slot]: The first slot containing the item or None if not found.
def find_slot(self, wanted, slots=None):
    for slot in self.find_slots(wanted, slots):
        return slot
    return None
932,436
Yields all slots containing the item. Searches the given slots or, if not given, active hotbar slot, hotbar, inventory, open window in this order. Args: wanted: function(Slot) or Slot or itemID or (itemID, metadata)
def find_slots(self, wanted, slots=None):
    if slots is None:
        slots = self.inv_slots_preferred + self.window.window_slots
    wanted = make_slot_check(wanted)
    for slot in slots:
        if wanted(slot):
            yield slot
932,437
Left-click or right-click the slot. Args: slot (Slot): The clicked slot. Can be ``Slot`` instance or integer. Set to ``inventory.cursor_slot`` for clicking outside the window.
def click_slot(self, slot, right=False):
    if isinstance(slot, int):
        slot = self.window.slots[slot]
    button = constants.INV_BUTTON_RIGHT \
        if right else constants.INV_BUTTON_LEFT
    return self.send_click(windows.SingleClick(slot, button))
932,439
Extends getattr to allow dots in name to indicate nested objects. Args: obj (object): an object. name (str): a name for a field in the object. Returns: Any: the value of named attribute. Raises: AttributeError: if the named attribute does not exist.
def _str_dotted_getattr(obj, name):
    for part in name.split('.'):
        obj = getattr(obj, part)
    return str(obj) if obj else None
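A minimal usage sketch of the dotted lookup; the Shelf and Book namedtuples are hypothetical stand-ins, not part of the original module:

from collections import namedtuple

Book = namedtuple('Book', ['title'])
Shelf = namedtuple('Shelf', ['book'])

shelf = Shelf(book=Book(title='Moby Dick'))
_str_dotted_getattr(shelf, 'book.title')  # -> 'Moby Dick'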
932,859
Adds elts to the tasks. Args: elts (Sequence): an iterable of elements that can be appended to the task's bundle_field. Returns: Event: an event that can be used to wait on the response.
def extend(self, elts):
    # Use a copy, not a reference, as it is later necessary to mutate
    # the proto field from which elts are drawn in order to construct
    # the bundled request.
    elts = elts[:]
    self._in_deque.append(elts)
    event = self._event_for(elts)
    self._event_deque.append(event)
    return event
932,865
Constructor. Args: options (gax.BundleOptions): configures strategy this instance uses when executing bundled functions.
def __init__(self, options):
    self._options = options
    self._tasks = {}
    self._task_lock = threading.RLock()
932,868
Updates a_func so that it gets called with the timeout as its final arg. This converts a callable, a_func, into another callable with an additional positional arg. Args: a_func (callable): a callable to be updated timeout (int): to be added to the original callable as its final positional arg. kwargs: Additional arguments passed through to the callable. Returns: callable: the original callable updated to the timeout arg
def add_timeout_arg(a_func, timeout, **kwargs):
    def inner(*args):
        updated_args = args + (timeout,)
        return a_func(*updated_args, **kwargs)
    return inner
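A small sketch of how the wrapper behaves; fetch is a hypothetical callable used only for illustration:

def fetch(resource, timeout):
    return '{} fetched with timeout={}'.format(resource, timeout)

fetch_with_timeout = add_timeout_arg(fetch, 30)
fetch_with_timeout('users')  # -> 'users fetched with timeout=30'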
932,878
Creates a function equivalent to a_func, but that retries on certain exceptions. Args: a_func (callable): A callable. retry_options (RetryOptions): Configures the exceptions upon which the callable should retry, and the parameters to the exponential backoff retry algorithm. kwargs: Additional arguments passed through to the callable. Returns: Callable: A function that will retry on exception.
def retryable(a_func, retry_options, **kwargs):
    delay_mult = retry_options.backoff_settings.retry_delay_multiplier
    max_delay_millis = retry_options.backoff_settings.max_retry_delay_millis
    has_timeout_settings = _has_timeout_settings(retry_options.backoff_settings)
    if has_timeout_settings:
        timeout_mult = retry_options.backoff_settings.rpc_timeout_multiplier
        max_timeout = (retry_options.backoff_settings.max_rpc_timeout_millis /
                       _MILLIS_PER_SECOND)
        total_timeout = (retry_options.backoff_settings.total_timeout_millis /
                         _MILLIS_PER_SECOND)

    def inner(*args):
        delay = retry_options.backoff_settings.initial_retry_delay_millis
        exc = errors.RetryError('Retry total timeout exceeded before any'
                                'response was received')
        if has_timeout_settings:
            timeout = (
                retry_options.backoff_settings.initial_rpc_timeout_millis /
                _MILLIS_PER_SECOND)
            now = time.time()
            deadline = now + total_timeout
        else:
            timeout = None
            deadline = None
        while deadline is None or now < deadline:
            try:
                to_call = add_timeout_arg(a_func, timeout, **kwargs)
                return to_call(*args)
            except Exception as exception:  # pylint: disable=broad-except
                code = config.exc_to_code(exception)
                if code not in retry_options.retry_codes:
                    raise errors.RetryError(
                        'Exception occurred in retry method that was not'
                        ' classified as transient', exception)
                exc = errors.RetryError(
                    'Retry total timeout exceeded with exception', exception)
                # Sleep a random number which will, on average, equal the
                # expected delay.
                to_sleep = random.uniform(0, delay * 2)
                time.sleep(to_sleep / _MILLIS_PER_SECOND)
                delay = min(delay * delay_mult, max_delay_millis)
                if has_timeout_settings:
                    now = time.time()
                    timeout = min(
                        timeout * timeout_mult, max_timeout, deadline - now)
        raise exc
    return inner
932,879
Raise ValueError if more than one keyword argument is not none. Args: kwargs (dict): The keyword arguments sent to the function. Returns: None Raises: ValueError: If more than one entry in kwargs is not none.
def check_oneof(**kwargs):
    # Sanity check: If no keyword arguments were sent, this is fine.
    if not kwargs:
        return None
    not_nones = [val for val in kwargs.values() if val is not None]
    if len(not_nones) > 1:
        raise ValueError('Only one of {fields} should be set.'.format(
            fields=', '.join(sorted(kwargs.keys())),
        ))
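A brief usage sketch; the keyword names are arbitrary examples:

check_oneof(page_token='abc', page_size=None)  # passes: only one value is not None
check_oneof(page_token='abc', page_size=10)    # raises ValueError: Only one of page_size, page_token should be set.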
932,888
Creates a function that yields an iterable to perform page-streaming. Args: page_descriptor (:class:`PageDescriptor`): indicates the structure of page streaming to be performed. Returns: Callable: A function that returns an iterator.
def _page_streamable(page_descriptor):
    def inner(a_func, settings, request, **kwargs):
        page_iterator = gax.PageIterator(
            a_func, page_descriptor, settings.page_token, request, **kwargs)
        if settings.flatten_pages:
            return gax.ResourceIterator(page_iterator)
        else:
            return page_iterator
    return inner
932,890
Helper for ``construct_settings()``. Takes two retry options, and merges them into a single RetryOption instance. Args: retry_options (RetryOptions): The base RetryOptions. overrides (RetryOptions): The RetryOptions used for overriding ``retry``. Use the values if it is not None. If entire ``overrides`` is None, ignore the base retry and return None. Returns: RetryOptions: The merged options, or None if it will be canceled.
def _merge_retry_options(retry_options, overrides):
    if overrides is None:
        return None
    if overrides.retry_codes is None and overrides.backoff_settings is None:
        return retry_options
    codes = retry_options.retry_codes
    if overrides.retry_codes is not None:
        codes = overrides.retry_codes
    backoff_settings = retry_options.backoff_settings
    if overrides.backoff_settings is not None:
        backoff_settings = overrides.backoff_settings
    return gax.RetryOptions(
        backoff_settings=backoff_settings,
        retry_codes=codes,
    )
932,893
Updates a_func to wrap exceptions with GaxError Args: a_func (callable): A callable. to_catch (list[Exception]): Configures the exceptions to wrap. Returns: Callable: A function that will wrap certain exceptions with GaxError
def _catch_errors(a_func, to_catch):
    def inner(*args, **kwargs):
        try:
            return a_func(*args, **kwargs)
        # pylint: disable=catching-non-exception
        except tuple(to_catch) as exception:
            utils.raise_with_traceback(
                gax.errors.create_error('RPC failed', cause=exception))
    return inner
932,896
Set the given key on the object. Args: pb_or_dict (Union[~google.protobuf.message.Message, Mapping]): the object. key (str): The key on the object in question. value (Any): The value to set. Raises: TypeError: If pb_or_dict is not a Message or Mapping.
def set(pb_or_dict, key, value):
    # pylint: disable=redefined-builtin,too-many-branches
    # redefined-builtin: We want 'set' to be part of the public interface.
    # too-many-branches: This method is inherently complex.
    # Sanity check: Is our target object valid?
    if not isinstance(pb_or_dict, (collections.MutableMapping, Message)):
        raise TypeError('Tried to set a key %s on an invalid object; '
                        'expected a dict or protobuf message.' % key)
    # We may be setting a nested key. Resolve this.
    key, subkey = _resolve_subkeys(key)
    # If a subkey exists, then get that object and call this method
    # recursively against it using the subkey.
    if subkey is not None:
        if isinstance(pb_or_dict, collections.MutableMapping):
            pb_or_dict.setdefault(key, {})
        set(get(pb_or_dict, key), subkey, value)
        return
    # Attempt to set the value on the types of objects we know how to deal
    # with.
    if isinstance(pb_or_dict, collections.MutableMapping):
        pb_or_dict[key] = value
    elif isinstance(value, (collections.MutableSequence, tuple)):
        # Clear the existing repeated protobuf message of any elements
        # currently inside it.
        while getattr(pb_or_dict, key):
            getattr(pb_or_dict, key).pop()
        # Write our new elements to the repeated field.
        for item in value:
            if isinstance(item, collections.Mapping):
                getattr(pb_or_dict, key).add(**item)
            else:
                getattr(pb_or_dict, key).extend([item])
    elif isinstance(value, collections.Mapping):
        # Assign the dictionary values to the protobuf message.
        for item_key, item_value in value.items():
            set(getattr(pb_or_dict, key), item_key, item_value)
    elif isinstance(value, Message):
        # Assign the protobuf message values to the protobuf message.
        for item_key, item_value in value.ListFields():
            set(getattr(pb_or_dict, key), item_key.name, item_value)
    else:
        setattr(pb_or_dict, key, value)
932,904
Given a key which may actually be a nested key, return the top level key and any nested subkeys as separate values. Args: key (str): A string that may or may not contain the separator. separator (str): The namespace separator. Defaults to `.`. Returns: Tuple[str, str]: The key and subkey(s).
def _resolve_subkeys(key, separator='.'):
    subkey = None
    if separator in key:
        index = key.index(separator)
        subkey = key[index + 1:]
        key = key[:index]
    return key, subkey
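For example, assuming the default separator:

_resolve_subkeys('book.title.text')  # -> ('book', 'title.text')
_resolve_subkeys('title')            # -> ('title', None)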
932,906
Constructor. Args: page_iterator (PageIterator): the base iterator of getting pages.
def __init__(self, page_iterator):
    self._page_iterator = page_iterator
    self._current = None
    self._index = -1
932,915
Construct a new 'FIOCurrently' object. Receives a ForecastIO object and gets the current weather conditions if they are available in the object. Args: forecast_io (ForecastIO): The ForecastIO object
def __init__(self, forecast_io):
    if forecast_io.has_currently():
        self.currently = forecast_io.get_currently()
        for item in self.currently.keys():
            setattr(self, item, self.currently[item])
933,805
Constructor for the DNS query class Params: nameserver- the nameserver to use, defaults to the local resolver rtype- the record type to lookup (as text), by default A timeout- how long to wait for a response, by default 10 seconds
def __init__(self, domains=[], results={}, nameservers=[],
             exclude_nameservers=[], rtype="A", timeout=10, max_threads=100):
    self.domains = domains
    self.results = results
    self.rtype = rtype
    self.timeout = timeout
    self.max_threads = max_threads
    if len(nameservers) == 0:
        nameservers = dns.resolver.Resolver().nameservers
    # remove excluded nameservers
    if len(exclude_nameservers) > 0:
        for nameserver in exclude_nameservers:
            if nameserver in nameservers:
                nameservers.remove(nameserver)
    # include google nameserver
    if "8.8.8.8" not in nameservers:
        nameservers.append("8.8.8.8")
    self.nameservers = nameservers
    self.threads = []
    # start point of port number to be used
    self.port = 30000
    # create thread lock for port number index
    self.port_lock = threading.Lock()
933,897
Create a script with the given content, mv it to the destination, and make it executable Parameters: content- the content to put in the script destination- the directory to copy to Note: due to constraints on os.rename, destination must be an absolute path to a file, not just a directory
def create_script_for_location(content, destination):
    temp = tempfile.NamedTemporaryFile(mode='w', delete=False)
    temp.write(content)
    temp.close()
    shutil.move(temp.name, destination)
    cur_perms = os.stat(destination).st_mode
    set_perms = cur_perms | stat.S_IXOTH | stat.S_IXGRP | stat.S_IXUSR
    os.chmod(destination, set_perms)
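A minimal usage sketch; the path and content are illustrative only, and per the note above the destination must be an absolute file path:

script_body = "#!/bin/sh\necho 'hello from generated script'\n"
create_script_for_location(script_body, "/tmp/hello.sh")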
933,960
Computes the parameters depending on :math:`\lambda`. Notes ----- It needs to be called again if :math:`\lambda` changes during evolution. Parameters ---------- params: A dictionary of the manually set parameters.
def computeParams(self, params):
    self.mu = params.get("mu", int(self.lambda_ / 2))
    rweights = params.get("weights", "superlinear")
    if rweights == "superlinear":
        self.weights = numpy.log(self.mu + 0.5) - \
            numpy.log(numpy.arange(1, self.mu + 1))
    elif rweights == "linear":
        self.weights = self.mu + 0.5 - numpy.arange(1, self.mu + 1)
    elif rweights == "equal":
        self.weights = numpy.ones(self.mu)
    else:
        raise RuntimeError("Unknown weights : %s" % rweights)
    self.weights /= sum(self.weights)
    self.mueff = 1. / sum(self.weights ** 2)
    self.cc = params.get("ccum", 4. / (self.dim + 4.))
    self.cs = params.get("cs", (self.mueff + 2.) /
                         (self.dim + self.mueff + 3.))
    self.ccov1 = params.get(
        "ccov1", 2. / ((self.dim + 1.3) ** 2 + self.mueff))
    self.ccovmu = params.get("ccovmu", 2. * (
        self.mueff - 2. + 1. / self.mueff) / (
        (self.dim + 2.) ** 2 + self.mueff))
    self.ccovmu = min(1 - self.ccov1, self.ccovmu)
    self.damps = 1. + 2. * \
        max(0, numpy.sqrt((self.mueff - 1.) / (self.dim + 1.)) - 1.) + \
        self.cs
    self.damps = params.get("damps", self.damps)
    return
934,287
Initializes OpenLoad instance with given parameters and formats api base url. Args: api_login (str): API Login found in openload.co api_key (str): API Key found in openload.co Returns: None
def __init__(self, api_login, api_key):
    self.login = api_login
    self.key = api_key
    self.api_url = self.api_base_url.format(api_version=self.api_version)
934,634
Check the status of the incoming response, raise exception if status is not 200. Args: response_json (dict): results of the response of the GET request. Returns: None
def _check_status(cls, response_json):
    status = response_json['status']
    msg = response_json['msg']
    if status == 400:
        raise BadRequestException(msg)
    elif status == 403:
        raise PermissionDeniedException(msg)
    elif status == 404:
        raise FileNotFoundException(msg)
    elif status == 451:
        raise UnavailableForLegalReasonsException(msg)
    elif status == 509:
        raise BandwidthUsageExceeded(msg)
    elif status >= 500:
        raise ServerErrorException(msg)
934,635
Used by every other method, it makes a GET request with the given params. Args: url (str): relative path of a specific service (account_info, ...). params (:obj:`dict`, optional): contains parameters to be sent in the GET request. Returns: dict: results of the response of the GET request.
def _get(self, url, params=None):
    if not params:
        params = {}
    params.update({'login': self.login, 'key': self.key})
    response_json = requests.get(self.api_url + url, params).json()
    return self._process_response(response_json)
934,636
Assembler of parameters for building request query. Args: query_string: Query to be passed to DuckDuckGo API. no_redirect: Skip HTTP redirects (for !bang commands). Default - False. no_html: Remove HTML from text, e.g. bold and italics. Default - False. skip_disambig: Skip disambiguation (D) Type. Default - False. Returns: A “percent-encoded” string which is used as a part of the query.
def url_assembler(query_string, no_redirect=0, no_html=0, skip_disambig=0):
    params = [('q', query_string.encode("utf-8")), ('format', 'json')]
    if no_redirect:
        params.append(('no_redirect', 1))
    if no_html:
        params.append(('no_html', 1))
    if skip_disambig:
        params.append(('skip_disambig', 1))
    return '/?' + urlencode(params)
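For example (output shown for Python 3, where urlencode plus-encodes the UTF-8 bytes):

url_assembler('python json', no_html=1)
# -> '/?q=python+json&format=json&no_html=1'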
934,653
Creates a new Type object (an instance of TypeMetaclass). Args: name (str): the name of the new type. parents (list(str)): a list of superclasses. attributes: (???): a map from name to value for "parameters" for defining the new type.
def __new__(mcls, name, parents, attributes):
    return type.__new__(mcls, name, parents, attributes)
934,718
Create an instance of a type signature. Args: cls (Class): the "type" of the object this signature represents. required (bool): whether a value for this field is required. default (object): an instance of the type for a default value. This should be either an instance of cls or something coercible to cls.
def __init__(self, cls, required=False, default=Empty):
    assert isclass(cls)
    assert issubclass(cls, Object)
    if default is not Empty and not isinstance(default, cls):
        self._default = cls(default)
    else:
        self._default = default
    self._cls = cls
    self._required = required
934,741
Queries data from a /<security_type>/info endpoint. Args: security_symbols (list): List of string symbols info_field_codes (list): List of string info field codes Returns: dict of the decoded json from server response. Notes: The max length of any list arg is 100
def get_info(self, security_symbols, info_field_codes):
    security_symbols = self._str_or_list(security_symbols)
    info_field_codes = self._str_or_list(info_field_codes)
    url_path = self._build_url_path(security_symbols, 'info', info_field_codes)
    return self._get_data(url_path, None)
935,026
Initialise a new instance. Arguments: host: the host to serve folder: the folder to serve files from
def __init__(self, host, folder, event_loop=None, timeout=15):
    self.host = host
    self.folder = folder
    self.logger = logger.getChild('HttpProtocol {}'.format(id(self)))
    self.logger.debug('Instantiated HttpProtocol')
    self._loop = event_loop or asyncio.get_event_loop()
    self._timeout = timeout
    self._timeout_handle = None
935,551
Write the response back to the client Arguments: response -- the dictionary containing the response.
def _write_response(self, response):
    status = '{} {} {}\r\n'.format(response['version'],
                                   response['code'],
                                   responses[response['code']])
    self.logger.debug("Responding status: '%s'", status.strip())
    self._write_transport(status)
    if 'body' in response and 'Content-Length' not in response['headers']:
        response['headers']['Content-Length'] = len(response['body'])
    response['headers']['Date'] = datetime.utcnow().strftime(
        "%a, %d %b %Y %H:%M:%S +0000")
    for (header, content) in response['headers'].items():
        self.logger.debug("Sending header: '%s: %s'", header, content)
        self._write_transport('{}: {}\r\n'.format(header, content))
    self._write_transport('\r\n')
    if 'body' in response:
        self._write_transport(response['body'])
935,553
Render an entire form with Semantic UI wrappers for each field Args: form (form): Django Form exclude (string): exclude fields by name, separated by commas kwargs (dict): other attributes will be passed to fields Returns: string: HTML of Django Form fields with Semantic UI wrappers
def render_form(form, exclude=None, **kwargs):
    if hasattr(form, "Meta") and hasattr(form.Meta, "layout"):
        return render_layout_form(form, getattr(form.Meta, "layout"), **kwargs)
    if exclude:
        exclude = exclude.split(",")
        for field in exclude:
            form.fields.pop(field)
    return mark_safe("".join([
        render_field(field, **kwargs) for field in form
    ]))
936,851
Render an entire form with Semantic UI wrappers for each field with a layout provided in the template or in the form class Args: form (form): Django Form layout (tuple): layout design kwargs (dict): other attributes will be passed to fields Returns: string: HTML of Django Form fields with Semantic UI wrappers
def render_layout_form(form, layout=None, **kwargs):
    def make_component(type_, *args):
        if type_ == "Text":
            return "".join(args)
        elif type_ == "Field":
            result = ""
            for c in args:
                if isinstance(c, tuple):
                    result += make_component(*c)
                elif isinstance(c, str):
                    result += render_field(form.__getitem__(c), **kwargs)
            return result
        else:
            if len(args) < 2:
                return ""
            result = "".join([make_component(*c) for c in args])
            if type_:
                return "<div class=\"%s\">%s</div>" % (type_.lower(), result)
            else:
                return result
    return mark_safe("".join([make_component(*component) for component in layout]))
936,852
Initializer for Field class. Args: field (BoundField): Form field **kwargs (dict): Field attributes
def __init__(self, field, **kwargs):
    # Kwargs will always be additional attributes
    self.attrs = kwargs
    self.attrs.update(field.field.widget.attrs)
    # Field
    self.field = field
    self.widget = field.field.widget
    # Defaults
    self.values = {"class": [], "label": "", "help": "", "errors": ""}
936,853
Find choices of a field, whether it has choices or has a queryset. Args: field (BoundField): Django form boundfield Returns: list: List of choices
def get_choices(field):
    empty_label = getattr(field.field, "empty_label", False)
    needs_empty_value = False
    choices = []
    # Data is the choices
    if hasattr(field.field, "_choices"):
        choices = field.field._choices
    # Data is a queryset
    elif hasattr(field.field, "_queryset"):
        queryset = field.field._queryset
        field_name = getattr(field.field, "to_field_name") or "pk"
        choices += ((getattr(obj, field_name), str(obj)) for obj in queryset)
    # Determine if an empty value is needed
    if choices and (choices[0][1] == BLANK_CHOICE_DASH[0][1] or choices[0][0]):
        needs_empty_value = True
        # Delete empty option
        if not choices[0][0]:
            del choices[0]
    # Remove dashed empty choice
    if empty_label == BLANK_CHOICE_DASH[0][1]:
        empty_label = None
    # Add custom empty value
    if empty_label or not field.field.required:
        if needs_empty_value:
            choices.insert(0, ("", empty_label or BLANK_CHOICE_DASH[0][1]))
    return choices
936,939
signal that the connection is ready This method signals to the client that the connection is ready for use. PARAMETERS: known_hosts: shortstr
def _open_ok(self, args):
    self.known_hosts = args.read_shortstr()
    AMQP_LOGGER.debug('Open OK! known_hosts [%s]' % self.known_hosts)
    return None
937,693
Property decorator that facilitates writing properties for values from a remote device. Arguments: name: The field name to use on the local object to store the cached property. get_command: A function that returns the remote value of the property. set_command: A function that accepts a new value for the property and sets it remotely. field_name: The name of the field to retrieve from the response message to get operations.
def remote_property(name, get_command, set_command, field_name, doc=None):
    def getter(self):
        try:
            return getattr(self, name)
        except AttributeError:
            value = getattr(self.sendCommand(get_command()), field_name)
            setattr(self, name, value)
            return value

    def setter(self, value):
        setattr(self, name, value)
        self.sendCommand(set_command(value))

    return property(getter, setter, doc=doc)
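A hedged sketch of how a device class might declare such a property; GetBaudRateCommand and SetBaudRateCommand are hypothetical command factories, not part of the snippet above:

class SerialDevice(object):
    # Reads go through sendCommand(GetBaudRateCommand()) and are cached in
    # self._baud_rate; writes call sendCommand(SetBaudRateCommand(value)).
    baud_rate = remote_property('_baud_rate', GetBaudRateCommand,
                                SetBaudRateCommand, 'baud_rate',
                                doc='Baud rate of the remote device.')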
938,015
Constructor. Arguments: vid: Vendor ID pid: Product ID
def __init__(self, vid, pid):
    self.hid = hid.device()
    self.hid.open(vid, pid)
    self.gpio_direction = GPIOSettings(self, commands.GetGPIODirectionCommand,
                                       commands.SetGPIODirectionCommand)
    self.gpio = GPIOSettings(self, commands.GetGPIOValueCommand,
                             commands.SetGPIOValueCommand)
    self.eeprom = EEPROMData(self)
    self.cancel_transfer()
938,022
Sends a Command object to the MCP2210 and returns its response. Arguments: A commands.Command instance Returns: A commands.Response instance, or raises a CommandException on error.
def sendCommand(self, command):
    command_data = [ord(x) for x in buffer(command)]
    self.hid.write(command_data)
    response_data = ''.join(chr(x) for x in self.hid.read(64))
    response = command.RESPONSE.from_buffer_copy(response_data)
    if response.status != 0:
        raise CommandException(response.status)
    return response
938,023
Transfers data over SPI. Arguments: data: The data to transfer. Returns: The data returned by the SPI device.
def transfer(self, data):
    settings = self.transfer_settings
    settings.spi_tx_size = len(data)
    self.transfer_settings = settings
    response = ''
    for i in range(0, len(data), 60):
        response += self.sendCommand(
            commands.SPITransferCommand(data[i:i + 60])).data
        time.sleep(0.01)
    while len(response) < len(data):
        response += self.sendCommand(commands.SPITransferCommand('')).data
    return ''.join(response)
938,024
This method occurs after dumping the data into the class. Args: data (dict): dictionary of all the query values Returns: data (dict): ordered dict of all the values
def build_url(self, data):
    query_part_one = []
    query_part_two = []
    keys_to_be_removed = []
    for key, value in data.items():
        if key not in ['version', 'restApi', 'resourcePath']:
            if key == 'mapArea':
                query_part_one.append(','.join(str(val) for val in value))
                keys_to_be_removed.append(key)
            elif key == 'includeLocationCodes':
                query_part_one.append(value)
                keys_to_be_removed.append(key)
            else:
                if isinstance(value, list):
                    value = ','.join(str(val) for val in value)
                query_part_two.append('{0}={1}'.format(key, value))
                keys_to_be_removed.append(key)
    for k in keys_to_be_removed:
        del data[k]
    data['query'] = '{0}?{1}'.format('/'.join(query_part_one),
                                     '&'.join(query_part_two))
    return data
938,083
This method occurs after dumping the data into the class. Args: data (dict): dictionary of all the query values Returns: data (dict): ordered dict of all the values
def build_query_string(self, data):
    query_params = []
    keys_to_be_removed = []
    for key, value in data.items():
        if key not in ['version', 'restApi', 'resourcePath']:
            if key == 'addressLine':
                query_params.append('{0}={1}'.format(key, quote(value)))
                keys_to_be_removed.append(key)
            else:
                query_params.append('{0}={1}'.format(key, value))
                keys_to_be_removed.append(key)
    data['query'] = "&".join(query_params)
    for k in keys_to_be_removed:
        del data[k]
    return data
938,086
This method occurs after dumping the data into the class. Args: data (dict): dictionary of all the query values Returns: data (dict): ordered dict of all the values
def build_query_string(self, data):
    queryValues = []
    keys_to_be_removed = []
    for key, value in data.items():
        if key not in ['version', 'restApi', 'resourcePath']:
            if not key == 'point':
                queryValues.append('{0}={1}'.format(key, value))
                keys_to_be_removed.append(key)
            keys_to_be_removed.append(key)
    queryString = '&'.join(queryValues)
    data['query'] = '{0}?{1}'.format(data['point'], queryString)
    for k in list(set(keys_to_be_removed)):
        del data[k]
    return data
938,088
This method occurs after dumping the data into the class. Args: data (dict): dictionary of all the query values Returns: data (dict): ordered dict of all the values
def build_query_string(self, data):
    query = []
    keys_to_be_removed = []
    for key, value in data.items():
        if key not in ['version', 'restApi', 'resourcePath']:
            if not key == 'method':
                if key == 'points':
                    value = ','.join(str(val) for val in value)
                    keys_to_be_removed.append(key)
                query.append('{0}={1}'.format(key, value))
                keys_to_be_removed.append(key)
            keys_to_be_removed.append(key)
    querystring = '&'.join(query)
    data['query'] = '{0}?{1}'.format(data['method'], querystring)
    for k in list(set(keys_to_be_removed)):
        del data[k]
    return data
938,116
A quadratic tween function that accelerates, reaches the midpoint, and then decelerates. Args: n (float): The time progress, starting at 0.0 and ending at 1.0. Returns: (float) The line progress, starting at 0.0 and ending at 1.0. Suitable for passing to getPointOnLine().
def easeInOutQuad(n):
    _checkRange(n)
    if n < 0.5:
        return 2 * n**2
    else:
        n = n * 2 - 1
        return -0.5 * (n*(n-2) - 1)
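A few worked values (assuming the surrounding pytweening-style module, which provides _checkRange):

easeInOutQuad(0.25)  # 0.125 -> still accelerating in the first half
easeInOutQuad(0.5)   # 0.5   -> exactly the midpoint
easeInOutQuad(0.75)  # 0.875 -> decelerating in the second half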
938,269
A cubic tween function that accelerates, reaches the midpoint, and then decelerates. Args: n (float): The time progress, starting at 0.0 and ending at 1.0. Returns: (float) The line progress, starting at 0.0 and ending at 1.0. Suitable for passing to getPointOnLine().
def easeInOutCubic(n):
    _checkRange(n)
    n = 2 * n
    if n < 1:
        return 0.5 * n**3
    else:
        n = n - 2
        return 0.5 * (n**3 + 2)
938,270
A quartic tween function that accelerates, reaches the midpoint, and then decelerates. Args: n (float): The time progress, starting at 0.0 and ending at 1.0. Returns: (float) The line progress, starting at 0.0 and ending at 1.0. Suitable for passing to getPointOnLine().
def easeInOutQuart(n):
    _checkRange(n)
    n = 2 * n
    if n < 1:
        return 0.5 * n**4
    else:
        n = n - 2
        return -0.5 * (n**4 - 2)
938,271
A quintic tween function that accelerates, reaches the midpoint, and then decelerates. Args: n (float): The time progress, starting at 0.0 and ending at 1.0. Returns: (float) The line progress, starting at 0.0 and ending at 1.0. Suitable for passing to getPointOnLine().
def easeInOutQuint(n):
    _checkRange(n)
    n = 2 * n
    if n < 1:
        return 0.5 * n**5
    else:
        n = n - 2
        return 0.5 * (n**5 + 2)
938,272
An exponential tween function that accelerates, reaches the midpoint, and then decelerates. Args: n (float): The time progress, starting at 0.0 and ending at 1.0. Returns: (float) The line progress, starting at 0.0 and ending at 1.0. Suitable for passing to getPointOnLine().
def easeInOutExpo(n):
    _checkRange(n)
    if n == 0:
        return 0
    elif n == 1:
        return 1
    else:
        n = n * 2
        if n < 1:
            return 0.5 * 2**(10 * (n - 1))
        else:
            n -= 1
            # 0.5 * (-() + 2)
            return 0.5 * (-1 * (2 ** (-10 * n)) + 2)
938,273
A circular tween function that accelerates, reaches the midpoint, and then decelerates. Args: n (float): The time progress, starting at 0.0 and ending at 1.0. Returns: (float) The line progress, starting at 0.0 and ending at 1.0. Suitable for passing to getPointOnLine().
def easeInOutCirc(n):
    _checkRange(n)
    n = n * 2
    if n < 1:
        return -0.5 * (math.sqrt(1 - n**2) - 1)
    else:
        n = n - 2
        return 0.5 * (math.sqrt(1 - n**2) + 1)
938,274
An elastic tween function that begins with an increasing wobble and then snaps into the destination. Args: n (float): The time progress, starting at 0.0 and ending at 1.0. Returns: (float) The line progress, starting at 0.0 and ending at 1.0. Suitable for passing to getPointOnLine().
def easeInElastic(n, amplitude=1, period=0.3):
    _checkRange(n)
    return 1 - easeOutElastic(1-n, amplitude=amplitude, period=period)
938,275
An elastic tween function that overshoots the destination and then "rubber bands" into the destination. Args: n (float): The time progress, starting at 0.0 and ending at 1.0. Returns: (float) The line progress, starting at 0.0 and ending at 1.0. Suitable for passing to getPointOnLine().
def easeOutElastic(n, amplitude=1, period=0.3):
    _checkRange(n)
    if amplitude < 1:
        amplitude = 1
        s = period / 4
    else:
        s = period / (2 * math.pi) * math.asin(1 / amplitude)
    return amplitude * 2**(-10*n) * math.sin((n-s)*(2*math.pi / period)) + 1
938,276
An elastic tween function that wobbles towards the midpoint. Args: n (float): The time progress, starting at 0.0 and ending at 1.0. Returns: (float) The line progress, starting at 0.0 and ending at 1.0. Suitable for passing to getPointOnLine().
def easeInOutElastic(n, amplitude=1, period=0.5):
    _checkRange(n)
    n *= 2
    if n < 1:
        return easeInElastic(n, amplitude=amplitude, period=period) / 2
    else:
        return easeOutElastic(n-1, amplitude=amplitude, period=period) / 2 + 0.5
938,277
A tween function that backs up first at the start and then goes to the destination. Args: n (float): The time progress, starting at 0.0 and ending at 1.0. Returns: (float) The line progress, starting at 0.0 and ending at 1.0. Suitable for passing to getPointOnLine().
def easeInBack(n, s=1.70158):
    _checkRange(n)
    return n * n * ((s + 1) * n - s)
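A worked value showing the initial back-up below 0 (again assuming the module's _checkRange is available):

easeInBack(0.2)  # 0.04 * ((1.70158 + 1) * 0.2 - 1.70158) ≈ -0.046, i.e. it first moves backwards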
938,278
A tween function that overshoots the destination a little and then backs into the destination. Args: n (float): The time progress, starting at 0.0 and ending at 1.0. Returns: (float) The line progress, starting at 0.0 and ending at 1.0. Suitable for passing to getPointOnLine().
def easeOutBack(n, s=1.70158):
    _checkRange(n)
    n = n - 1
    return n * n * ((s + 1) * n + s) + 1
938,279
A "back-in" tween function that overshoots both the start and destination. Args: n (float): The time progress, starting at 0.0 and ending at 1.0. Returns: (float) The line progress, starting at 0.0 and ending at 1.0. Suitable for passing to getPointOnLine().
def easeInOutBack(n, s=1.70158):
    _checkRange(n)
    n = n * 2
    if n < 1:
        s *= 1.525
        return 0.5 * (n * n * ((s + 1) * n - s))
    else:
        n -= 2
        s *= 1.525
        return 0.5 * (n * n * ((s + 1) * n + s) + 2)
938,280
A bouncing tween function that hits the destination and then bounces to rest. Args: n (float): The time progress, starting at 0.0 and ending at 1.0. Returns: (float) The line progress, starting at 0.0 and ending at 1.0. Suitable for passing to getPointOnLine().
def easeOutBounce(n):
    _checkRange(n)
    if n < (1/2.75):
        return 7.5625 * n * n
    elif n < (2/2.75):
        n -= (1.5/2.75)
        return 7.5625 * n * n + 0.75
    elif n < (2.5/2.75):
        n -= (2.25/2.75)
        return 7.5625 * n * n + 0.9375
    else:
        n -= (2.65/2.75)
        return 7.5625 * n * n + 0.984375
938,281
Parses an etext meta-data definition to extract fields. Args: ebook (xml.etree.ElementTree.Element): An ebook meta-data definition.
def parsemetadata(ebook):
    result = dict.fromkeys(META_FIELDS)
    # get etext no
    about = ebook.get('{%(rdf)s}about' % NS)
    result['id'] = int(os.path.basename(about))
    # author
    creator = ebook.find('.//{%(dc)s}creator' % NS)
    if creator is not None:
        name = creator.find('.//{%(pg)s}name' % NS)
        if name is not None:
            result['author'] = safeunicode(name.text, encoding='utf-8')
        birth = creator.find('.//{%(pg)s}birthdate' % NS)
        if birth is not None:
            result['authoryearofbirth'] = int(birth.text)
        death = creator.find('.//{%(pg)s}deathdate' % NS)
        if death is not None:
            result['authoryearofdeath'] = int(death.text)
    # title
    title = ebook.find('.//{%(dc)s}title' % NS)
    if title is not None:
        result['title'] = fixsubtitles(
            safeunicode(title.text, encoding='utf-8'))
    # subject lists
    result['subjects'], result['LCC'] = set(), set()
    for subject in ebook.findall('.//{%(dc)s}subject' % NS):
        res = subject.find('.//{%(dcam)s}memberOf' % NS)
        if res is None:
            continue
        res = res.get('{%(rdf)s}resource' % NS)
        value = subject.find('.//{%(rdf)s}value' % NS).text
        if res == ('%(dc)sLCSH' % NS):
            result['subjects'].add(value)
        elif res == ('%(dc)sLCC' % NS):
            result['LCC'].add(value)
    # formats
    result['formats'] = {
        file.find('{%(dc)s}format//{%(rdf)s}value' % NS).text:
            file.get('{%(rdf)s}about' % NS)
        for file in ebook.findall('.//{%(pg)s}file' % NS)}
    # type
    booktype = ebook.find('.//{%(dc)s}type//{%(rdf)s}value' % NS)
    if booktype is not None:
        result['type'] = booktype.text
    # languages
    lang = ebook.findall('.//{%(dc)s}language//{%(rdf)s}value' % NS)
    result['language'] = [a.text for a in lang] or None
    # download count
    downloads = ebook.find('.//{%(pg)s}downloads' % NS)
    if downloads is not None:
        result['downloads'] = int(downloads.text)
    return result
938,569
Imputes NaN values using various filling methods like mean, zero, median, min, random Args: fill_method: How NaN's will be exchanged. Possible values: 'mean', 'zero', 'median', 'min', 'random' fill_missing: If True, transformer will fill NaN values by the filling method
def __init__(self, fill_method='zero', fill_missing=True, **kwargs):
    super().__init__()
    self.fill_missing = fill_missing
    self.filler = SimpleFill(fill_method)
939,232
Performs predictions blending using the trained weights. Args: X (array-like): Predictions of different models. Returns: dict with blended predictions (key is 'y_pred').
def transform(self, X):
    assert np.shape(X)[0] == len(self._weights), (
        'BlendingOptimizer: Number of models to blend its predictions and weights does not match: '
        'n_models={}, weights_len={}'.format(np.shape(X)[0], len(self._weights)))
    blended_predictions = np.average(np.power(X, self._power),
                                     weights=self._weights,
                                     axis=0) ** (1.0 / self._power)
    return {'y_pred': blended_predictions}
939,316
Gets the Outlook email matching the provided message_id. Args: message_id: A string for the intended message, provided by Outlook Returns: :class:`Message <pyOutlook.core.message.Message>`
def get_message(self, message_id):
    r = requests.get('https://outlook.office.com/api/v2.0/me/messages/' + message_id,
                     headers=self._headers)
    check_response(r)
    return Message._json_to_message(self, r.json())
940,297
Retrieve a Folder by its Outlook ID Args: folder_id: The ID of the :class:`Folder <pyOutlook.core.folder.Folder>` to retrieve Returns: :class:`Folder <pyOutlook.core.folder.Folder>`
def get_folder_by_id(self, folder_id):
    endpoint = 'https://outlook.office.com/api/v2.0/me/MailFolders/' + folder_id
    r = requests.get(endpoint, headers=self._headers)
    check_response(r)
    return_folder = r.json()
    return Folder._json_to_folder(self, return_folder)
940,302
Retrieves all messages from a folder, specified by its name. This only works with "Well Known" folders, such as 'Inbox' or 'Drafts'. Args: folder_name (str): The name of the folder to retrieve Returns: List[:class:`Message <pyOutlook.core.message.Message>` ]
def _get_messages_from_folder_name(self, folder_name):
    r = requests.get('https://outlook.office.com/api/v2.0/me/MailFolders/' + folder_name + '/messages',
                     headers=self._headers)
    check_response(r)
    return Message._json_to_messages(self, r.json())
940,303
Returns the JSON representation of this message required for making requests to the API. Args: content_type (str): Either 'HTML' or 'Text'
def api_representation(self, content_type):
    payload = dict(Subject=self.subject,
                   Body=dict(ContentType=content_type, Content=self.body))
    if self.sender is not None:
        payload.update(From=self.sender.api_representation())
    # A list of strings can also be provided for convenience. If provided,
    # convert them into Contacts
    if any(isinstance(item, str) for item in self.to):
        self.to = [Contact(email=email) for email in self.to]
    # Turn each contact into the JSON needed for the Outlook API
    recipients = [contact.api_representation() for contact in self.to]
    payload.update(ToRecipients=recipients)
    # Conduct the same process for CC and BCC if needed
    if self.cc:
        if any(isinstance(email, str) for email in self.cc):
            self.cc = [Contact(email) for email in self.cc]
        cc_recipients = [contact.api_representation() for contact in self.cc]
        payload.update(CcRecipients=cc_recipients)
    if self.bcc:
        if any(isinstance(email, str) for email in self.bcc):
            self.bcc = [Contact(email) for email in self.bcc]
        bcc_recipients = [contact.api_representation() for contact in self.bcc]
        payload.update(BccRecipients=bcc_recipients)
    if self._attachments:
        payload.update(Attachments=[attachment.api_representation()
                                    for attachment in self._attachments])
    payload.update(Importance=str(self.importance))
    return dict(Message=payload)
940,311
Takes the recipients, body, and attachments of the Message and sends. Args: content_type: Can either be 'HTML' or 'Text', defaults to HTML.
def send(self, content_type='HTML'):
    payload = self.api_representation(content_type)
    endpoint = 'https://outlook.office.com/api/v1.0/me/sendmail'
    self._make_api_call('post', endpoint=endpoint, data=json.dumps(payload))
940,313
Reply to the Message. Notes: HTML can be inserted in the string and will be interpreted properly by Outlook. Args: reply_comment: String message to send with email.
def reply(self, reply_comment):
    payload = '{ "Comment": "' + reply_comment + '"}'
    endpoint = 'https://outlook.office.com/api/v2.0/me/messages/' + self.message_id + '/reply'
    self._make_api_call('post', endpoint, data=payload)
940,315
Replies to everyone on the email, including those on the CC line. With great power, comes great responsibility. Args: reply_comment: The string comment to send to everyone on the email.
def reply_all(self, reply_comment):
    payload = '{ "Comment": "' + reply_comment + '"}'
    endpoint = 'https://outlook.office.com/api/v2.0/me/messages/{}/replyall'.format(self.message_id)
    self._make_api_call('post', endpoint, data=payload)
940,316
Moves the email to the folder specified by the folder parameter. Args: folder: A string containing the folder ID the message should be moved to, or a Folder instance
def move_to(self, folder):
    if isinstance(folder, Folder):
        self.move_to(folder.id)
    else:
        self._move_to(folder)
940,318
Emails from this contact will either always be put in the Focused inbox, or always put in Other, based on the value of is_focused. Args: account (OutlookAccount): The :class:`OutlookAccount <pyOutlook.core.main.OutlookAccount>` the override should be set for is_focused (bool): Whether this contact should be set to Focused, or Other. Returns: True if the request was successful
def set_focused(self, account, is_focused):
    # type: (OutlookAccount, bool) -> bool
    endpoint = 'https://outlook.office.com/api/v2.0/me/InferenceClassification/Overrides'
    if is_focused:
        classification = 'Focused'
    else:
        classification = 'Other'
    data = dict(ClassifyAs=classification, SenderEmailAddress=dict(Address=self.email))
    r = requests.post(endpoint, headers=account._headers, data=json.dumps(data))
    # Will raise an error if necessary, otherwise returns True
    result = check_response(r)
    self.focused = is_focused
    return result
940,356
Renames the Folder to the provided name. Args: new_folder_name: A string of the replacement name. Raises: AuthError: Raised if Outlook returns a 401, generally caused by an invalid or expired access token. Returns: A new Folder representing the folder with the new name on Outlook.
def rename(self, new_folder_name):
    headers = self.headers
    endpoint = 'https://outlook.office.com/api/v2.0/me/MailFolders/' + self.id
    payload = '{ "DisplayName": "' + new_folder_name + '"}'
    r = requests.patch(endpoint, headers=headers, data=payload)
    if check_response(r):
        return_folder = r.json()
        return self._json_to_folder(self.account, return_folder)
940,436
Creates a child folder within the Folder it is called from and returns the new Folder object. Args: folder_name: The name of the folder to create Returns: :class:`Folder <pyOutlook.core.folder.Folder>`
def create_child_folder(self, folder_name):
    headers = self.headers
    endpoint = 'https://outlook.office.com/api/v2.0/me/MailFolders/' + self.id + '/childfolders'
    payload = '{ "DisplayName": "' + folder_name + '"}'
    r = requests.post(endpoint, headers=headers, data=payload)
    if check_response(r):
        return_folder = r.json()
        return self._json_to_folder(self.account, return_folder)
940,440
Returns the decoded claims without verification of any kind. Args: token (str): A signed JWT to decode the headers from. Returns: dict: The dict representation of the token claims. Raises: JWTError: If there is an exception decoding the token.
def get_unverified_claims(token):
    try:
        claims = jws.get_unverified_claims(token)
    except:
        raise JWTError('Error decoding token claims.')
    try:
        claims = json.loads(claims.decode('utf-8'))
    except ValueError as e:
        raise JWTError('Invalid claims string: %s' % e)
    if not isinstance(claims, Mapping):
        raise JWTError('Invalid claims string: must be a json object')
    return claims
940,585
Validates that the 'at_hash' parameter included in the claims matches with the access_token returned alongside the id token as part of the authorization_code flow. Args: claims (dict): The claims dictionary to validate. access_token (str): The access token returned by the OpenID Provider. algorithm (str): The algorithm used to sign the JWT, as specified by the token headers.
def _validate_at_hash(claims, access_token, algorithm):
    if 'at_hash' not in claims and not access_token:
        return
    elif 'at_hash' in claims and not access_token:
        msg = 'No access_token provided to compare against at_hash claim.'
        raise JWTClaimsError(msg)
    elif access_token and 'at_hash' not in claims:
        msg = 'at_hash claim missing from token.'
        raise JWTClaimsError(msg)
    try:
        expected_hash = calculate_at_hash(access_token,
                                          ALGORITHMS.HASHES[algorithm])
    except (TypeError, ValueError):
        msg = 'Unable to calculate at_hash to verify against token claims.'
        raise JWTClaimsError(msg)
    if claims['at_hash'] != expected_hash:
        raise JWTClaimsError('at_hash claim does not match access_token.')
940,586
Allows caller to use array indices to get a :class:`PageItem` Args: i (int): 0-based index of the element to retrieve Returns: PageItem: if valid item exists at index i None if the index is too small or too large
def __getitem__(self, key):
    if not isinstance(key, int):
        raise TypeError()
    return self.items[key]
941,712
Parses the passed text to JSON. Args: text: Text to parse. layers (optional): Special fields. Only one string or iterable object (e.g "Data", ("Data", "Fio")). Only these fields will be returned. Returns: The parsed text as a JSON object.
def parse(self, text, layers=None):
    params = {
        "text": text,
        "key": self.key,
    }
    if layers is not None:
        # if it's string
        if isinstance(layers, six.string_types):
            params["layers"] = layers
        # if it's another iterable object
        elif isinstance(layers, collections.Iterable):
            params["layers"] = ",".join(layers)
    req = requests.get(self.NLU_URL, params=params)
    return req.json()
943,357
Class for generation of speech. Args: speaker: Speaker. audio_format: Audio file format. key: API-key for Yandex speech kit. lang (optional): Language. Defaults to "ru-RU". emotion (optional): The color of the voice. Defaults to "normal". speed (optional): Speech tempo. Defaults to 1.0.
def __init__(self, speaker, audio_format, key, lang="ru-RU", **kwargs):
    self.__params = {
        "speaker": speaker,
        "format": audio_format,
        "key": key,
        "lang": lang,
    }
    self.__params.update(kwargs)
    self._data = None
943,358