Dataset columns: code (string, lengths 51 to 2.38k) and docstring (string, lengths 4 to 15.2k).
def on_backward_begin(self, last_loss, last_output, **kwargs):
    "Record `last_loss` in the proper list."
    last_loss = last_loss.detach().cpu()
    if self.gen_mode:
        self.smoothenerG.add_value(last_loss)
        self.glosses.append(self.smoothenerG.smooth)
        self.last_gen = last_output.detach().cpu()
    else:
        self.smoothenerC.add_value(last_loss)
        self.closses.append(self.smoothenerC.smooth)
Record `last_loss` in the proper list.
def function_arg_count(fn):
    assert callable(fn), 'function_arg_count needed a callable function, not {0}'.format(repr(fn))
    if hasattr(fn, '__code__') and hasattr(fn.__code__, 'co_argcount'):
        return fn.__code__.co_argcount
    else:
        return 1
Returns how many arguments a function has.
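A quick usage sketch of function_arg_count as defined above, using two illustrative callables; note that built-ins such as len lack a __code__ attribute in CPython, so they hit the fallback branch:

def add(a, b):
    return a + b

print(function_arg_count(add))  # 2, read from add.__code__.co_argcount
print(function_arg_count(len))  # 1, built-in functions have no __code__, so the fallback of 1 applies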
def DbImportDevice(self, argin): self._log.debug("In DbImportDevice()") return self.db.import_device(argin.lower())
Import a device from the database :param argin: Device name (or alias) :type: tango.DevString :return: Str[0] = device name Str[1] = CORBA IOR Str[2] = device version Str[3] = device server process name Str[4] = host name Str[5] = Tango class name Lg[0] = Exported flag Lg[1] = Device server process PID :rtype: tango.DevVarLongStringArray
def And(*predicates, **kwargs):
    if kwargs:
        predicates += Query(**kwargs),
    return _flatten(_And, *predicates)
`And` predicate. Returns ``False`` at the first sub-predicate that returns ``False``.
def write_short_at(self, n, pos, pack_into=Struct('>H').pack_into):
    if 0 <= n <= 0xFFFF:
        pack_into(self._output_buffer, pos, n)
    else:
        raise ValueError('Short %d out of range 0..0xFFFF' % n)
    return self
Write an unsigned 16bit value at a specific position in the buffer. Used for writing tables and frames.
def plugins(self):
    if not self.loaded:
        self.load_modules()
    return get_plugins()[self.group]._filter(blacklist=self.blacklist,
                                             newest_only=True,
                                             type_filter=self.type_filter)
Newest version of all plugins in the group filtered by ``blacklist`` Returns: dict: Nested dictionary of plugins accessible through dot-notation. Plugins are returned in a nested dictionary, but can also be accessed through dot-notation. Just as when accessing an undefined dictionary key with index-notation, a :py:exc:`KeyError` will be raised if the plugin type or plugin does not exist. Parent types are always included. Child plugins will only be included if a valid, non-blacklisted plugin is available.
def add_vrf(self):
    v = VRF()
    if 'rt' in request.json:
        v.rt = validate_string(request.json, 'rt')
    if 'name' in request.json:
        v.name = validate_string(request.json, 'name')
    if 'description' in request.json:
        v.description = validate_string(request.json, 'description')
    if 'tags' in request.json:
        v.tags = request.json['tags']
    if 'avps' in request.json:
        v.avps = request.json['avps']
    try:
        v.save()
    except NipapError, e:
        return json.dumps({'error': 1, 'message': e.args, 'type': type(e).__name__})
    return json.dumps(v, cls=NipapJSONEncoder)
Add a new VRF to NIPAP and return its data.
def _try_then(self): if self._cached is not None and self._callback is not None: self._callback(*self._cached[0], **self._cached[1])
Check to see if self has been resolved yet, if so invoke then.
def export_gcm_encrypted_private_key(self, password: str, salt: str, n: int = 16384) -> str:
    r = 8
    p = 8
    dk_len = 64
    scrypt = Scrypt(n, r, p, dk_len)
    derived_key = scrypt.generate_kd(password, salt)
    iv = derived_key[0:12]
    key = derived_key[32:64]
    hdr = self.__address.b58encode().encode()
    mac_tag, cipher_text = AESHandler.aes_gcm_encrypt_with_iv(self.__private_key, hdr, key, iv)
    encrypted_key = bytes.hex(cipher_text) + bytes.hex(mac_tag)
    encrypted_key_str = base64.b64encode(bytes.fromhex(encrypted_key))
    return encrypted_key_str.decode('utf-8')
Export the private key encrypted with the AES algorithm in GCM mode. :param password: the secret pass phrase to generate the keys from. :param salt: A string to use for better protection from dictionary attacks. This value does not need to be kept secret, but it should be randomly chosen for each derivation. It is recommended to be at least 8 bytes long. :param n: CPU/memory cost parameter. It must be a power of 2 and less than 2**32. :return: a GCM-encrypted private key in the form of a string.
def body_encode(self, string):
    if not string:
        return string
    if self.body_encoding is BASE64:
        if isinstance(string, str):
            string = string.encode(self.output_charset)
        return email.base64mime.body_encode(string)
    elif self.body_encoding is QP:
        if isinstance(string, str):
            string = string.encode(self.output_charset)
        string = string.decode('latin1')
        return email.quoprimime.body_encode(string)
    else:
        if isinstance(string, str):
            string = string.encode(self.output_charset).decode('ascii')
        return string
Body-encode a string by converting it first to bytes. The type of encoding (base64 or quoted-printable) will be based on self.body_encoding. If body_encoding is None, we assume the output charset is a 7bit encoding, so re-encoding the decoded string using the ascii codec produces the correct string version of the content.
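This appears to be the standard library's email.charset.Charset.body_encode (or a vendored copy of it); assuming so, a minimal usage sketch:

from email.charset import Charset

c = Charset('utf-8')           # utf-8 defaults to BASE64 body encoding
print(c.body_encode('héllo'))  # base64-encoded payload of the UTF-8 bytes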
def copyData(self, source):
    for attr in self.copyAttributes:
        selfValue = getattr(self, attr)
        sourceValue = getattr(source, attr)
        if isinstance(selfValue, BaseObject):
            selfValue.copyData(sourceValue)
        else:
            setattr(self, attr, sourceValue)
Subclasses may override this method. If so, they should call the super.
def open_subscription_page(self, content_type): from .subscription_page import SubscriptionPage with self.term.loader('Loading {0}s'.format(content_type)): page = SubscriptionPage(self.reddit, self.term, self.config, self.oauth, content_type=content_type) if not self.term.loader.exception: return page
Open an instance of the subscriptions page with the selected content.
def create_context_menu_actions(self): actions = [] fnames = self.get_selected_filenames() new_actions = self.create_file_new_actions(fnames) if len(new_actions) > 1: new_act_menu = QMenu(_('New'), self) add_actions(new_act_menu, new_actions) actions.append(new_act_menu) else: actions += new_actions import_actions = self.create_file_import_actions(fnames) if len(import_actions) > 1: import_act_menu = QMenu(_('Import'), self) add_actions(import_act_menu, import_actions) actions.append(import_act_menu) else: actions += import_actions if actions: actions.append(None) if fnames: actions += self.create_file_manage_actions(fnames) if actions: actions.append(None) if fnames and all([osp.isdir(_fn) for _fn in fnames]): actions += self.create_folder_manage_actions(fnames) return actions
Create context menu actions
def duration(self): if self._closed or \ not self._result or \ "duration" not in self._result: return -1 return self._result.get("duration", 0)
This read-only attribute specifies the server-side duration of a query in milliseconds.
def pop(self, key, default=None): "Standard pop semantics for all mapping types" if not isinstance(key, tuple): key = (key,) return self.data.pop(key, default)
Standard pop semantics for all mapping types
def command(self, name, *args): args = [name.encode('utf-8')] + [ (arg if type(arg) is bytes else str(arg).encode('utf-8')) for arg in args if arg is not None ] + [None] _mpv_command(self.handle, (c_char_p*len(args))(*args))
Execute a raw command.
def from_source(source_name):
    meta_bucket = 'net-mozaws-prod-us-west-2-pipeline-metadata'
    store = S3Store(meta_bucket)
    try:
        source = json.loads(store.get_key('sources.json').read().decode('utf-8'))[source_name]
    except KeyError:
        raise Exception('Unknown source {}'.format(source_name))
    schema = store.get_key('{}/schema.json'.format(source['metadata_prefix'])).read().decode('utf-8')
    dimensions = [f['field_name'] for f in json.loads(schema)['dimensions']]
    return Dataset(source['bucket'], dimensions, prefix=source['prefix'])
Create a Dataset configured for the given source_name This is particularly convenient when the user doesn't know the list of dimensions or the bucket name, but only the source name. Usage example:: records = Dataset.from_source('telemetry').where( docType='main', submissionDate='20160701', appUpdateChannel='nightly' )
def expect(self, pattern, timeout=-1, searchwindowsize=-1, async_=False, **kw):
    if 'async' in kw:
        async_ = kw.pop('async')
    if kw:
        raise TypeError("Unknown keyword arguments: {}".format(kw))
    compiled_pattern_list = self.compile_pattern_list(pattern)
    return self.expect_list(compiled_pattern_list, timeout, searchwindowsize, async_)
This seeks through the stream until a pattern is matched. The pattern is overloaded and may take several types. The pattern can be a StringType, EOF, a compiled re, or a list of any of those types. Strings will be compiled to re types. This returns the index into the pattern list. If the pattern was not a list this returns index 0 on a successful match. This may raise exceptions for EOF or TIMEOUT. To avoid the EOF or TIMEOUT exceptions add EOF or TIMEOUT to the pattern list. That will cause expect to match an EOF or TIMEOUT condition instead of raising an exception. If you pass a list of patterns and more than one matches, the first match in the stream is chosen. If more than one pattern matches at that point, the leftmost in the pattern list is chosen. For example:: # the input is 'foobar' index = p.expect(['bar', 'foo', 'foobar']) # returns 1('foo') even though 'foobar' is a "better" match Please note, however, that buffering can affect this behavior, since input arrives in unpredictable chunks. For example:: # the input is 'foobar' index = p.expect(['foobar', 'foo']) # returns 0('foobar') if all input is available at once, # but returns 1('foo') if parts of the final 'bar' arrive late When a match is found for the given pattern, the class instance attribute *match* becomes an re.MatchObject result. Should an EOF or TIMEOUT pattern match, then the match attribute will be an instance of that exception class. The pairing before and after class instance attributes are views of the data preceding and following the matching pattern. On general exception, class attribute *before* is all data received up to the exception, while *match* and *after* attributes are value None. When the keyword argument timeout is -1 (default), then TIMEOUT will raise after the default value specified by the class timeout attribute. When None, TIMEOUT will not be raised and may block indefinitely until match. When the keyword argument searchwindowsize is -1 (default), then the value specified by the class maxread attribute is used. A list entry may be EOF or TIMEOUT instead of a string. This will catch these exceptions and return the index of the list entry instead of raising the exception. The attribute 'after' will be set to the exception type. The attribute 'match' will be None. This allows you to write code like this:: index = p.expect(['good', 'bad', pexpect.EOF, pexpect.TIMEOUT]) if index == 0: do_something() elif index == 1: do_something_else() elif index == 2: do_some_other_thing() elif index == 3: do_something_completely_different() instead of code like this:: try: index = p.expect(['good', 'bad']) if index == 0: do_something() elif index == 1: do_something_else() except EOF: do_some_other_thing() except TIMEOUT: do_something_completely_different() These two forms are equivalent. It all depends on what you want. You can also just expect the EOF if you are waiting for all output of a child to finish. For example:: p = pexpect.spawn('/bin/ls') p.expect(pexpect.EOF) print p.before If you are trying to optimize for speed then see expect_list(). On Python 3.4, or Python 3.3 with asyncio installed, passing ``async_=True`` will make this return an :mod:`asyncio` coroutine, which you can yield from to get the same result that this method would normally give directly. So, inside a coroutine, you can replace this code:: index = p.expect(patterns) With this non-blocking form:: index = yield from p.expect(patterns, async_=True)
def transpose(self, semitone):
    if semitone > 0 and semitone < 128:
        self.pianoroll[:, semitone:] = self.pianoroll[:, :(128 - semitone)]
        self.pianoroll[:, :semitone] = 0
    elif semitone < 0 and semitone > -128:
        self.pianoroll[:, :(128 + semitone)] = self.pianoroll[:, -semitone:]
        self.pianoroll[:, (128 + semitone):] = 0
Transpose the pianoroll by a number of semitones, where positive values are for higher key, while negative values are for lower key. Parameters ---------- semitone : int The number of semitones to transpose the pianoroll.
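A usage sketch, assuming pr is a track-like object exposing the pianoroll attribute used above (names are illustrative):

pr.transpose(2)    # shift every active pitch up a whole tone
pr.transpose(-12)  # shift everything down one octave; vacated pitch columns are zeroed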
def _validate_compression_params(self, img_array, cparams, colorspace): self._validate_j2k_colorspace(cparams, colorspace) self._validate_codeblock_size(cparams) self._validate_precinct_size(cparams) self._validate_image_rank(img_array) self._validate_image_datatype(img_array)
Check that the compression parameters are valid. Parameters ---------- img_array : ndarray Image data to be written to file. cparams : CompressionParametersType(ctypes.Structure) Corresponds to cparameters_t type in openjp2 headers.
def by_name(cls, name): if name not in cls._goal_by_name: cls._goal_by_name[name] = _Goal(name) return cls._goal_by_name[name]
Returns the unique object representing the goal of the specified name. :API: public
def add_string_pairs_from_text_field_element(xib_file, results, text_field, special_ui_components_prefix):
    text_field_entry_comment = extract_element_internationalized_comment(text_field)
    if text_field_entry_comment is None:
        return
    if text_field.hasAttribute('usesAttributedText') and \
            text_field.attributes['usesAttributedText'].value == 'YES':
        add_string_pairs_from_attributed_ui_element(results, text_field, text_field_entry_comment)
    else:
        try:
            text_field_entry_key = text_field.attributes['text'].value
            results.append((text_field_entry_key, text_field_entry_comment + ' default text value'))
        except KeyError:
            pass
        try:
            text_field_entry_key = text_field.attributes['placeholder'].value
            results.append((text_field_entry_key, text_field_entry_comment + ' placeholder text value'))
        except KeyError:
            pass
    warn_if_element_not_of_class(text_field, 'TextField', special_ui_components_prefix)
Adds string pairs from a textfield element. Args: xib_file (str): Path to the xib file. results (list): The list to add the results to. text_field(element): The textfield element from the xib, to extract the string pairs from. special_ui_components_prefix (str): If not None, extraction will not warn about internationalized UI components with this class prefix.
def hook_point(self, hook_name, handle=None):
    full_hook_name = 'hook_' + hook_name
    for module in self.modules_manager.instances:
        _ts = time.time()
        if not hasattr(module, full_hook_name):
            continue
        fun = getattr(module, full_hook_name)
        try:
            fun(handle if handle is not None else self)
        except Exception as exp:
            logger.warning('The instance %s raised an exception %s. I disabled it,'
                           ' and set it to restart later', module.name, str(exp))
            logger.exception('Exception %s', exp)
            self.modules_manager.set_to_restart(module)
        else:
            statsmgr.timer('hook.%s.%s' % (hook_name, module.name), time.time() - _ts)
Used to call a module function that may define a hook function for hook_name. Available hook points: - `tick`, called on each daemon loop turn - `save_retention`; called by the scheduler when live state saving is to be done - `load_retention`; called by the scheduler when live state restoring is necessary (on restart) - `get_new_actions`; called by the scheduler before adding the actions to be executed - `early_configuration`; called by the arbiter when it begins parsing the configuration - `read_configuration`; called by the arbiter when it reads the configuration - `late_configuration`; called by the arbiter when it finishes parsing the configuration By default, the `handle` parameter provided to the hooked function is the calling Daemon object. The scheduler will provide its own instance when it calls this function. :param hook_name: function name we may hook in module :type hook_name: str :param handle: parameter to provide to the hook function :type handle: alignak.Satellite :return: None
def n1ql_index_create_primary(self, defer=False, ignore_exists=False): return self.n1ql_index_create( '', defer=defer, primary=True, ignore_exists=ignore_exists)
Create the primary index on the bucket. Equivalent to:: n1ql_index_create('', primary=True, **kwargs) :param bool defer: :param bool ignore_exists: .. seealso:: :meth:`create_index`
def clear_lock(self, back=None, remote=None):
    back = self.backends(back)
    cleared = []
    errors = []
    for fsb in back:
        fstr = '{0}.clear_lock'.format(fsb)
        if fstr in self.servers:
            good, bad = clear_lock(self.servers[fstr], fsb, remote=remote)
            cleared.extend(good)
            errors.extend(bad)
    return cleared, errors
Clear the update lock for the enabled fileserver backends back Only clear the update lock for the specified backend(s). The default is to clear the lock for all enabled backends remote If specified, then any remotes which contain the passed string will have their lock cleared.
def increment_bucket_count(self, value):
    if len(self._bounds) == 0:
        self._counts_per_bucket[0] += 1
        return 0
    for ii, bb in enumerate(self._bounds):
        if value < bb:
            self._counts_per_bucket[ii] += 1
            return ii
    else:
        last_bucket_index = len(self._bounds)
        self._counts_per_bucket[last_bucket_index] += 1
        return last_bucket_index
Increment the bucket count based on a given value from the user
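A standalone sketch of the same bucketing logic, to make the boundary handling concrete (bounds and names here are illustrative, not from the original class):

bounds = [10, 20, 30]    # bucket edges
counts = [0, 0, 0, 0]    # len(bounds) + 1 buckets; the last one catches values >= 30

def bucket_index(value):
    for i, b in enumerate(bounds):
        if value < b:
            return i
    return len(bounds)   # no bound exceeded the value -> overflow bucket

for v in (5, 10, 25, 99):
    counts[bucket_index(v)] += 1

print(counts)  # [1, 1, 1, 1]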
def ascii_listing2program_dump(self, basic_program_ascii, program_start=None): if program_start is None: program_start = self.DEFAULT_PROGRAM_START basic_lines = self.ascii_listing2basic_lines(basic_program_ascii, program_start) program_dump=self.listing.basic_lines2program_dump(basic_lines, program_start) assert isinstance(program_dump, bytearray), ( "is type: %s and not bytearray: %s" % (type(program_dump), repr(program_dump)) ) return program_dump
Convert an ASCII BASIC program listing into tokens. This token list can be used to insert it into the emulator RAM.
def _smcra_to_str(self, smcra, temp_dir='/tmp/'): temp_path = tempfile.mktemp( '.pdb', dir=temp_dir ) io = PDBIO() io.set_structure(smcra) io.save(temp_path) f = open(temp_path, 'r') string = f.read() f.close() os.remove(temp_path) return string
WHATIF's input is PDB-format files. Converts a SMCRA object to a PDB-formatted string.
def zGetRefresh(self): OpticalSystem._dde_link.zGetRefresh() OpticalSystem._dde_link.zSaveFile(self._sync_ui_file) self._iopticalsystem.LoadFile (self._sync_ui_file, False)
Copy lens in UI to headless ZOS COM server
def get_pid(name, path=None): if name not in list_(limit='running', path=path): raise CommandExecutionError('Container {0} is not running, can\'t determine PID'.format(name)) info = __salt__['cmd.run']('lxc-info -n {0}'.format(name)).split("\n") pid = [line.split(':')[1].strip() for line in info if re.match(r'\s*PID', line)][0] return pid
Returns a container pid. Throw an exception if the container isn't running. CLI Example: .. code-block:: bash salt '*' lxc.get_pid name
def update(self, **kwargs): self.inflate() for model in self._models: model.update(**kwargs) return self
Update all resources in this collection.
def json_data(self): return { "number": self.number, "type": self.type, "participant_id": self.participant_id, "question": self.question, "response": self.response, }
Return json description of a question.
def _expand_disk(disk): ret = {} ret.update(disk.__dict__) zone = ret['extra']['zone'] ret['extra']['zone'] = {} ret['extra']['zone'].update(zone.__dict__) return ret
Convert the libcloud Volume object into something more serializable.
def handle_msec_timestamp(self, m, master):
    if m.get_type() == 'GLOBAL_POSITION_INT':
        return
    msec = m.time_boot_ms
    if msec + 30000 < master.highest_msec:
        self.say('Time has wrapped')
        print('Time has wrapped', msec, master.highest_msec)
        self.status.highest_msec = msec
        for mm in self.mpstate.mav_master:
            mm.link_delayed = False
            mm.highest_msec = msec
        return
    master.highest_msec = msec
    if msec > self.status.highest_msec:
        self.status.highest_msec = msec
    if msec < self.status.highest_msec and len(self.mpstate.mav_master) > 1 and self.mpstate.settings.checkdelay:
        master.link_delayed = True
    else:
        master.link_delayed = False
special handling for MAVLink packets with a time_boot_ms field
def has_table(self, table): sql = self._grammar.compile_table_exists() table = self._connection.get_table_prefix() + table return len(self._connection.select(sql, [table])) > 0
Determine if the given table exists. :param table: The table :type table: str :rtype: bool
def write_config(config, filename=None): if not filename: filename = CONFIG_DEFAULT_PATH with open(filename, 'w') as f: json.dump(config, f, indent=4)
Write the provided configuration to a specific location. Args: config (dict): a dictionary with the configuration to load. filename (str): the name of the file that will store the new configuration. Defaults to ``None``. If ``None``, the HOME of the current user and the string ``.bigchaindb`` will be used.
def number_crossing_m(x, m): if not isinstance(x, (np.ndarray, pd.Series)): x = np.asarray(x) positive = x > m return np.where(np.bitwise_xor(positive[1:], positive[:-1]))[0].size
Calculates the number of crossings of x on m. A crossing is defined as two sequential values where the first value is lower than m and the next is greater, or vice-versa. If you set m to zero, you will get the number of zero crossings. :param x: the time series to calculate the feature of :type x: numpy.ndarray :param m: the threshold for the crossing :type m: float :return: the value of this feature :return type: int
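A small worked example using the function as defined above (numpy assumed available):

import numpy as np

x = np.array([3, -2, 4, 1, -5])
# signs relative to m=0: +, -, +, +, -  -> the sign flips at three consecutive pairs
print(number_crossing_m(x, 0))  # 3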
def mysql_aes_encrypt(val, key): assert isinstance(val, binary_type) or isinstance(val, text_type) assert isinstance(key, binary_type) or isinstance(key, text_type) k = _mysql_aes_key(_to_binary(key)) v = _mysql_aes_pad(_to_binary(val)) e = _mysql_aes_engine(k).encryptor() return e.update(v) + e.finalize()
Mysql AES encrypt value with secret key. :param val: Plain text value. :param key: The AES key. :returns: The encrypted AES value.
def getGenericInterface(interfaceVersion): error = EVRInitError() result = _openvr.VR_GetGenericInterface(interfaceVersion, byref(error)) _checkInitError(error.value) return result
Returns the interface of the specified version. This method must be called after VR_Init. The pointer returned is valid until VR_Shutdown is called.
def stack_pop(self): sp = self.regs.sp self.regs.sp = sp - self.arch.stack_change return self.memory.load(sp, self.arch.bytes, endness=self.arch.memory_endness)
Pops from the stack and returns the popped thing. The length will be the architecture word size.
def to_text(self, origin=None, relativize=True, **kw): return super(RRset, self).to_text(self.name, origin, relativize, self.deleting, **kw)
Convert the RRset into DNS master file format. @see: L{dns.name.Name.choose_relativity} for more information on how I{origin} and I{relativize} determine the way names are emitted. Any additional keyword arguments are passed on to the rdata to_text() method. @param origin: The origin for relative names, or None. @type origin: dns.name.Name object @param relativize: True if names should be relativized @type relativize: bool
def _pid_to_id(self, pid): return d1_common.url.joinPathElements( self._base_url, self._version_tag, "resolve", d1_common.url.encodePathElement(pid), )
Converts a pid to a URI that can be used as an OAI-ORE identifier.
def to_grayscale(self): gray_data = cv2.cvtColor(self.data, cv2.COLOR_RGB2GRAY) return GrayscaleImage(gray_data, frame=self.frame)
Converts the color image to grayscale using OpenCV. Returns ------- :obj:`GrayscaleImage` Grayscale image corresponding to original color image.
def predict(self, X, with_noise=True): m, v = self._predict(X, False, with_noise) return m, np.sqrt(v)
Predictions with the model. Returns posterior means and standard deviations at X. Note that this is different in GPy where the variances are given. Parameters: X (np.ndarray) - points to run the prediction for. with_noise (bool) - whether to add noise to the prediction. Default is True.
def printDiagnostics(exp, sequences, objects, args, verbosity=0): print "Experiment start time:", time.ctime() print "\nExperiment arguments:" pprint.pprint(args) r = sequences.objectConfusion() print "Average common pairs in sequences=", r[0], print ", features=",r[2] r = objects.objectConfusion() print "Average common pairs in objects=", r[0], print ", locations=",r[1], print ", features=",r[2] if verbosity > 0: print "\nObjects are:" for o in objects: pairs = objects[o] pairs.sort() print str(o) + ": " + str(pairs) print "\nSequences:" for i in sequences: print i,sequences[i] print "\nNetwork parameters:" pprint.pprint(exp.config)
Useful diagnostics for debugging.
def _bp(editor, force=False): eb = editor.window_arrangement.active_editor_buffer if not force and eb.has_unsaved_changes: editor.show_message(_NO_WRITE_SINCE_LAST_CHANGE_TEXT) else: editor.window_arrangement.go_to_previous_buffer()
Go to previous buffer.
def from_notebook_node(self, nb, resources=None, **kw): from weasyprint import HTML, CSS nb = copy.deepcopy(nb) output, resources = super(OneCodexPDFExporter, self).from_notebook_node( nb, resources=resources, **kw ) buf = BytesIO() HTML(string=output).write_pdf( buf, stylesheets=[CSS(os.path.join(ASSETS_PATH, CSS_TEMPLATE_FILE))] ) buf.seek(0) return buf.read(), resources
Takes output of OneCodexHTMLExporter and runs Weasyprint to get a PDF.
def profile_associated(role_name, profile_name, region, key, keyid, profile):
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    try:
        profiles = conn.list_instance_profiles_for_role(role_name)
    except boto.exception.BotoServerError as e:
        log.debug(e)
        return False
    profiles = profiles.list_instance_profiles_for_role_response
    profiles = profiles.list_instance_profiles_for_role_result
    profiles = profiles.instance_profiles
    for profile in profiles:
        if profile.instance_profile_name == profile_name:
            return True
    return False
Check to see if an instance profile is associated with an IAM role. CLI Example: .. code-block:: bash salt myminion boto_iam.profile_associated myirole myiprofile
def step(step_name=None):
    def decorator(func):
        if step_name:
            name = step_name
        else:
            name = func.__name__
        add_step(name, func)
        return func
    return decorator
Decorates functions that will be called by the `run` function. Decorator version of `add_step`. step name defaults to name of function. The function's argument names and keyword argument values will be matched to registered variables when the function needs to be evaluated by Orca. The argument name "iter_var" may be used to have the current iteration variable injected.
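A usage sketch of the decorator above; the step names are hypothetical, and registration happens through the add_step call shown in the code:

@step()
def grow_households():
    ...   # registered under its own name, 'grow_households'

@step('build_network')
def _network_step():
    ...   # registered under the explicit name 'build_network'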
def decode(self, bytes):
    opcode = struct.unpack(">H", bytes[0:2])[0]
    nbytes = struct.unpack("B", bytes[2:3])[0]
    name = None
    args = []
    if opcode in self.opcodes:
        defn = self.opcodes[opcode]
        name = defn.name
        stop = 3
        for arg in defn.argdefns:
            start = stop
            stop = start + arg.nbytes
            if arg.fixed:
                pass
            else:
                args.append(arg.decode(bytes[start:stop]))
    return self.create(name, *args)
Decodes the given bytes according to this AIT Command Definition.
def get_sequence(self, chr=None, start=None, end=None, dir=None, rng=None):
    if rng:
        chr = rng.chr
        start = rng.start
        end = rng.end
        dir = rng.direction
    if not start:
        start = 1
    if not end:
        end = self.fai[chr]['length']
    if not dir:
        dir = '+'
    if dir == '-':
        return sequence.Sequence.rc(self._seqs[chr][start-1:end])
    return self._seqs[chr][start-1:end]
Get a sequence. :param chr: :param start: :param end: :param dir: character +/- :param rng: :type chr: string :type start: int :type end: int :type dir: char :type rng: GenomicRange :return: sequence :rtype: string
def ask_question(self, question_text, question=None): if question is not None: q = question.to_dict() else: q = WatsonQuestion(question_text).to_dict() r = requests.post(self.url + '/question', json={'question': q}, headers={ 'Accept': 'application/json', 'X-SyncTimeout': 30 }, auth=(self.username, self.password)) try: response_json = r.json() except ValueError: raise Exception('Failed to parse response JSON') return WatsonAnswer(response_json)
Ask Watson a question via the Question and Answer API :param question_text: question to ask Watson :type question_text: str :param question: if question_text is not provided, a Question object representing the question to ask Watson :type question: WatsonQuestion :return: Answer
def validate(self, password, user=None): user_inputs = [] if user is not None: for attribute in self.user_attributes: if hasattr(user, attribute): user_inputs.append(getattr(user, attribute)) results = zxcvbn(password, user_inputs=user_inputs) if results.get('score', 0) < self.min_score: feedback = ', '.join( results.get('feedback', {}).get('suggestions', [])) raise ValidationError(_(feedback), code=self.code, params={})
Validate method, run zxcvbn and check score.
def start_consuming(self, to_tuple=False, auto_decode=True): while not self.is_closed: self.process_data_events( to_tuple=to_tuple, auto_decode=auto_decode ) if self.consumer_tags: sleep(IDLE_WAIT) continue break
Start consuming messages. :param bool to_tuple: Should incoming messages be converted to a tuple before delivery. :param bool auto_decode: Auto-decode strings when possible. :raises AMQPChannelError: Raises if the channel encountered an error. :raises AMQPConnectionError: Raises if the connection encountered an error. :return:
def override_unit(self, unit, parse_strict='raise'): self._unit = parse_unit(unit, parse_strict=parse_strict)
Forcefully reset the unit of these data Use of this method is discouraged in favour of `to()`, which performs accurate conversions from one unit to another. The method should really only be used when the original unit of the array is plain wrong. Parameters ---------- unit : `~astropy.units.Unit`, `str` the unit to force onto this array parse_strict : `str`, optional how to handle errors in the unit parsing, default is to raise the underlying exception from `astropy.units` Raises ------ ValueError if a `str` cannot be parsed as a valid unit
def read_csv( filename: Union[PathLike, Iterator[str]], delimiter: Optional[str]=',', first_column_names: Optional[bool]=None, dtype: str='float32', ) -> AnnData: return read_text(filename, delimiter, first_column_names, dtype)
Read ``.csv`` file. Same as :func:`~anndata.read_text` but with default delimiter ``','``. Parameters ---------- filename Data file. delimiter Delimiter that separates data within text file. If ``None``, will split at arbitrary number of white spaces, which is different from enforcing splitting at single white space ``' '``. first_column_names Assume the first column stores row names. dtype Numpy data type.
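The signature above matches anndata's read_csv; assuming so, a minimal usage sketch with hypothetical file paths:

import anndata as ad

adata = ad.read_csv('counts.csv')                      # comma-delimited, float32 by default
adata_ws = ad.read_csv('counts.txt', delimiter=None)   # split on arbitrary whitespace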
def layout_json(self):
    file_fqpn = os.path.join(self.app_path, 'layout.json')
    if self._layout_json is None:
        if os.path.isfile(file_fqpn):
            try:
                with open(file_fqpn, 'r') as fh:
                    self._layout_json = json.load(fh)
            except ValueError as e:
                self.handle_error('Failed to load "{}" file ({}).'.format(file_fqpn, e))
        else:
            self.handle_error('File "{}" could not be found.'.format(file_fqpn))
    return self._layout_json
Return layout.json contents.
def traverse_inventory(self, item_filter=None):
    not self._intentory_raw and self._get_inventory_raw()
    for item in self._intentory_raw['rgDescriptions'].values():
        tags = item['tags']
        for tag in tags:
            internal_name = tag['internal_name']
            if item_filter is None or internal_name == item_filter:
                item_type = Item
                if internal_name == TAG_ITEM_CLASS_CARD:
                    item_type = Card
                appid = item['market_fee_app']
                title = item['name']
                yield item_type(appid, title)
Generates market Item objects for each inventory item. :param str item_filter: See `TAG_ITEM_CLASS_` constants from .market module.
def encode_date_optional_time(obj): if isinstance(obj, datetime.datetime): return timezone("UTC").normalize(obj.astimezone(timezone("UTC"))).strftime('%Y-%m-%dT%H:%M:%SZ') raise TypeError("{0} is not JSON serializable".format(repr(obj)))
ISO encode timezone-aware datetimes
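Raising TypeError for unhandled types is exactly what json.dumps expects from a default hook, so the function can be plugged in directly. A sketch, assuming the timezone helper used above is pytz's:

import json
import datetime
from pytz import timezone

payload = {'created': timezone('US/Eastern').localize(datetime.datetime(2020, 1, 1, 12, 0))}
print(json.dumps(payload, default=encode_date_optional_time))
# {"created": "2020-01-01T17:00:00Z"}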
def _data_flow_chain(self): if self.data_producer is None: return [] res = [] ds = self.data_producer while not ds.is_reader: res.append(ds) ds = ds.data_producer res.append(ds) res = res[::-1] return res
Get a list of all elements in the data flow graph. The first element is the original source, the next one reads from the prior and so on and so forth. Returns ------- list: list of data sources
def _getFullPolicyName(policy_item, policy_name, return_full_policy_names, adml_language):
    adml_data = _get_policy_resources(language=adml_language)
    if policy_name in adm_policy_name_map[return_full_policy_names]:
        return adm_policy_name_map[return_full_policy_names][policy_name]
    if return_full_policy_names and 'displayName' in policy_item.attrib:
        fullPolicyName = _getAdmlDisplayName(adml_data, policy_item.attrib['displayName'])
        if fullPolicyName:
            adm_policy_name_map[return_full_policy_names][policy_name] = fullPolicyName
            policy_name = fullPolicyName
    elif return_full_policy_names and 'id' in policy_item.attrib:
        fullPolicyName = _getAdmlPresentationRefId(adml_data, policy_item.attrib['id'])
        if fullPolicyName:
            adm_policy_name_map[return_full_policy_names][policy_name] = fullPolicyName
            policy_name = fullPolicyName
    policy_name = policy_name.rstrip(':').rstrip()
    return policy_name
helper function to retrieve the full policy name if needed
def join(cls, splits, *namables):
    isplits = []
    unbound = []
    for ref in splits:
        if isinstance(ref, Ref):
            resolved = False
            for namable in namables:
                try:
                    value = namable.find(ref)
                    resolved = True
                    break
                except Namable.Error:
                    continue
            if resolved:
                isplits.append(value)
            else:
                isplits.append(ref)
                unbound.append(ref)
        else:
            isplits.append(ref)
    return (''.join(map(str if Compatibility.PY3 else unicode, isplits)), unbound)
Interpolate strings. :params splits: The output of Parser.split(string) :params namables: A sequence of Namable objects in which the interpolation should take place. Returns 2-tuple containing: joined string, list of unbound object ids (potentially empty)
def mk_dict(results, description):
    rows = []
    for row in results:
        row_dict = {}
        for idx in range(len(row)):
            col = description[idx][0]
            row_dict[col] = row[idx]
        rows.append(row_dict)
    return rows
Given a result list and description sequence, return a list of dictionaries.
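A self-contained usage sketch with a DB-API cursor, whose description tuples carry the column name at index 0 (table and values are illustrative):

import sqlite3

con = sqlite3.connect(':memory:')
cur = con.cursor()
cur.execute('CREATE TABLE users (id INTEGER, name TEXT)')
cur.execute("INSERT INTO users VALUES (1, 'alice')")
cur.execute('SELECT id, name FROM users')
print(mk_dict(cur.fetchall(), cur.description))  # [{'id': 1, 'name': 'alice'}]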
def avro_name(url): frg = urllib.parse.urldefrag(url)[1] if frg != '': if '/' in frg: return frg[frg.rindex('/') + 1:] return frg return url
Turn a URL into an Avro-safe name. If the URL has no fragment, return this plain URL. Extract either the last part of the URL fragment past the slash, otherwise the whole fragment.
def _build_filter_methods(cls, **meths): doc = make_ifilter = lambda ftype: (lambda self, *a, **kw: self.ifilter(forcetype=ftype, *a, **kw)) make_filter = lambda ftype: (lambda self, *a, **kw: self.filter(forcetype=ftype, *a, **kw)) for name, ftype in (meths.items() if py3k else meths.iteritems()): ifilter = make_ifilter(ftype) filter = make_filter(ftype) ifilter.__doc__ = doc.format(name, "ifilter", ftype) filter.__doc__ = doc.format(name, "filter", ftype) setattr(cls, "ifilter_" + name, ifilter) setattr(cls, "filter_" + name, filter)
Given Node types, build the corresponding i?filter shortcuts. These should be given as keys storing the method's base name paired with values storing the corresponding :class:`.Node` type. For example, the dict may contain the pair ``("templates", Template)``, which will produce the methods :meth:`ifilter_templates` and :meth:`filter_templates`, which are shortcuts for :meth:`ifilter(forcetype=Template) <ifilter>` and :meth:`filter(forcetype=Template) <filter>`, respectively. These shortcuts are added to the class itself, with an appropriate docstring.
def background(cl, proto=EchoProcess, **kw): if isinstance(cl, basestring): cl = shlex.split(cl) if not cl[0].startswith('/'): path = which(cl[0]) assert path, '%s not found' % cl[0] cl[0] = path[0] d = Deferred() proc = reactor.spawnProcess( proto(name=basename(cl[0]), deferred=d), cl[0], cl, env=os.environ, **kw) daycare.add(proc.pid) return d
Use the reactor to run a process in the background. Keep the pid around. ``proto'' may be any callable which returns an instance of ProcessProtocol
def write_cert_items(xml_tree, records, api_ver=3, app_id=None, app_ver=None):
    if not records or not should_include_certs(app_id, app_ver):
        return
    certItems = etree.SubElement(xml_tree, 'certItems')
    for item in records:
        if item.get('subject') and item.get('pubKeyHash'):
            cert = etree.SubElement(certItems, 'certItem',
                                    subject=item['subject'],
                                    pubKeyHash=item['pubKeyHash'])
        else:
            cert = etree.SubElement(certItems, 'certItem',
                                    issuerName=item['issuerName'])
        serialNumber = etree.SubElement(cert, 'serialNumber')
        serialNumber.text = item['serialNumber']
Generate the certificate blocklists. <certItem issuerName="MIGQMQswCQYD...IENB"> <serialNumber>UoRGnb96CUDTxIqVry6LBg==</serialNumber> </certItem> or <certItem subject='MCIxIDAeBgNVBAMMF0Fub3RoZXIgVGVzdCBFbmQtZW50aXR5' pubKeyHash='VCIlmPM9NkgFQtrs4Oa5TeFcDu6MWRTKSNdePEhOgD8='> </certItem>
def process_formdata(self, valuelist): if valuelist: self.data = '\n'.join([ x.strip() for x in filter(lambda x: x, '\n'.join(valuelist).splitlines()) ])
Process form data.
def generate_big_urls_glove(bigurls=None):
    bigurls = bigurls or {}
    for num_dim in (50, 100, 200, 300):
        for suffixes, num_words in zip(
                ('sm -sm _sm -small _small'.split(),
                 'med -med _med -medium _medium'.split(),
                 'lg -lg _lg -large _large'.split()),
                (6, 42, 840)):
            for suf in suffixes[:-1]:
                name = 'glove' + suf + str(num_dim)
                dirname = 'glove.{num_words}B'.format(num_words=num_words)
                filename = dirname + '.{num_dim}d.w2v.txt'.format(num_dim=num_dim)
                bigurl_tuple = BIG_URLS['glove' + suffixes[-1]]
                bigurls[name] = list(bigurl_tuple[:2])
                bigurls[name].append(os.path.join(dirname, filename))
                bigurls[name].append(load_glove)
                bigurls[name] = tuple(bigurls[name])
    return bigurls
Generate a dictionary of URLs for various combinations of GloVe training set sizes and dimensionality
def normpath(path): normalized = os.path.join(*path.split("/")) if os.path.isabs(path): return os.path.abspath("/") + normalized else: return normalized
Normalize UNIX path to a native path.
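A short sketch of the behaviour; results depend on the host OS separator:

print(normpath('pkg/sub/mod.py'))  # 'pkg/sub/mod.py' on POSIX, 'pkg\\sub\\mod.py' on Windows
print(normpath('/etc/hosts'))      # absolute input: re-anchored via os.path.abspath('/')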
def _from_dict(cls, _dict): args = {} if 'key' in _dict: args['key'] = Key._from_dict(_dict.get('key')) if 'value' in _dict: args['value'] = Value._from_dict(_dict.get('value')) return cls(**args)
Initialize a KeyValuePair object from a json dictionary.
def addError(self, test, exception): result = self._handle_result( test, TestCompletionStatus.error, exception=exception) self.errors.append(result) self._mirror_output = True
Register that a test ended in an error. Parameters ---------- test : unittest.TestCase The test that has completed. exception : tuple ``exc_info`` tuple ``(type, value, traceback)``.
def join(self, timeout=None): remaining = timeout while self._cb_poll and (remaining is None or remaining > 0): now = time.time() rv = self._cb_poll.poll(remaining) if remaining is not None: remaining -= (time.time() - now) for command_buffer, event in rv: if command_buffer.has_pending_requests: if event == 'close': self._try_reconnect(command_buffer) elif event == 'write': self._send_or_reconnect(command_buffer) elif event in ('read', 'close'): try: command_buffer.wait_for_responses(self) finally: self._release_command_buffer(command_buffer) if self._cb_poll and timeout is not None: raise TimeoutError('Did not receive all data in time.')
Waits for all outstanding responses to come back or the timeout to be hit.
def tobinary(self): entrylen = struct.calcsize(self.ENTRYSTRUCT) rslt = [] for (dpos, dlen, ulen, flag, typcd, nm) in self.data: nmlen = len(nm) + 1 toclen = nmlen + entrylen if toclen % 16 == 0: pad = '\0' else: padlen = 16 - (toclen % 16) pad = '\0'*padlen nmlen = nmlen + padlen rslt.append(struct.pack(self.ENTRYSTRUCT+`nmlen`+'s', nmlen+entrylen, dpos, dlen, ulen, flag, typcd, nm+pad)) return ''.join(rslt)
Return self as a binary string.
def lux_unit(self): if CONST.UNIT_LUX in self._get_status(CONST.LUX_STATUS_KEY): return CONST.LUX return None
Get unit of lux.
def convert_crop(node, **kwargs): name, inputs, attrs = get_inputs(node, kwargs) num_inputs = len(inputs) y, x = list(parse_helper(attrs, "offset", [0, 0])) h, w = list(parse_helper(attrs, "h_w", [0, 0])) if num_inputs > 1: h, w = kwargs["out_shape"][-2:] border = [x, y, x + w, y + h] crop_node = onnx.helper.make_node( "Crop", inputs=[inputs[0]], outputs=[name], border=border, scale=[1, 1], name=name ) logging.warning( "Using an experimental ONNX operator: Crop. " \ "Its definition can change.") return [crop_node]
Map MXNet's crop operator attributes to onnx's Crop operator and return the created node.
def info(dev): if 'sys' in dev: qtype = 'path' else: qtype = 'name' cmd = 'udevadm info --export --query=all --{0}={1}'.format(qtype, dev) udev_result = __salt__['cmd.run_all'](cmd, output_loglevel='quiet') if udev_result['retcode'] != 0: raise CommandExecutionError(udev_result['stderr']) return _parse_udevadm_info(udev_result['stdout'])[0]
Extract all info delivered by udevadm CLI Example: .. code-block:: bash salt '*' udev.info /dev/sda salt '*' udev.info /sys/class/net/eth0
def flat_map(self, flatmap_fn): op = Operator( _generate_uuid(), OpType.FlatMap, "FlatMap", flatmap_fn, num_instances=self.env.config.parallelism) return self.__register(op)
Applies a flatmap operator to the stream. Attributes: flatmap_fn (function): The user-defined logic of the flatmap (e.g. split()).
def merge(self, *args): values = [] for entry in args: values = values + list(entry.items()) return dict(values)
Merge multiple dictionary objects into one. :param variadic args: Multiple dictionary items :return dict
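A quick sketch showing that later dictionaries win on key collisions, since their items land later in the flattened list; obj stands for any instance of the class defining merge above:

defaults = {'host': 'localhost', 'port': 8080}
overrides = {'port': 9090}
print(obj.merge(defaults, overrides))  # {'host': 'localhost', 'port': 9090}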
def and_return_future(self, *return_values): futures = [] for value in return_values: future = _get_future() future.set_result(value) futures.append(future) return self.and_return(*futures)
Similar to `and_return` but the doubled method returns a future. :param object return_values: The values the double will return when called.
def _get_data(self, url, accept=None): if self.parsed_endpoint.scheme == 'https': conn = httplib.HTTPSConnection(self.parsed_endpoint.netloc) else: conn = httplib.HTTPConnection(self.parsed_endpoint.netloc) head = { "User-Agent": USER_AGENT, API_TOKEN_HEADER_NAME: self.api_token, } if self.api_version in ['0.1', '0.01a']: head[API_VERSION_HEADER_NAME] = self.api_version if accept: head['Accept'] = accept conn.request("GET", url, "", head) resp = conn.getresponse() self._handle_response_errors('GET', url, resp) content_type = resp.getheader('content-type') if 'application/json' in content_type: return json.loads(resp.read()) return resp.read()
GETs the resource at url and returns the raw response. If the accept parameter is not None, it is passed as the Accept header.
def Close(self): if not self._connection: raise RuntimeError('Cannot close database not opened.') self._connection.commit() self._connection.close() self._connection = None self._cursor = None self.filename = None self.read_only = None
Closes the database file. Raises: RuntimeError: if the database is not opened.
def build_acl(self, tenant_name, rule): if rule['action'] == 'allow': action = 'permit' else: action = 'deny' acl_str = "access-list %(tenant)s extended %(action)s %(prot)s " acl = acl_str % {'tenant': tenant_name, 'action': action, 'prot': rule.get('protocol')} src_ip = self.get_ip_address(rule.get('source_ip_address')) ip_acl = self.build_acl_ip(src_ip) acl += ip_acl acl += self.build_acl_port(rule.get('source_port')) dst_ip = self.get_ip_address(rule.get('destination_ip_address')) ip_acl = self.build_acl_ip(dst_ip) acl += ip_acl acl += self.build_acl_port(rule.get('destination_port'), enabled=rule.get('enabled')) return acl
Build the ACL.
def get_task_summary(self, task_name): params = {'instancesummary': '', 'taskname': task_name} resp = self._client.get(self.resource(), params=params) map_reduce = resp.json().get('Instance') if map_reduce: json_summary = map_reduce.get('JsonSummary') if json_summary: summary = Instance.TaskSummary(json.loads(json_summary)) summary.summary_text = map_reduce.get('Summary') summary.json_summary = json_summary return summary
Get a task's summary, mostly used for MapReduce. :param task_name: task name :return: summary as a dict parsed from JSON :rtype: dict
def log_rule_info(self): for c in sorted(self.broker.get_by_type(rule), key=dr.get_name): v = self.broker[c] _type = v.get("type") if _type: if _type != "skip": msg = "Running {0} ".format(dr.get_name(c)) self.logit(msg, self.pid, self.user, "insights-run", logging.INFO) else: msg = "Rule skipped {0} ".format(dr.get_name(c)) self.logit(msg, self.pid, self.user, "insights-run", logging.WARNING)
Collects rule information and sends it to the logit function to log to syslog.
def readPlist(pathOrFile): didOpen = False result = None if isinstance(pathOrFile, (bytes, unicode)): pathOrFile = open(pathOrFile, 'rb') didOpen = True try: reader = PlistReader(pathOrFile) result = reader.parse() except NotBinaryPlistException as e: try: pathOrFile.seek(0) result = None if hasattr(plistlib, 'loads'): contents = None if isinstance(pathOrFile, (bytes, unicode)): with open(pathOrFile, 'rb') as f: contents = f.read() else: contents = pathOrFile.read() result = plistlib.loads(contents) else: result = plistlib.readPlist(pathOrFile) result = wrapDataObject(result, for_binary=True) except Exception as e: raise InvalidPlistException(e) finally: if didOpen: pathOrFile.close() return result
Raises NotBinaryPlistException, InvalidPlistException
def add_tagfile(self, path, timestamp=None): self.self_check() checksums = {} if os.path.isdir(path): return with open(path, "rb") as tag_file: checksums[SHA1] = checksum_copy(tag_file, hasher=hashlib.sha1) tag_file.seek(0) checksums[SHA256] = checksum_copy(tag_file, hasher=hashlib.sha256) tag_file.seek(0) checksums[SHA512] = checksum_copy(tag_file, hasher=hashlib.sha512) rel_path = _posix_path(os.path.relpath(path, self.folder)) self.tagfiles.add(rel_path) self.add_to_manifest(rel_path, checksums) if timestamp is not None: self._file_provenance[rel_path] = {"createdOn": timestamp.isoformat()}
Add tag files to our research object.
def create_or_update_export_configuration(self, export_config): search_string = json.dumps(obj=export_config.search.as_dict()) user_id = export_config.user_id password = export_config.password target_url = export_config.target_url enabled = export_config.enabled name = export_config.name description = export_config.description export_type = export_config.type if export_config.config_id is not None: self.con.execute( 'UPDATE archive_exportConfig c ' 'SET c.searchString = %s, c.targetUrl = %s, c.targetUser = %s, c.targetPassword = %s, ' 'c.exportName = %s, c.description = %s, c.active = %s, c.exportType = %s ' 'WHERE c.exportConfigId = %s', (search_string, target_url, user_id, password, name, description, enabled, export_type, export_config.config_id)) else: item_id = mp.get_hash(mp.now(), name, export_type) self.con.execute( 'INSERT INTO archive_exportConfig ' '(searchString, targetUrl, targetUser, targetPassword, ' 'exportName, description, active, exportType, exportConfigId) ' 'VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s) ', (search_string, target_url, user_id, password, name, description, enabled, export_type, item_id)) export_config.config_id = item_id return export_config
Create a new file export configuration or update an existing one :param ExportConfiguration export_config: a :class:`meteorpi_model.ExportConfiguration` containing the specification for the export. If this doesn't include a 'config_id' field it will be inserted as a new record in the database and the field will be populated, updating the supplied object. If it does exist already this will update the other properties in the database to match the supplied object. :returns: The supplied :class:`meteorpi_model.ExportConfiguration` as stored in the DB. This is guaranteed to have its 'config_id' string field defined.
def segment(f, output, target_duration, mpegts): try: target_duration = int(target_duration) except ValueError: exit('Error: Invalid target duration.') try: mpegts = int(mpegts) except ValueError: exit('Error: Invalid MPEGTS value.') WebVTTSegmenter().segment(f, output, target_duration, mpegts)
Segment command.
def content(self): if not self._content: self._content = self._read() return self._content
Get the file contents. This property is cached. The file is only read once.
def unmapped(name, config='/etc/crypttab', persist=True, immediate=False): ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''} if immediate: active = __salt__['cryptdev.active']() if name in active.keys(): if __opts__['test']: ret['result'] = None ret['commment'] = 'Device would be unmapped immediately' else: cryptsetup_result = __salt__['cryptdev.close'](name) if cryptsetup_result: ret['changes']['cryptsetup'] = 'Device unmapped using cryptsetup' else: ret['changes']['cryptsetup'] = 'Device failed to unmap using cryptsetup' ret['result'] = False if persist and not __opts__['test']: crypttab_result = __salt__['cryptdev.rm_crypttab'](name, config=config) if crypttab_result: if crypttab_result == 'change': ret['changes']['crypttab'] = 'Entry removed from {0}'.format(config) else: ret['changes']['crypttab'] = 'Unable to remove entry in {0}'.format(config) ret['result'] = False return ret
Ensure that a device is unmapped name The name to ensure is not mapped config Set an alternative location for the crypttab if the map is persistent. Default is ``/etc/crypttab`` persist Set if the map should be removed from the crypttab. Default is ``True`` immediate Set if the device should be unmapped immediately. Default is ``False``.
def publish_gsi_notification(table_key, gsi_key, message, message_types, subject=None):
    topic = get_gsi_option(table_key, gsi_key, 'sns_topic_arn')
    if not topic:
        return
    for message_type in message_types:
        if message_type in get_gsi_option(table_key, gsi_key, 'sns_message_types'):
            __publish(topic, message, subject)
            return
Publish a notification for a specific GSI :type table_key: str :param table_key: Table configuration option key name :type gsi_key: str :param gsi_key: Table configuration option key name :type message: str :param message: Message to send via SNS :type message_types: list :param message_types: List with types: - scale-up - scale-down - high-throughput-alarm - low-throughput-alarm :type subject: str :param subject: Subject to use for e-mail notifications :returns: None
def get_log_hierarchy_session(self, proxy): if not self.supports_log_hierarchy(): raise errors.Unimplemented() return sessions.LogHierarchySession(proxy=proxy, runtime=self._runtime)
Gets the ``OsidSession`` associated with the log hierarchy service. arg: proxy (osid.proxy.Proxy): a proxy return: (osid.logging.LogHierarchySession) - a ``LogHierarchySession`` for logs raise: NullArgument - ``proxy`` is ``null`` raise: OperationFailed - unable to complete request raise: Unimplemented - ``supports_log_hierarchy()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_log_hierarchy()`` is ``true``.*
def bulk_copy(self, ids): schema = self.GET_SCHEMA return self.service.bulk_copy(self.base, self.RESOURCE, ids, schema)
Bulk copy a set of configs. :param ids: Int list of config IDs. :return: :class:`configs.Config <configs.Config>` list
def readMixedArray(self): self.stream.read_ulong() obj = pyamf.MixedArray() self.context.addObject(obj) attrs = self.readObjectAttributes(obj) for key in attrs.keys(): try: key = int(key) except ValueError: pass obj[key] = attrs[key] return obj
Read mixed array. @rtype: L{pyamf.MixedArray}
def get_table_names(self, connection, schema=None, **kw): return self._get_table_or_view_names('r', connection, schema, **kw)
Return a list of table names for `schema`. Overrides interface :meth:`~sqlalchemy.engine.interfaces.Dialect.get_table_names`.
def load_user_config(args, log):
    if not os.path.exists(_CONFIG_PATH):
        err_str = (
            "Configuration file does not exists ({}).\n".format(_CONFIG_PATH) +
            "Run `python -m astrocats setup` to configure.")
        log_raise(log, err_str)
    config = json.load(open(_CONFIG_PATH, 'r'))
    setattr(args, _BASE_PATH_KEY, config[_BASE_PATH_KEY])
    log.debug("Loaded configuration: {}: {}".format(_BASE_PATH_KEY, config[_BASE_PATH_KEY]))
    return args
Load settings from the user's configuration file, and add them to `args`. Settings are loaded from the configuration file in the user's home directory. Those parameters are added (as attributes) to the `args` object. Arguments --------- args : `argparse.Namespace` Namespace object to which configuration attributes will be added. Returns ------- args : `argparse.Namespace` Namespace object with added attributes.
def change_password(self, username, newpassword, raise_on_error=False): response = self._put(self.rest_url + "/user/password", data=json.dumps({"value": newpassword}), params={"username": username}) if response.ok: return True if raise_on_error: raise RuntimeError(response.json()['message']) return False
Change new password for a user Args: username: The account username. newpassword: The account new password. raise_on_error: optional (default: False) Returns: True: Succeeded False: If unsuccessful
def set_system_lock(cls, redis, name, timeout): pipeline = redis.pipeline() pipeline.zadd(name, SYSTEM_LOCK_ID, time.time() + timeout) pipeline.expire(name, timeout + 10) pipeline.execute()
Set system lock for the semaphore. Sets a system lock that will expire in timeout seconds. This overrides all other locks. Existing locks cannot be renewed and no new locks will be permitted until the system lock expires. Arguments: redis: Redis client name: Name of lock. Used as ZSET key. timeout: Timeout in seconds for system lock
def _send_output(self, message_body=None): self._buffer.extend((bytes(b""), bytes(b""))) msg = bytes(b"\r\n").join(self._buffer) del self._buffer[:] if isinstance(message_body, bytes): msg += message_body message_body = None self.send(msg) if message_body is not None: self.send(message_body)
Send the currently buffered request and clear the buffer. Appends an extra \\r\\n to the buffer. A message_body may be specified, to be appended to the request.