code: string (lengths 51–2.38k)
docstring: string (lengths 4–15.2k)
def evaluate_accuracy(data_iterator, net):
    acc = mx.metric.Accuracy()
    for data, label in data_iterator:
        output = net(data)
        predictions = nd.argmax(output, axis=1)
        predictions = predictions.reshape((-1, 1))
        acc.update(preds=predictions, labels=label)
    return acc.get()[1]
Evaluate the accuracy of `net` over all batches yielded by the given data iterator.
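A minimal usage sketch, assuming MXNet is installed and that `net` is any callable model (the Dense block here is a placeholder):

import mxnet as mx
from mxnet import nd, gluon

net = gluon.nn.Dense(10)   # placeholder model
net.initialize()
batch = (nd.random.normal(shape=(32, 20)), nd.zeros((32, 1)))
print(evaluate_accuracy([batch], net))  # a one-batch "iterator"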
def update(self, _attributes=None, **attributes):
    if _attributes is not None:
        attributes.update(_attributes)

    instance = self.get_results()

    return instance.fill(attributes).save()
Update the parent model on the relationship.

:param attributes: The update attributes
:type attributes: dict

:rtype: mixed
def delete(self, container_id=None, sudo=None):
    sudo = self._get_sudo(sudo)
    container_id = self.get_container_id(container_id)
    cmd = self._init_command('delete')
    cmd.append(container_id)
    return self._run_and_return(cmd, sudo=sudo)
Delete an instance based on container_id.

Parameters
==========
container_id: the container_id to delete
sudo: whether to issue the command with sudo (or not).
      A container started with sudo will belong to the root user;
      if started by a user, that user needs to control deleting it.
      If the user doesn't set it to True/False, we use the client's self.sudo.

Returns
=======
return_code: the return code from the delete command.
             0 indicates a successful delete, 255 indicates not.
def add_multiple(self, flags):
    if not isinstance(flags, list):
        raise TypeError("Expected list of flags, got object of type {}".format(type(flags)))
    for flag in flags:
        if isinstance(flag, Flag):
            self.add_item(flag)
        elif isinstance(flag, tuple):
            try:
                item = Flag(*flag)
                self.add_item(item)
            except TypeError:
                raise TypeError("Invalid arguments to initialize a flag definition, "
                                "expect ({0} [, {1}]) but got {2}"
                                .format(", ".join(Flag.REQUIRED_FIELDS),
                                        ", ".join(Flag.OPTIONAL_FIELDS),
                                        flag))
Add multiple command line flags.

Arguments:
    flags (:obj:`list` of :obj:`tuple`): List of flags in tuples
        (name, flag_type, description, (optional) default)

Raises:
    TypeError: Provided wrong arguments or arguments of wrong types;
        method will raise TypeError
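A usage sketch, assuming Flag's required fields are (name, flag_type, description) with an optional default; `registry` is a hypothetical instance of the containing class:

flags = [
    ("verbose", bool, "Enable verbose output"),
    ("retries", int, "Number of retry attempts", 3),
]
registry.add_multiple(flags)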
def load_dwg(file_obj, **kwargs):
    # read the raw DWG bytes, convert them to DXF, then load that stream
    data = file_obj.read()
    converted = _teigha_convert(data)
    result = load_dxf(util.wrap_as_stream(converted))
    return result
Load DWG files by converting them to DXF files using TeighaFileConverter.

Parameters
-------------
file_obj : file-like object

Returns
-------------
loaded : dict
    kwargs for a Path2D constructor
def list_qos_rule_types(self, retrieve_all=True, **_params):
    return self.list('rule_types', self.qos_rule_types_path,
                     retrieve_all, **_params)
List available QoS rule types.
def _set_textarea(el, value):
    if isinstance(value, dict):
        el.text = value["val"]
    elif type(value) in [list, tuple]:
        el.text = "\n\n".join(
            "-- %s --\n%s" % (item["source"], item["val"])
            for item in value
        )
    else:
        el.text = value
Set content of given textarea element `el` to `value`.

Args:
    el (obj): Reference to textarea element you wish to set.
    value (obj/list): Value to which the `el` will be set.
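A sketch of the three value shapes the helper accepts, assuming `el` is an lxml-style element with a .text attribute:

_set_textarea(el, "plain text")            # plain string
_set_textarea(el, {"val": "from a dict"})  # dict with "val"
_set_textarea(el, [                        # list of annotated values
    {"source": "user", "val": "first"},
    {"source": "import", "val": "second"},
])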
def disconnect_socket(self):
    self.running = False
    if self.socket is not None:
        if self.__need_ssl():
            try:
                self.socket = self.socket.unwrap()
            except Exception:
                _, e, _ = sys.exc_info()
                log.warning(e)
        elif hasattr(socket, 'SHUT_RDWR'):
            try:
                self.socket.shutdown(socket.SHUT_RDWR)
            except socket.error:
                _, e, _ = sys.exc_info()
                if get_errno(e) != errno.ENOTCONN:
                    log.warning("Unable to issue SHUT_RDWR on socket because of error '%s'", e)

    if self.socket is not None:
        try:
            self.socket.close()
        except socket.error:
            _, e, _ = sys.exc_info()
            log.warning("Unable to close socket because of error '%s'", e)
    self.current_host_and_port = None
    self.socket = None
    self.notify('disconnected')
Disconnect the underlying socket connection
def find_geom(geom, geoms):
    for i, g in enumerate(geoms):
        if g is geom:
            return i
Returns the index of a geometry in a list of geometries, avoiding the expensive equality checks of the `in` operator.
def get_item_dicts(self, buckets=None, results=15, start=0, item_ids=None):
    kwargs = {}
    kwargs['bucket'] = buckets or []
    kwargs['item_id'] = item_ids or []
    response = self.get_attribute("read", results=results, start=start, **kwargs)
    rval = ResultList(response['catalog']['items'])
    if item_ids:
        rval.start = 0
        rval.total = len(response['catalog']['items'])
    else:
        rval.start = response['catalog']['start']
        rval.total = response['catalog']['total']
    return rval
Returns data from the catalog; also expanded for the requested buckets.

Args:

Kwargs:
    buckets (list): A list of strings specifying which buckets to retrieve
    results (int): An integer number of results to return
    start (int): An integer starting value for the result set

Returns:
    A list of dicts representing objects in the catalog; list has
    additional attributes 'start' and 'total'

Example:
    >>> c
    <catalog - my_songs>
    >>> c.read_items(results=1)
    [
        {
            "artist_id": "AR78KRI1187B98E6F2",
            "artist_name": "Art of Noise",
            "date_added": "2012-04-02T16:50:02",
            "foreign_id": "CAHLYLR13674D1CF83:song:1000",
            "request": {
                "artist_name": "The Art Of Noise",
                "item_id": "1000",
                "song_name": "Love"
            },
            "song_id": "SOSBCTO1311AFE7AE0",
            "song_name": "Love"
        }
    ]
def generate_response_property(name=None, value=None):
    name = name or "dump2polarion"
    value = value or "".join(random.sample(string.ascii_lowercase, 12))
    return (name, value)
Generates a response property (name, value) tuple.
def stringify(metrics_headers=()):
    metrics_headers = collections.OrderedDict(metrics_headers)
    return ' '.join(['%s/%s' % (k, v) for k, v in metrics_headers.items()])
Convert the provided metrics headers to a string.

Iterate over the metrics headers (a dictionary, usually ordered) and
return a properly-formatted space-separated string
(e.g. foo/1.2.3 bar/3.14.159).
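A quick usage sketch:

headers = (('gl-python', '3.9.1'), ('gapic', '0.1.0'))
stringify(headers)  # -> 'gl-python/3.9.1 gapic/0.1.0'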
def wrap_socket(self, sock, server_side=False,
                do_handshake_on_connect=True,
                suppress_ragged_eofs=True, dummy=None):
    return ssl.wrap_socket(sock,
                           keyfile=self._keyfile,
                           certfile=self._certfile,
                           server_side=server_side,
                           cert_reqs=self._verify_mode,
                           ssl_version=self._protocol,
                           ca_certs=self._cafile,
                           do_handshake_on_connect=do_handshake_on_connect,
                           suppress_ragged_eofs=suppress_ragged_eofs)
Wrap an existing Python socket sock and return an ssl.SSLSocket object.
def version(self):
    try:
        f = self.func.__call__.__code__
    except AttributeError:
        f = self.func.__code__
    h = md5()
    h.update(f.co_code)
    h.update(str(f.co_names).encode())
    try:
        closure = self.func.__closure__
    except AttributeError:
        return h.hexdigest()
    if closure is None or self.closure_fingerprint is None:
        return h.hexdigest()
    d = dict(
        (name, cell.cell_contents)
        for name, cell in zip(f.co_freevars, closure))
    h.update(self.closure_fingerprint(d).encode())
    return h.hexdigest()
Compute the version identifier for this functional node using the func code and local names. Optionally, also allow closed-over variable values to affect the version number when `closure_fingerprint` is specified.
def configure(paths, relative_to):
    if not paths:
        return
    for path in [normalize_path(p, relative_to) for p in paths]:
        logger.debug('configuration path {0}'.format(path))
        pubkeys_path = join(path, PUBKEYSDIR)
        if os.path.exists(pubkeys_path):
            load_pubkeys(pubkeys_path, PUBKEYS)
        init_module(path)
Iterate over each configuration path, collecting all public keys destined for the new node's root account's authorized keys. Additionally, attempt to import each path as a Python module.
def address(self):
    if self.isDirect():
        base36 = self._iban[4:]
        asInt = int(base36, 36)
        return to_checksum_address(pad_left_hex(baseN(asInt, 16), 20))
    return ""
Should be called to get client direct address.

@method address
@returns {String} client direct address
def reverse_index_mapping(self):
    if self._reverse_index_mapping is None:
        if self.is_indexed:
            r = np.zeros(self.base_length, dtype=np.int32) - 1
            r[self.order] = np.arange(len(self.order), dtype=np.int32)
        elif self.data.base is None:
            r = np.arange(self.data_length, dtype=np.int32)
        else:
            r = np.zeros(self.base_length, dtype=np.int32) - 1
            r[self.data_start - self.base_start:self.data_end - self.base_start] = np.arange(self.data_length, dtype=np.int32)
        self._reverse_index_mapping = r
    return self._reverse_index_mapping
Get mapping from this segment's indexes to the indexes of the base array. If the index is < 0, the index is out of range, meaning that it doesn't exist in this segment and is not mapped to the base array
def _venv_match(self, installed, requirements):
    if not requirements:
        return None if installed else []

    satisfying_deps = []
    for repo, req_deps in requirements.items():
        useful_inst = set()
        if repo not in installed:
            return None
        if repo == REPO_VCS:
            inst_deps = {VCSDependency(url) for url in installed[repo].keys()}
        else:
            inst_deps = {Distribution(project_name=dep, version=ver)
                         for (dep, ver) in installed[repo].items()}
        for req in req_deps:
            for inst in inst_deps:
                if inst in req:
                    useful_inst.add(inst)
                    break
            else:
                return None

        if useful_inst == inst_deps:
            satisfying_deps.extend(inst_deps)
        else:
            return None

    return satisfying_deps
Return the list of satisfying dependencies if what is installed satisfies the requirements, else None. This method has multiple exit points, but only for None (because if *anything* is not satisfied, the venv is no good). Only after everything was checked, and it didn't exit, is the venv OK, so the satisfying dependencies are returned.
def handle_market_close(self, dt, data_portal):
    completed_session = self._current_session

    if self.emission_rate == 'daily':
        self.sync_last_sale_prices(dt, data_portal)

    session_ix = self._session_count
    self._session_count += 1

    packet = {
        'period_start': self._first_session,
        'period_end': self._last_session,
        'capital_base': self._capital_base,
        'daily_perf': {
            'period_open': self._market_open,
            'period_close': dt,
        },
        'cumulative_perf': {
            'period_open': self._first_session,
            'period_close': self._last_session,
        },
        'progress': self._progress(self),
        'cumulative_risk_metrics': {},
    }
    ledger = self._ledger
    ledger.end_of_session(session_ix)
    self.end_of_session(
        packet,
        ledger,
        completed_session,
        session_ix,
        data_portal,
    )

    return packet
Handles the close of the given day.

Parameters
----------
dt : Timestamp
    The most recently completed simulation datetime.
data_portal : DataPortal
    The current data portal.

Returns
-------
A daily perf packet.
def get_section_by_offset(self, offset):
    sections = [s for s in self.sections if s.contains_offset(offset)]
    if sections:
        return sections[0]
    return None
Get the section containing the given file offset.
def clean_translated_locales(configuration, langs=None):
    if not langs:
        langs = configuration.translated_locales
    for locale in langs:
        clean_locale(configuration, locale)
Strips from all translated po files the warning about being an English source file.
def set_transaction_isolation(self, level):
    self.ensure_connected()
    self._transaction_isolation_level = level
    self._platform.set_transaction_isolation(level)
Sets the transaction isolation level.

:param level: the level to set
def _safe_output(line):
    return not any([
        line.startswith('Listing') and line.endswith('...'),
        line.startswith('Listing') and '\t' not in line,
        '...done' in line,
        line.startswith('WARNING:')
    ])
Looks for rabbitmqctl warning or general formatting strings that aren't intended to be parsed as output. Returns a boolean indicating whether the line can be parsed as rabbitmqctl output.
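A sketch of what the filter accepts and rejects (example lines are illustrative):

_safe_output('Listing queues ...')        # False: banner line
_safe_output('WARNING: deprecated flag')  # False: warning
_safe_output('my_queue\t42')              # True: real tab-separated output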
def out_16(library, session, space, offset, data, extended=False):
    if extended:
        return library.viOut16Ex(session, space, offset, data)
    else:
        return library.viOut16(session, space, offset, data)
Write a 16-bit value to the specified memory space and offset.

Corresponds to viOut16* functions of the VISA library.

:param library: the visa library wrapped by ctypes.
:param session: Unique logical identifier to a session.
:param space: Specifies the address space. (Constants.*SPACE*)
:param offset: Offset (in bytes) of the address or register to which to write.
:param data: Data to write to bus.
:param extended: Use 64 bits offset independent of the platform.
:return: return value of the library call.
:rtype: :class:`pyvisa.constants.StatusCode`
def get_parameter(self, parameter):
    "Return a dict for given parameter"
    parameter = self._get_parameter_name(parameter)
    return self._parameters[parameter]
Return a dict for given parameter
def exclude_data_files(self, package, src_dir, files):
    files = list(files)
    patterns = self._get_platform_patterns(
        self.exclude_package_data,
        package,
        src_dir,
    )
    match_groups = (
        fnmatch.filter(files, pattern)
        for pattern in patterns
    )
    # flatten the groups of matches into a single set of excluded names
    matches = itertools.chain.from_iterable(match_groups)
    bad = set(matches)
    keepers = (
        fn
        for fn in files
        if fn not in bad
    )
    return list(_unique_everseen(keepers))
Filter filenames for package's data files in 'src_dir'.
def set_speed(self, speed):
    self.speed = speed
    self.send_cmd("SPEED" + str(speed))
Set the display speed. The parameter is the number of milliseconds between each column scrolling off the display.
def _resolve_transformations(transformations):
    registry = _ModulesRegistry()
    transformations = transformations or []
    for t in transformations:
        try:
            mod, attr = t.split(":", 1)
            yield getattr(registry.require(mod), attr)
        except ValueError:
            yield getattr(bonobo, t)
Resolve a collection of strings into the matching python objects, defaulting
to the bonobo namespace if no package is provided.

Syntax for each string is path.to.package:attribute

:param transformations: tuple(str)
:return: tuple(object)
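A usage sketch; mypkg.jobs:extract is an illustrative module path, while a bare name such as Limit resolves from the bonobo namespace:

list(_resolve_transformations(("mypkg.jobs:extract", "Limit")))
# -> [<function extract>, bonobo.Limit]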
def get(self, sid):
    return QueryContext(
        self._version,
        assistant_sid=self._solution['assistant_sid'],
        sid=sid,
    )
Constructs a QueryContext

:param sid: The unique string that identifies the resource

:returns: twilio.rest.autopilot.v1.assistant.query.QueryContext
:rtype: twilio.rest.autopilot.v1.assistant.query.QueryContext
def node_container(self, container_id):
    path = '/ws/v1/node/containers/{containerid}'.format(
        containerid=container_id)
    return self.request(path)
A container resource contains information about a particular container
that is running on this NodeManager.

:param str container_id: The container id
:returns: API response object with JSON data
:rtype: :py:class:`yarn_api_client.base.Response`
def display(fig=None, closefig=True, **kwargs):
    from IPython.display import HTML
    if fig is None:
        fig = plt.gcf()
    if closefig:
        plt.close(fig)
    html = fig_to_html(fig, **kwargs)
    iframe_html = ('<iframe src="data:text/html;base64,{html}" '
                   'width="{width}" height="{height}"></iframe>'
                   .format(html=base64.b64encode(html.encode('utf8')).decode('utf8'),
                           width='100%',
                           height=int(60. * fig.get_figheight())))
    return HTML(iframe_html)
Convert a Matplotlib Figure to a Leaflet map. Embed in IPython notebook.

Parameters
----------
fig : figure, default gcf()
    Figure used to convert to map
closefig : boolean, default True
    Close the current Figure
def mkdir_command(endpoint_plus_path):
    endpoint_id, path = endpoint_plus_path
    client = get_client()
    autoactivate(client, endpoint_id, if_expires_in=60)
    res = client.operation_mkdir(endpoint_id, path=path)
    formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="message")
Executor for `globus mkdir`
def find(cls, paths):
    pythons = []
    for path in paths:
        for fn in cls.expand_path(path):
            basefile = os.path.basename(fn)
            if cls._matches_binary_name(basefile):
                try:
                    pythons.append(cls.from_binary(fn))
                except Exception as e:
                    TRACER.log('Could not identify %s: %s' % (fn, e))
                    continue
    return pythons
Given a list of files or directories, try to detect python interpreters amongst them. Returns a list of PythonInterpreter objects.
def _get_seqprop_to_seqprop_alignment(self, seqprop1, seqprop2):
    if isinstance(seqprop1, str):
        seqprop1_id = seqprop1
    else:
        seqprop1_id = seqprop1.id
    if isinstance(seqprop2, str):
        seqprop2_id = seqprop2
    else:
        seqprop2_id = seqprop2.id

    aln_id = '{}_{}'.format(seqprop1_id, seqprop2_id)
    if self.sequence_alignments.has_id(aln_id):
        alignment = self.sequence_alignments.get_by_id(aln_id)
        return alignment
    else:
        raise ValueError('{}: sequence alignment not found, please run the alignment first'.format(aln_id))
Return the alignment stored in self.sequence_alignments given one SeqProp and another SeqProp.
def is_battery_level(value):
    try:
        value = percent_int(value)
        return value
    except vol.Invalid:
        _LOGGER.warning(
            '%s is not a valid battery level, falling back to battery level 0',
            value)
        return 0
Validate that value is a valid battery level integer.
def is_installed(self, bug: Bug) -> bool:
    r = self.__api.get('bugs/{}/installed'.format(bug.name))

    if r.status_code == 200:
        answer = r.json()
        assert isinstance(answer, bool)
        return answer
    if r.status_code == 404:
        raise KeyError("no bug found with given name: {}".format(bug.name))

    self.__api.handle_erroneous_response(r)
Determines whether the Docker image for a given bug has been installed on the server.
def name(self):
    parts = self._parts
    if len(parts) == (1 if (self._drv or self._root) else 0):
        return ''
    return parts[-1]
The final path component, if any.
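The standard library's pathlib implements the same property, which makes for an easy illustration:

from pathlib import PurePath
PurePath('/usr/bin/python3').name  # 'python3'
PurePath('/').name                 # '' (the root has no final component)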
def logToFile(path, level=logging.INFO):
    logger = logging.getLogger()
    logger.setLevel(level)
    formatter = logging.Formatter(
        '%(asctime)s %(name)s %(levelname)s %(message)s')
    handler = logging.FileHandler(path)
    handler.setFormatter(formatter)
    logger.addHandler(handler)
Create a log handler that logs to the given file.
def validate_file(fn, options=None):
    file_results = FileValidationResults(filepath=fn)
    output.info("Performing JSON schema validation on %s" % fn)

    if not options:
        options = ValidationOptions(files=fn)

    try:
        with open(fn) as instance_file:
            file_results.object_results = validate(instance_file, options)
    except Exception as ex:
        if 'Expecting value' in str(ex):
            line_no = str(ex).split()[3]
            file_results.fatal = ValidationErrorResults(
                'Invalid JSON input on line %s' % line_no
            )
        else:
            file_results.fatal = ValidationErrorResults(ex)

        msg = ("Unexpected error occurred with file '{fn}'. No further "
               "validation will be performed: {error}")
        output.info(msg.format(fn=fn, error=str(ex)))

    file_results.is_valid = (all(object_result.is_valid
                                 for object_result in file_results.object_results)
                             and not file_results.fatal)

    return file_results
Validate the input document `fn` according to the options passed in.

If any exceptions are raised during validation, no further validation
will take place.

Args:
    fn: The filename of the JSON file to be validated.
    options: An instance of ``ValidationOptions``.

Returns:
    An instance of FileValidationResults.
def initiate_handshake(self, headers, timeout=None):
    io_loop = IOLoop.current()
    timeout = timeout or DEFAULT_INIT_TIMEOUT_SECS

    self.writer.put(messages.InitRequestMessage(
        version=PROTOCOL_VERSION,
        headers=headers
    ))
    init_res_future = self.reader.get()

    timeout_handle = io_loop.call_later(timeout, (
        lambda: init_res_future.set_exception(errors.TimeoutError(
            'Handshake with %s:%d timed out. Did not receive an INIT_RES '
            'after %s seconds' % (
                self.remote_host, self.remote_host_port, str(timeout)
            )
        ))
    ))
    io_loop.add_future(
        init_res_future,
        (lambda _: io_loop.remove_timeout(timeout_handle)),
    )

    init_res = yield init_res_future
    if init_res.message_type != Types.INIT_RES:
        raise errors.UnexpectedError(
            "Expected handshake response, got %s" % repr(init_res)
        )
    self._extract_handshake_headers(init_res)
    self._handshake_performed = True

    self._loop()

    raise tornado.gen.Return(init_res)
Initiate a handshake with the remote host.

:param headers:
    A dictionary of headers to send.
:returns:
    A future that resolves (with a value of None) when the handshake
    is complete.
def new_task(func):
    @wraps(func)
    async def wrapper(self, *args, **kwargs):
        loop = get_event_loop()
        loop.create_task(func(self, *args, **kwargs))
    return wrapper
Runs the decorated function in a new task
def create_from(cls, another, **kwargs):
    reused_fields = {}
    for field, value in another.get_fields():
        if field in cls.FIELDS:
            reused_fields[field] = value
    reused_fields.update(kwargs)
    return cls(**reused_fields)
Create from another object of a different type. The other object must be from a class derived from SimpleObject (which contains FIELDS).
def _check_version(self, request):
    version = self._get_version(request)
    if version and version != self.version:
        raise Error('OAuth version %s not supported.' % str(version))
Verify the correct version of the request for this server.
def is_cloudflare_challenge(response):
    return (
        response.status == 503
        # headers are bytes, so the default must be bytes too
        and response.headers.get('Server', b'').startswith(b'cloudflare')
        and 'jschl_vc' in response.text
        and 'jschl_answer' in response.text
    )
Test if the given response contains Cloudflare's anti-bot protection.
def verifyWriteMode(files):
    # accept a single filename as well as a list of filenames
    if not isinstance(files, list):
        files = [files]

    not_writable = []
    writable = True
    for fname in files:
        try:
            f = open(fname, 'a')
            f.close()
            del f
        except (IOError, OSError):
            not_writable.append(fname)
            writable = False
    if not writable:
        print('The following file(s) do not have write permission!')
        for fname in not_writable:
            print('    ', fname)
    return writable
Checks whether files are writable. It is up to the calling routine to raise an exception, if desired. This function returns True if all files are writable and False if any are not. In addition, for all files found to not be writable, it prints out the list of names of affected files.
def message(self, bot, comm):
    super(KarmaAdv, self).message(bot, comm)
    if not comm['directed'] and not comm['pm']:
        msg = comm['message'].strip().lower()
        words = self.regstr.findall(msg)
        karmas = self.modify_karma(words)
        if comm['user'] in karmas.keys():
            if karmas[comm['user']] <= 0:
                bot.reply(comm, "Don't be so hard on yourself.")
            else:
                bot.reply(comm, "Tisk, tisk, no up'ing your own karma.")
        self.update_db(comm["user"], karmas)
Check for strings ending with 2 or more '-' or '+'
def install_lib(url, replace_existing=False, fix_wprogram=True):
    d = tmpdir(tmpdir())
    f = download(url)
    Archive(f).extractall(d)

    clean_dir(d)
    d, src_dlib = find_lib_dir(d)
    move_examples(d, src_dlib)
    fix_examples_dir(src_dlib)
    if fix_wprogram:
        fix_wprogram_in_files(src_dlib)

    targ_dlib = libraries_dir() / src_dlib.name
    if targ_dlib.exists():
        log.debug('library already exists: %s', targ_dlib)
        if replace_existing:
            log.debug('remove %s', targ_dlib)
            targ_dlib.rmtree()
        else:
            raise ConfduinoError('library already exists:' + targ_dlib)

    log.debug('move %s -> %s', src_dlib, targ_dlib)
    src_dlib.move(targ_dlib)

    libraries_dir().copymode(targ_dlib)
    for x in targ_dlib.walk():
        libraries_dir().copymode(x)
    return targ_dlib.name
Install a library from the web or the local file system.

:param url: web address or file path
:param replace_existing: bool
:rtype: None
def GetListCollection(self):
    soap_request = soap('GetListCollection')
    self.last_request = str(soap_request)
    response = self._session.post(url=self._url('SiteData'),
                                  headers=self._headers('GetListCollection'),
                                  data=str(soap_request),
                                  verify=self._verify_ssl,
                                  timeout=self.timeout)

    if response.status_code == 200:
        envelope = etree.fromstring(response.text.encode('utf-8'),
                                    parser=etree.XMLParser(huge_tree=self.huge_tree))
        result = envelope[0][0][0].text
        lists = envelope[0][0][1]
        data = []
        for _list in lists:
            _list_data = {}
            for item in _list:
                key = item.tag.replace('{http://schemas.microsoft.com/sharepoint/soap/}', '')
                value = item.text
                _list_data[key] = value
            data.append(_list_data)
        return data
    else:
        return response
Returns List information for current Site
def _list_files(path, suffix=""):
    if os.path.isdir(path):
        incomplete = os.listdir(path)
        complete = [os.path.join(path, entry) for entry in incomplete]
        lists = [_list_files(subpath, suffix) for subpath in complete]
        flattened = []
        for one_list in lists:
            for elem in one_list:
                flattened.append(elem)
        return flattened
    else:
        assert os.path.exists(path), "couldn't find file '%s'" % path
        if path.endswith(suffix):
            return [path]
        return []
Returns a list of all files ending in `suffix` contained within `path`.

Parameters
----------
path : str
    a filepath
suffix : str

Returns
-------
l : list
    A list of all files ending in `suffix` contained within `path`.
    (If `path` is a file rather than a directory, it is considered to
    "contain" itself)
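A usage sketch with illustrative paths:

_list_files("data", suffix=".csv")
# -> ['data/2020/a.csv', 'data/b.csv']  (recurses into subdirectories)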
def is_new_preorder(self, preorder_hash, lastblock=None):
    if lastblock is None:
        lastblock = self.lastblock

    preorder = namedb_get_name_preorder(self.db, preorder_hash, lastblock)
    if preorder is not None:
        return False
    else:
        return True
Given a preorder hash of a name, determine whether or not it is new (i.e. not seen before).
def pre_execute(self, execution, context):
    path = self._fspath
    if path:
        path = path.format(
            benchmark=context.benchmark,
            api=execution['category'],
            **execution.get('metas', {})
        )
        if self.clean_path:
            shutil.rmtree(path, ignore_errors=True)
        if execution['metas']['file_mode'] == 'onefile':
            path = osp.dirname(path)
        if not osp.exists(path):
            os.makedirs(path)
Make sure the named directory is created if possible
def info(self, user_id):
    resp = self._rtm_client.get('v1/user.info?user_id={}'.format(user_id))
    if resp.is_fail():
        raise RTMServiceError('Failed to get user information', resp)
    return resp.data['result']
Gets user information by user id

Args:
    user_id(int): the id of user

Returns:
    User

Throws:
    RTMServiceError when request failed
def restore_backup(
        self, bootstrap=False, constraints=None, archive=None,
        backup_id=None, upload_tools=False):
    raise NotImplementedError()
Restore a backup archive to a new controller.

:param bool bootstrap: Bootstrap a new state machine
:param constraints: Model constraints
:type constraints: :class:`juju.Constraints`
:param str archive: Path to backup archive to restore
:param str backup_id: Id of backup to restore
:param bool upload_tools: Upload tools if bootstrapping a new machine
def nack(messageid, subscriptionid, transactionid=None):
    header = 'subscription:%s\nmessage-id:%s' % (subscriptionid, messageid)
    if transactionid:
        header += '\ntransaction:%s' % transactionid
    return "NACK\n%s\n\n\x00\n" % header
STOMP negative acknowledge command.

NACK is the opposite of ACK. It is used to tell the server that the client
did not consume the message. The server can then either send the message to
a different client, discard it, or put it in a dead letter queue. The exact
behavior is server specific.

messageid:
    This is the id of the message we are acknowledging, what else could it be? ;)

subscriptionid:
    This is the id of the subscription that applies to the message.

transactionid:
    This is the id that all actions in this transaction will have. If this
    is not given, no transaction header is added to the frame.
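A sketch of the frame this helper produces:

nack('msg-7', 'sub-1')
# -> 'NACK\nsubscription:sub-1\nmessage-id:msg-7\n\n\x00\n'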
def settings(self) -> typing.Union[None, SharedCache]:
    return self._project.settings if self._project else None
The settings associated with this project.
def erase_disk_partitions(disk_id=None, scsi_address=None,
                          service_instance=None):
    if not disk_id and not scsi_address:
        raise ArgumentValueError('Either \'disk_id\' or \'scsi_address\' '
                                 'needs to be specified')
    host_ref = _get_proxy_target(service_instance)
    hostname = __proxy__['esxi.get_details']()['esxi_host']
    if not disk_id:
        scsi_address_to_lun = \
            salt.utils.vmware.get_scsi_address_to_lun_map(host_ref)
        if scsi_address not in scsi_address_to_lun:
            raise VMwareObjectRetrievalError(
                'Scsi lun with address \'{0}\' was not found on host \'{1}\''
                ''.format(scsi_address, hostname))
        disk_id = scsi_address_to_lun[scsi_address].canonicalName
        log.trace('[%s] Got disk id \'%s\' for scsi address \'%s\'',
                  hostname, disk_id, scsi_address)
    log.trace('Erasing disk partitions on disk \'%s\' in host \'%s\'',
              disk_id, hostname)
    salt.utils.vmware.erase_disk_partitions(service_instance, host_ref,
                                            disk_id, hostname=hostname)
    log.info('Erased disk partitions on disk \'%s\' on host \'%s\'',
             disk_id, hostname)
    return True
Erases the partitions on a disk. The disk can be specified either by the
canonical name, or by the scsi_address.

disk_id
    Canonical name of the disk.
    Either ``disk_id`` or ``scsi_address`` needs to be specified
    (``disk_id`` supersedes ``scsi_address``).

scsi_address
    Scsi address of the disk.
    ``disk_id`` or ``scsi_address`` needs to be specified
    (``disk_id`` supersedes ``scsi_address``).

service_instance
    Service instance (vim.ServiceInstance) of the vCenter/ESXi host.
    Default is None.

.. code-block:: bash

    salt '*' vsphere.erase_disk_partitions scsi_address='vmhaba0:C0:T0:L0'

    salt '*' vsphere.erase_disk_partitions disk_id='naa.000000000000001'
def _on_status_message(self, sequence, topic, message):
    self._logger.debug("Received message on (topic=%s): %s" % (topic, message))
    try:
        conn_key = self._find_connection(topic)
    except ArgumentError:
        self._logger.warn("Dropping message that does not correspond with a known connection, message=%s", message)
        return

    if messages.ConnectionResponse.matches(message):
        if self.name != message['client']:
            self._logger.debug("Connection response received for a different client, client=%s, name=%s", message['client'], self.name)
            return
        self.conns.finish_connection(conn_key, message['success'],
                                     message.get('failure_reason', None))
    else:
        self._logger.warn("Dropping message that did not correspond with a known schema, message=%s", message)
Process a status message received

Args:
    sequence (int): The sequence number of the packet received
    topic (string): The topic this message was received on
    message (dict): The message itself
def new_consumer(self, config, consumer_name):
    return Consumer(0, dict(),
                    config.get('qty', self.DEFAULT_CONSUMER_QTY),
                    config.get('queue', consumer_name))
Return a consumer dict for the given name and configuration.

:param dict config: The consumer configuration
:param str consumer_name: The consumer name
:rtype: dict
def notGroup(states, *stateIndexPairs):
    start, dead = group(states, *stateIndexPairs)
    finish = len(states)
    states.append([])
    states[start].append((DEFAULT, finish))
    return start, finish
Like group, but will add a DEFAULT transition to a new end state, causing anything in the group to not match by going to a dead state. XXX I think this is right...
def add_field_value(self, field, value):
    super(Issue, self).update(fields={"update": {field: [{"add": value}]}})
Add a value to a field that supports multiple values, without resetting the existing values.

This should work with: labels, multiple checkbox lists, multiple select

:param field: The field name
:param value: The field's value
:type field: str
def sync(self):
    self._elk.add_handler('VN', self._vn_handler)
    self._elk.add_handler('XK', self._xk_handler)
    self._elk.add_handler('RP', self._rp_handler)
    self._elk.add_handler('IE', self._elk.call_sync_handlers)
    self._elk.add_handler('SS', self._ss_handler)
    self._elk.send(vn_encode())
    self._elk.send(lw_encode())
    self._elk.send(ss_encode())
Retrieve panel information from ElkM1
def startswith(self, pat):
    check_type(pat, str)
    return _series_bool_result(self, weld_str_startswith, pat=pat)
Test if elements start with pat.

Parameters
----------
pat : str

Returns
-------
Series
def _getgroup(string, depth):
    out, comma = [], False
    while string:
        items, string = _getitem(string, depth)
        if not string:
            break
        out += items

        if string[0] == '}':
            if comma:
                return out, string[1:]
            return ['{' + a + '}' for a in out], string[1:]

        if string[0] == ',':
            comma, string = True, string[1:]

    return None
Get a group from the string, where a group is a list of all the comma-separated substrings up to the next '}' char, or the brace-enclosed substring if there is no comma.
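A sketch of the helper's two return shapes, assuming the input starts just after a consumed '{':

_getgroup('a,b}', depth=1)  # -> (['a', 'b'], '')   comma: group expands
_getgroup('ab}', depth=1)   # -> (['{ab}'], '')     no comma: braces kept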
def do_child_watch(self, params):
    get_child_watcher(self._zk, print_func=self.show_output).update(
        params.path, params.verbose)
\x1b[1mNAME\x1b[0m
    child_watch - Watch a path for child changes

\x1b[1mSYNOPSIS\x1b[0m
    child_watch <path> [verbose]

\x1b[1mOPTIONS\x1b[0m
    * verbose: prints list of znodes (default: false)

\x1b[1mEXAMPLES\x1b[0m
    # only prints the current number of children
    > child_watch /

    # prints num of children along with znodes listing
    > child_watch / true
def load_file_or_hdu(filename):
    if isinstance(filename, fits.HDUList):
        hdulist = filename
    else:
        hdulist = fits.open(filename, ignore_missing_end=True)
    return hdulist
Load a file from disk and return an HDUList.
If filename is already an HDUList return that instead.

Parameters
----------
filename : str or HDUList
    File or HDU to be loaded

Returns
-------
hdulist : HDUList
def insert_node_node(**kw):
    with current_app.app_context():
        insert_query(name='select_link_node_from_node.sql',
                     node_id=kw.get('node_id'))
        db.execute(text(fetch_query_string('insert_node_node.sql')), **kw)
Link a node to another node. node_id -> target_node_id. Where `node_id` is the parent and `target_node_id` is the child.
def getSbus(self, buses=None):
    bs = self.buses if buses is None else buses
    s = array([self.s_surplus(v) / self.base_mva for v in bs])
    return s
Returns the net complex bus power injection vector in p.u.
def create_db_user(username, password=None, flags=None):
    flags = flags or u'-D -A -R'
    sudo(u'createuser %s %s' % (flags, username), user=u'postgres')

    if password:
        change_db_user_password(username, password)
Create a database user.
def schedule_ping_frequency(self):
    "Send a ping message to slack every 20 seconds"
    ping = crontab('* * * * * */20', func=self.send_ping, start=False)
    ping.start()
Send a ping message to slack every 20 seconds
def get_float(self, input_string):
    if input_string == '--training_fraction':
        try:
            index = self.args.index(input_string) + 1
        except ValueError:
            return None
        try:
            if self.args[index] in self.flags:
                print("\n   {flag} was set but a value was not specified".format(flag=input_string))
                print_short_help()
                sys.exit(1)
        except IndexError:
            print("\n   {flag} was set but a value was not specified".format(flag=input_string))
            print_short_help()
            sys.exit(1)
        try:
            value = float(self.args[index])
        except ValueError:
            print("\n   {flag} must be a float less than or equal to 1, e.g. 0.4".format(flag=input_string))
            print_short_help()
            sys.exit(1)
        if value > 1.0 or value < 0:
            print("\n   {flag} must be a float less than or equal to 1, e.g. 0.4".format(flag=input_string))
            print_short_help()
            sys.exit(1)
        return value
Return a float parsed from user input.
def advanced_search(self, terms, relation=None, index=0, limit=25, **kwargs):
    assert isinstance(terms, dict), "terms must be a dict"
    query = " ".join(sorted(['{}:"{}"'.format(k, v) for (k, v) in terms.items()]))
    return self.get_object(
        "search", relation=relation, q=query, index=index, limit=limit, **kwargs
    )
Advanced search of track, album or artist.

See `Search section of Deezer API
<https://developers.deezer.com/api/search>`_ for search terms.

:returns: a list of :class:`~deezer.resources.Resource` objects.

>>> client.advanced_search({"artist": "Daft Punk", "album": "Homework"})
>>> client.advanced_search({"artist": "Daft Punk", "album": "Homework"},
...                        relation="track")
def delete_user(self, user):
    user_id = utils.get_id(user)
    uri = "users/%s" % user_id
    resp, resp_body = self.method_delete(uri)
    if resp.status_code == 404:
        raise exc.UserNotFound("User '%s' does not exist." % user)
    elif resp.status_code in (401, 403):
        raise exc.AuthorizationFailure("You are not authorized to delete "
                                       "users.")
ADMIN ONLY. Removes the user from the system. There is no 'undo' available, so you should be certain that the user specified is the user you wish to delete.
def stop(self):
    try:
        self.aitask.stop()
        self.aotask.stop()
    except:
        print u"No task running"
    self.aitask = None
    self.aotask = None
Halts the acquisition; this must be called before resetting the acquisition.
def save(self):
    content = self.dumps()
    fileutils.save_text_to_file(content, self.file_path)
Saves the settings contents
def syllabify(self, hierarchy):
    if len(self.long_lines) == 0:
        logger.error("No text was imported")
        self.syllabified_text = []
    else:
        syllabifier = Syllabifier(language="old_norse", break_geminants=True)
        syllabifier.set_hierarchy(hierarchy)
        syllabified_text = []
        for i, long_line in enumerate(self.long_lines):
            syllabified_text.append([])
            for short_line in long_line:
                assert isinstance(short_line, ShortLine) or isinstance(short_line, LongLine)
                short_line.syllabify(syllabifier)
                syllabified_text[i].append(short_line.syllabified)
        self.syllabified_text = syllabified_text
Syllables may play a role in verse classification.
def comment_lines(lines, prefix):
    if not prefix:
        return lines
    return [prefix + ' ' + line if line else prefix for line in lines]
Return commented lines
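A quick usage sketch:

comment_lines(['first', '', 'second'], '#')
# -> ['# first', '#', '# second']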
def _build_object_table(self):
    types = self.domain.types
    objects = dict(self.non_fluents.objects)
    self.object_table = dict()
    for name, value in types:
        if value == 'object':
            objs = objects[name]
            idx = {obj: i for i, obj in enumerate(objs)}
            self.object_table[name] = {
                'size': len(objs),
                'idx': idx,
                'objects': objs
            }
Builds the object table for each RDDL type.
def list_poll_choices_in_poll(self, poll_id):
    path = {}
    data = {}
    params = {}

    path["poll_id"] = poll_id

    self.logger.debug("GET /api/v1/polls/{poll_id}/poll_choices with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("GET", "/api/v1/polls/{poll_id}/poll_choices".format(**path), data=data, params=params, no_data=True)
List poll choices in a poll. Returns the list of PollChoices in this poll.
def get_async(self, **ctx_options):
    from . import model, tasklets
    ctx = tasklets.get_context()
    cls = model.Model._kind_map.get(self.kind())
    if cls:
        cls._pre_get_hook(self)
    fut = ctx.get(self, **ctx_options)
    if cls:
        post_hook = cls._post_get_hook
        if not cls._is_default_hook(model.Model._default_post_get_hook,
                                    post_hook):
            fut.add_immediate_callback(post_hook, self, fut)
    return fut
Return a Future whose result is the entity for this Key. If no such entity exists, a Future is still returned, and the Future's eventual result will be None.
def render_inner(self, token):
    rendered = [self.render(child) for child in token.children]
    return ''.join(rendered)
Recursively renders child tokens. Joins the rendered strings with no space
in between.

If newlines / spaces are needed between tokens, add them in their
respective templates, or override this function in the renderer subclass,
so that whitespace won't seem to appear magically for anyone reading your
program.

Arguments:
    token: a branch node which has a children attribute.
def rpy(self):
    x, y, z, w = self.x, self.y, self.z, self.w
    roll = math.atan2(2*y*w - 2*x*z, 1 - 2*y*y - 2*z*z)
    pitch = math.atan2(2*x*w - 2*y*z, 1 - 2*x*x - 2*z*z)
    yaw = math.asin(2*x*y + 2*z*w)
    return (roll, pitch, yaw)
Calculates the Roll, Pitch and Yaw of the Quaternion.
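A usage sketch; the Quaternion constructor shown is hypothetical, but the identity rotation should give zero angles:

q = Quaternion(x=0.0, y=0.0, z=0.0, w=1.0)
q.rpy  # -> (0.0, 0.0, 0.0)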
def plot_residuals(self, plot=None):
    if plot is None:
        import matplotlib.pyplot as plot
    x = numpy.arange(1, len(self.residuals) + 1)
    y = _gvar.mean(self.residuals)
    yerr = _gvar.sdev(self.residuals)
    plot.errorbar(x=x, y=y, yerr=yerr, fmt='o', color='b')
    plot.ylabel('normalized residuals')
    xr = [x[0], x[-1]]
    plot.plot([x[0], x[-1]], [0, 0], 'r-')
    plot.fill_between(x=xr, y1=[-1, -1], y2=[1, 1], color='r', alpha=0.075)
    return plot
Plot normalized fit residuals.

The sum of the squares of the residuals equals ``self.chi2``.
Individual residuals should be distributed about zero, in a Gaussian
distribution.

Args:
    plot: :mod:`matplotlib` plotter. If ``None``, uses
        ``matplotlib.pyplot``.

Returns:
    Plotter ``plot``.
def get_penalty_model(specification):
    feasible_configurations = specification.feasible_configurations
    if specification.vartype is dimod.BINARY:
        feasible_configurations = {tuple(2 * v - 1 for v in config): en
                                   for config, en in iteritems(feasible_configurations)}

    ising_quadratic_ranges = specification.ising_quadratic_ranges
    quadratic_ranges = {(u, v): ising_quadratic_ranges[u][v]
                        for u, v in specification.graph.edges}

    try:
        bqm, gap = generate_bqm(specification.graph,
                                feasible_configurations,
                                specification.decision_variables,
                                linear_energy_ranges=specification.ising_linear_ranges,
                                quadratic_energy_ranges=quadratic_ranges,
                                min_classical_gap=specification.min_classical_gap)
    except ValueError:
        raise pm.exceptions.FactoryException("Specification is for too large of a model")

    return pm.PenaltyModel.from_specification(specification, bqm, gap, 0.0)
Factory function for penaltymodel-lp.

Args:
    specification (penaltymodel.Specification): The specification
        for the desired penalty model.

Returns:
    :class:`penaltymodel.PenaltyModel`: Penalty model with the given specification.

Raises:
    :class:`penaltymodel.ImpossiblePenaltyModel`: If the penalty cannot be built.

Parameters:
    priority (int): -100
def http_method(self, method):
    self.build_url()
    try:
        response = self.get_http_method(method)
        is_success = response.ok
        try:
            response_message = response.json()
        except ValueError:
            response_message = response.text
    except requests.exceptions.RequestException as exc:
        is_success = False
        response_message = exc.args
    return is_success, response_message
Execute the given HTTP method and return whether it succeeded, together with the response: as a string on failure, or as a Python object (decoded from JSON) on success.
def _vertex_one_color_qubo(x_vars):
    Q = {}
    for v in x_vars:
        for color in x_vars[v]:
            idx = x_vars[v][color]
            Q[(idx, idx)] = -1
        for color0, color1 in itertools.combinations(x_vars[v], 2):
            idx0 = x_vars[v][color0]
            idx1 = x_vars[v][color1]
            Q[(idx0, idx1)] = 2
    return Q
For each vertex, it should have exactly one color. Generates the QUBO
to enforce this constraint.

Notes
-----
Does not enforce neighboring vertices having different colors.

Ground energy is -1 * |G|, infeasible gap is 1.
def date_0utc(date):
    return ee.Date.fromYMD(date.get('year'), date.get('month'),
                           date.get('day'))
Get the 0 UTC date for a date

Parameters
----------
date : ee.Date

Returns
-------
ee.Date
def container_device_add(name, device_name, device_type='disk',
                         remote_addr=None,
                         cert=None, key=None, verify_cert=True,
                         **kwargs):
    container = container_get(
        name, remote_addr, cert, key, verify_cert, _raw=True
    )

    kwargs['type'] = device_type
    return _set_property_dict_item(
        container, 'devices', device_name, kwargs
    )
Add a container device

name :
    Name of the container

device_name :
    The device name to add

device_type :
    Type of the device

** kwargs :
    Additional device args

remote_addr :
    An URL to a remote server; you also have to give cert and key if
    you provide remote_addr and it's a TCP address!

    Examples:
        https://myserver.lan:8443
        /var/lib/mysocket.sock

cert :
    PEM Formatted SSL Certificate.

    Examples:
        ~/.config/lxc/client.crt

key :
    PEM Formatted SSL Key.

    Examples:
        ~/.config/lxc/client.key

verify_cert : True
    Whether to verify the cert; this is True by default, but in most
    cases you want to set it off, as LXD normally uses self-signed
    certificates.
def _preprocessContaminantOutFilePath(outPath):
    if '/' in outPath:
        splitPath = outPath.split('/')
    elif '\\' in outPath:
        splitPath = outPath.split('\\')
    else:
        splitPath = [outPath, ]

    if splitPath[-1] == '':
        outputFilename = splitPath[-2]
    else:
        outputFilename = splitPath[-1]

    if '.' in outputFilename:
        outputFilename = outputFilename.split('.')[0]

    return outputFilename
Preprocess the contaminant output file path to a relative path.
def is_topk(self, topk=10, reverse=False):
    with cython_context():
        return SArray(_proxy=self.__proxy__.topk_index(topk, reverse))
Create an SArray indicating which elements are in the top k.

Entries are '1' if the corresponding element in the current SArray is a
part of the top k elements, and '0' if that corresponding element is
not. Order is descending by default.

Parameters
----------
topk : int
    The number of elements to determine if 'top'

reverse : bool
    If True, return the topk elements in ascending order

Returns
-------
out : SArray (of type int)

Notes
-----
This is used internally by SFrame's topk function.
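A usage sketch, assuming an SArray-like container:

sa = SArray([10, 2, 8, 5])
sa.is_topk(topk=2)  # -> [1, 0, 1, 0]  (10 and 8 are the top two)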
def model_saved(sender, instance, created, raw, using, **kwargs):
    opts = get_opts(instance)
    model = '.'.join([opts.app_label, opts.object_name])
    action = 'created' if created else 'updated'
    distill_model_event(instance, model, action)
Automatically triggers "created" and "updated" actions.
def iter_links_element_text(cls, element):
    if element.text:
        link_type = identify_link_type(element.text)
        yield LinkInfo(
            element=element, tag=element.tag, attrib=None,
            link=element.text,
            inline=False, linked=True,
            base_link=None,
            value_type='plain',
            link_type=link_type
        )
Get the element text as a link.
def create_classifier(self, metadata, training_data, **kwargs):
    if metadata is None:
        raise ValueError('metadata must be provided')
    if training_data is None:
        raise ValueError('training_data must be provided')

    headers = {}
    if 'headers' in kwargs:
        headers.update(kwargs.get('headers'))
    sdk_headers = get_sdk_headers('natural_language_classifier', 'V1',
                                  'create_classifier')
    headers.update(sdk_headers)

    form_data = {}
    form_data['training_metadata'] = (None, metadata, 'application/json')
    form_data['training_data'] = (None, training_data, 'text/csv')

    url = '/v1/classifiers'
    response = self.request(
        method='POST',
        url=url,
        headers=headers,
        files=form_data,
        accept_json=True)
    return response
Create classifier.

Sends data to create and train a classifier and returns information
about the new classifier.

:param file metadata: Metadata in JSON format. The metadata identifies the
    language of the data, and an optional name to identify the classifier.
    Specify the language with the 2-letter primary language code as assigned
    in ISO standard 639.
    Supported languages are English (`en`), Arabic (`ar`), French (`fr`),
    German (`de`), Italian (`it`), Japanese (`ja`), Korean (`ko`),
    Brazilian Portuguese (`pt`), and Spanish (`es`).
:param file training_data: Training data in CSV format. Each text value must
    have at least one class. The data can include up to 3,000 classes and
    20,000 records. For details, see [Data
    preparation](https://cloud.ibm.com/docs/services/natural-language-classifier/using-your-data.html).
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse
def import_pf_config(self):
    scenario = cfg_ding0.get("powerflow", "test_grid_stability_scenario")
    start_hour = int(cfg_ding0.get("powerflow", "start_hour"))
    end_hour = int(cfg_ding0.get("powerflow", "end_hour"))
    start_time = datetime(1970, 1, 1, 00, 00, 0)

    resolution = cfg_ding0.get("powerflow", "resolution")
    srid = str(int(cfg_ding0.get('geo', 'srid')))

    return PFConfigDing0(scenarios=[scenario],
                         timestep_start=start_time,
                         timesteps_count=end_hour - start_hour,
                         srid=srid,
                         resolution=resolution)
Creates power flow config class and imports config from file

Returns
-------
PFConfigDing0
    PFConfigDing0 object
def get_group_id(self, uuid=None):
    group_data = self.get_group(uuid)
    try:
        return group_data['response']['docs'][0]['id']
    except (KeyError, IndexError):
        failure_message = ('Error in get_group response data - '
                           'got {0}'.format(group_data))
        log.exception(failure_message)
        raise PyLmodUnexpectedData(failure_message)
Get group id based on uuid.

Args:
    uuid (str): optional uuid. defaults to self.cuuid

Raises:
    PyLmodUnexpectedData: No group data was returned.
    requests.RequestException: Exception connection error

Returns:
    int: numeric group id
def rshift_arithmetic(self, shift_amount):
    lower, upper = self._pre_shift(shift_amount)
    ret = None

    for amount in xrange(lower, upper + 1):
        si_ = self._rshift_arithmetic(amount)
        ret = si_ if ret is None else ret.union(si_)

    ret.normalize()
    ret.uninitialized = self.uninitialized
    return ret
Arithmetic shift right.

:param StridedInterval shift_amount: The amount of shifting
:return: The shifted StridedInterval
:rtype: StridedInterval
def put(path, obj):
    # prefer the faster cPickle on Python 2, fall back to pickle on Python 3
    try:
        import cPickle as pickle
    except ImportError:
        import pickle

    with open(path, 'wb') as file:
        return pickle.dump(obj, file)
Write an object to file
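A quick usage sketch:

put('cache.pkl', {'answer': 42})  # pickles the dict to cache.pkl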
def update_bgp_speaker(self, bgp_speaker_id, body=None):
    return self.put(self.bgp_speaker_path % bgp_speaker_id, body=body)
Update a BGP speaker.
def has_too_few_calls(self):
    if self.has_exact and self._call_count < self._exact:
        return True
    if self.has_minimum and self._call_count < self._minimum:
        return True
    return False
Test if there have not been enough calls

:rtype: boolean
def __is_surrogate_escaped(self, text):
    try:
        text.encode('utf-8')
    except UnicodeEncodeError as e:
        if e.reason == 'surrogates not allowed':
            return True
    return False
Checks if the text contains surrogate-escaped characters.
def repr_tree(self):
    import utool as ut
    import networkx as nx
    repr_tree = nx.DiGraph()
    for u, v in ut.itertwo(self.values()):
        if not repr_tree.has_edge(v, u):
            repr_tree.add_edge(u, v)
    return repr_tree
Reconstruct the represented tree as a DiGraph, preserving the current rootedness.