code: string (lengths 51–2.38k)
docstring: string (lengths 4–15.2k)
def data(self, data):
    self._data = {det: d.copy() for (det, d) in data.items()}
Store a copy of the data.
def get_specification(self):
    resp = self._call('getApplicationSpec', proto.Empty())
    return ApplicationSpec.from_protobuf(resp)

Get the specification for the running application.

Returns
-------
spec : ApplicationSpec
def _DeserializeAttributeContainer(self, container_type, serialized_data):
    if not serialized_data:
        return None

    if self._serializers_profiler:
        self._serializers_profiler.StartTiming(container_type)

    try:
        serialized_string = serialized_data.decode('utf-8')
    except UnicodeDecodeError as exception:
        raise IOError('Unable to decode serialized data: {0!s}'.format(
            exception))

    attribute_container = self._serializer.ReadSerialized(serialized_string)

    if self._serializers_profiler:
        self._serializers_profiler.StopTiming(container_type)

    return attribute_container

Deserializes an attribute container.

Args:
    container_type (str): attribute container type.
    serialized_data (bytes): serialized attribute container data.

Returns:
    AttributeContainer: attribute container or None.

Raises:
    IOError: if the serialized data cannot be decoded.
    OSError: if the serialized data cannot be decoded.
def format_currency_field(__, prec, number, locale):
    locale = Locale.parse(locale)
    currency = get_territory_currencies(locale.territory)[0]
    if prec is None:
        pattern, currency_digits = None, True
    else:
        prec = int(prec)
        pattern = locale.currency_formats['standard']
        pattern = modify_number_pattern(pattern, frac_prec=(prec, prec))
        currency_digits = False
    return format_currency(number, currency, pattern, locale=locale,
                           currency_digits=currency_digits)
Formats a currency field.
def check_for_end_game(self):
    gameover = False
    for player in self.players:
        if player.playstate in (PlayState.CONCEDED, PlayState.DISCONNECTED):
            player.playstate = PlayState.LOSING
        if player.playstate == PlayState.LOSING:
            gameover = True

    if gameover:
        if self.players[0].playstate == self.players[1].playstate:
            for player in self.players:
                player.playstate = PlayState.TIED
        else:
            for player in self.players:
                if player.playstate == PlayState.LOSING:
                    player.playstate = PlayState.LOST
                else:
                    player.playstate = PlayState.WON
        self.state = State.COMPLETE
        self.manager.step(self.next_step, Step.FINAL_WRAPUP)
        self.manager.step(self.next_step, Step.FINAL_GAMEOVER)
        self.manager.step(self.next_step)

Check if one or more players are currently losing. End the game if they are.
def to_dict(self):
    def _json_safe(v):
        if isinstance(v, np.ndarray):
            return v.tolist()
        elif is_unit(v)[0]:
            return v.to_string()
        else:
            return v

    d = {k: _json_safe(v) for k, v in self._descriptors.items()}
    d['nparray'] = self.__class__.__name__.lower()
    return d
dump a representation of the nparray object to a dictionary. The nparray object should then be able to be fully restored via nparray.from_dict
def render(self, url, template=None, expiration=0):
    template = template or self.default_template
    return render_to_string(template, self.get_context(url, expiration))
Render feed template
def getVersionList(self, full_path, startnum=0, pagingrow=50, dummy=54213):
    data = {
        'orgresource': full_path,
        'startnum': startnum,
        'pagingrow': pagingrow,
        'userid': self.user_id,
        'useridx': self.useridx,
        'dummy': dummy,
    }

    s, metadata = self.POST('getVersionList', data)
    if s is True:
        return metadata
    else:
        print "Error getVersionList: Cannot get version list"
        return False

Get a version list of a file or directory.

:param full_path: The full path to get the file or directory property. Path should start with '/'
:param startnum: Start version index.
:param pagingrow: Max # of version list in one page.

:returns: ``metadata`` if success or ``False`` (failed to get history or there is no history)

:metadata:
    - createuser
    - filesize
    - getlastmodified
    - href
    - versioninfo
    - versionkey
def dim_axis_label(dimensions, separator=', '):
    if not isinstance(dimensions, list):
        dimensions = [dimensions]
    return separator.join([d.pprint_label for d in dimensions])
Returns an axis label for one or more dimensions.
def reorder_view(self, request):
    model = self.model

    if not self.has_change_permission(request):
        raise PermissionDenied

    if request.method == "POST":
        object_pks = request.POST.getlist('neworder[]')
        model.objects.set_orders(object_pks)
    return HttpResponse("OK")
The 'reorder' admin view for this model.
def find_network_by_name(self, si, path, name):
    return self.find_obj_by_path(si, path, name, self.Network)

Finds network in the vCenter or returns "None"

:param si: pyvmomi 'ServiceInstance'
:param path: the path to find the object ('dc' or 'dc/folder' or 'dc/folder/folder/etc...')
:param name: the network name to return
def remove_reserved_keys(self, op):
    sanitized = {}
    reserved = {}
    for k in op.keys():
        if str(k) not in RESERVED_KEYS:
            sanitized[str(k)] = copy.deepcopy(op[k])
        else:
            reserved[str(k)] = copy.deepcopy(op[k])
    return sanitized, reserved
Remove reserved keywords from an op dict, which can then safely be passed into the db. Returns a new op dict, and the reserved fields
def do_alarm_history_list(mc, args):
    fields = {}
    if args.dimensions:
        fields['dimensions'] = utils.format_parameters(args.dimensions)
    if args.starttime:
        _translate_starttime(args)
        fields['start_time'] = args.starttime
    if args.endtime:
        fields['end_time'] = args.endtime
    if args.limit:
        fields['limit'] = args.limit
    if args.offset:
        fields['offset'] = args.offset
    try:
        alarm = mc.alarms.history_list(**fields)
    except (osc_exc.ClientException, k_exc.HttpError) as he:
        raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
    else:
        output_alarm_history(args, alarm)
List alarms state history.
def project_community(index, start, end):
    results = {
        "author_metrics": [Authors(index, start, end)],
        "people_top_metrics": [Authors(index, start, end)],
        "orgs_top_metrics": [Organizations(index, start, end)],
    }
    return results

Compute the metrics for the project community section of the enriched git index.

Returns a dictionary containing "author_metrics", "people_top_metrics" and "orgs_top_metrics" as the keys and the related Metrics as the values.

:param index: index object
:param start: start date to get the data from
:param end: end date to get the data up to
:return: dictionary with the value of the metrics
def load(self, require=True, *args, **kwargs):
    if not require or args or kwargs:
        warnings.warn(
            "Parameters to load are deprecated. Call .resolve and "
            ".require separately.",
            PkgResourcesDeprecationWarning,
            stacklevel=2,
        )
    if require:
        self.require(*args, **kwargs)
    return self.resolve()
Require packages for this EntryPoint, then resolve it.
def disassemble(self, start=None, end=None, arch_mode=None):
    if arch_mode is None:
        arch_mode = self.binary.architecture_mode

    curr_addr = start if start else self.binary.ea_start
    end_addr = end if end else self.binary.ea_end

    while curr_addr < end_addr:
        encoding = self.__fetch_instr(curr_addr)

        asm_instr = self.disassembler.disassemble(encoding, curr_addr,
                                                  architecture_mode=arch_mode)
        if not asm_instr:
            return

        yield curr_addr, asm_instr, asm_instr.size

        curr_addr += asm_instr.size

Disassemble native instructions.

Args:
    start (int): Start address.
    end (int): End address.
    arch_mode (int): Architecture mode.

Returns:
    (int, Instruction, int): A tuple of the form (address, assembler instruction, instruction size).
def data(self, value):
    if not value:
        value = b''
    if len(value) > self.SIZE:
        raise ValueError("The maximum tag size is {0}".format(self.SIZE))
    self._data = value
    while len(self._data) < self.SIZE:
        self._data += b'\x00'
Set the byte data and fill up the bytes to fit the size.
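A minimal standalone sketch of the same pad-or-reject behavior (the function name and the example size of 4 are illustrative, not taken from the class above):

def pad_tag_data(value, size=4):
    # Reject oversized values, as the setter above does.
    value = value or b''
    if len(value) > size:
        raise ValueError("The maximum tag size is {0}".format(size))
    # Right-pad with NUL bytes up to the fixed size.
    return value + b'\x00' * (size - len(value))

assert pad_tag_data(b'ab') == b'ab\x00\x00'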
def _first_owner(self, name):
    owner = None
    unowned = []
    pieces = normalize_name(name).split('.')
    while pieces and is_none_or_zero_address(owner):
        name = '.'.join(pieces)
        owner = self.owner(name)
        if is_none_or_zero_address(owner):
            unowned.append(pieces.pop(0))
    return (owner, unowned, name)

Takes a name, and returns the owner of the deepest subdomain that has an owner

:returns: (owner or None, list(unowned_subdomain_labels), first_owned_domain)
def nice_true_ces(tc):
    cause_list = []
    next_list = []
    cause = '<--'
    effect = '-->'
    for event in tc:
        if event.direction == Direction.CAUSE:
            cause_list.append(["{0:.4f}".format(round(event.alpha, 4)),
                               event.mechanism, cause, event.purview])
        elif event.direction == Direction.EFFECT:
            next_list.append(["{0:.4f}".format(round(event.alpha, 4)),
                              event.mechanism, effect, event.purview])
        else:
            validate.direction(event.direction)

    true_list = [(cause_list[event], next_list[event])
                 for event in range(len(cause_list))]
    return true_list
Format a true |CauseEffectStructure|.
def configure_app(app, config=None, config_obj=None):
    app.config.from_object(config_obj or BaseConfig)
    if config is not None:
        app.config.from_pyfile(config)

Configure application instance.

Args:
    app (Flask): initialized Flask app instance
    config (Optional[path]): path to a Python module config file
    config_obj (Optional[class]): Python config object
def _prepare_sort_by_score(self, values, sort_options):
    base_tmp_key, tmp_keys = self._zset_to_keys(
        key=self._sort_by_sortedset['by'],
        values=values,
    )

    sort_options['by'] = '%s:*' % base_tmp_key

    for key in ('desc', 'alpha', 'get', 'store'):
        if key in self._sort_by_sortedset:
            sort_options[key] = self._sort_by_sortedset[key]

    if sort_options.get('get'):
        try:
            pos = sort_options['get'].index(SORTED_SCORE)
        except ValueError:
            pass
        else:
            sort_options['get'][pos] = '%s:*' % base_tmp_key

    return base_tmp_key, tmp_keys

Create the key to sort on the sorted set referenced in self._sort_by_sortedset and adapt sort options.
def parts():
    parts = {
        'Canon': [_ for _ in range(1, 5)],
        'Apostle': [5],
        'Paul': [_ for _ in range(6, 19)],
        'General': [_ for _ in range(19, 26)],
        'Apocalypse': [27],
    }
    return parts

Returns the dictionary with the part as key and the contained books as indices.
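The returned mapping can be inverted to look up which part a given book index belongs to; a small sketch using the parts() function above:

book_to_part = {idx: part for part, idxs in parts().items() for idx in idxs}
assert book_to_part[5] == 'Apostle'
assert book_to_part[20] == 'General'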
def _samples_dicts_to_array(samples_dicts, labels):
    itersamples = iter(samples_dicts)

    first_sample = next(itersamples)

    if labels is None:
        labels = list(first_sample)

    num_variables = len(labels)

    def _iter_samples():
        yield np.fromiter((first_sample[v] for v in labels),
                          count=num_variables, dtype=np.int8)
        try:
            for sample in itersamples:
                yield np.fromiter((sample[v] for v in labels),
                                  count=num_variables, dtype=np.int8)
        except KeyError:
            msg = "Each dict in 'samples' must have the same keys."
            raise ValueError(msg)

    return np.stack(list(_iter_samples())), labels

Convert an iterable of samples, where each sample is a dict, to a numpy 2d array. Also determines the labels if they are None.
def focusPrev(self, event):
    try:
        event.widget.tk_focusPrev().focus_set()
    except TypeError:
        name = event.widget.tk.call('tk_focusPrev', event.widget._w)
        event.widget._nametowidget(str(name)).focus_set()
Set focus to previous item in sequence
def switch_to_json_payload_encoding(call_fn, response_class):
    def json_serializer(*args, **kwargs):
        return bytes(json_format.MessageToJson(args[0], True,
                                               preserving_proto_field_name=True), "utf-8")

    def json_deserializer(*args, **kwargs):
        resp = response_class()
        json_format.Parse(args[0], resp, True)
        return resp

    call_fn._request_serializer = json_serializer
    call_fn._response_deserializer = json_deserializer
Switch payload encoding to JSON for GRPC call
def write(self, address, size, value):
    for i in range(0, size):
        self.__write_byte(address + i, (value >> (i * 8)) & 0xff)
Write arbitrary size content to memory.
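The shift-and-mask loop stores the value least-significant byte first (little-endian). A self-contained check of that decomposition (the helper name is illustrative):

def little_endian_bytes(value, size):
    # Byte i holds bits [8*i, 8*i + 8), exactly as write() above computes them.
    return [(value >> (i * 8)) & 0xff for i in range(size)]

assert little_endian_bytes(0x12345678, 4) == [0x78, 0x56, 0x34, 0x12]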
def _cookies_for_request(self, request):
    cookies = []
    for domain in self._cookies.keys():
        cookies.extend(self._cookies_for_domain(domain, request))
    return cookies
Return a list of cookies to be returned to server.
def put_feature(ctx, dataset, fid, feature, input):
    if feature is None:
        stdin = click.open_file(input, 'r')
        feature = stdin.read()
    feature = json.loads(feature)

    service = ctx.obj.get('service')
    res = service.update_feature(dataset, fid, feature)
    if res.status_code == 200:
        click.echo(res.text)
    else:
        raise MapboxCLIException(res.text.strip())

Create or update a dataset feature.

The semantics of HTTP PUT apply: if the dataset has no feature with the given `fid` a new feature will be created. Returns a GeoJSON representation of the new or updated feature.

    $ mapbox datasets put-feature dataset-id feature-id 'geojson-feature'

All endpoints require authentication. An access token with `datasets:write` scope is required, see `mapbox --help`.
def _verifyHostKey(self, hostKey, fingerprint):
    if fingerprint in self.knownHosts:
        return defer.succeed(True)
    return defer.fail(UnknownHostKey(hostKey, fingerprint))

Called when the ssh transport requests us to verify a given host key. Return a deferred that fires its callback if we accept the key, or its errback if we decide to reject it.
def errors(request, *args, **kwargs):
    search_term = request.GET.get('q', None)

    if '400' in search_term:
        return HttpResponseBadRequest(MESSAGE_400)
    elif '403' in search_term:
        return HttpResponseForbidden(MESSAGE_403)
    elif '404' in search_term:
        return HttpResponseNotFound(MESSAGE_404)
    elif '405' in search_term:
        return HttpResponseNotAllowed(['PATCH'], MESSAGE_405)
    return HttpResponseServerError(MESSAGE_500)
A dummy view that will throw errors. It'll throw any HTTP error that is contained in the search query.
def add_basemap(ax, zoom=12):
    url = ctx.sources.ST_TONER_LITE
    xmin, xmax, ymin, ymax = ax.axis()
    basemap, extent = ctx.bounds2img(xmin, ymin, xmax, ymax,
                                     zoom=zoom, url=url)
    ax.imshow(basemap, extent=extent, interpolation='bilinear')
    ax.axis((xmin, xmax, ymin, ymax))
Adds map to a plot.
def _validate_codeblock_size(self, cparams):
    if cparams.cblockw_init != 0 and cparams.cblockh_init != 0:
        width = cparams.cblockw_init
        height = cparams.cblockh_init
        if height * width > 4096 or height < 4 or width < 4:
            msg = ("The code block area is specified as "
                   "{height} x {width} = {area} square pixels. "
                   "Code block area cannot exceed 4096 square pixels. "
                   "Code block height and width dimensions must be larger "
                   "than 4 pixels.")
            msg = msg.format(height=height, width=width,
                             area=height * width)
            raise IOError(msg)
        if ((math.log(height, 2) != math.floor(math.log(height, 2)) or
             math.log(width, 2) != math.floor(math.log(width, 2)))):
            msg = ("Bad code block size ({height} x {width}). "
                   "The dimensions must be powers of 2.")
            msg = msg.format(height=height, width=width)
            raise IOError(msg)
Code block dimensions must satisfy certain restrictions. They must both be a power of 2 and the total area defined by the width and height cannot be either too great or too small for the codec.
def elapsed(t0=0.0):
    now = time()
    dt = now - t0
    dt_sec = Decimal(str(dt)).quantize(Decimal('.0001'), rounding=ROUND_DOWN)
    if dt_sec <= 1:
        dt_str = str(dt_sec) + ' second'
    else:
        dt_str = str(dt_sec) + ' seconds'
    return now, dt_str

Get elapsed time from the given time.

Returns:
    now: the absolute time now
    dt_str: elapsed time in string
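The Decimal/ROUND_DOWN combination truncates the elapsed seconds to four decimal places rather than rounding them; a quick self-contained illustration:

from decimal import Decimal, ROUND_DOWN

# 1.23456 truncates to 1.2345 (plain rounding would give 1.2346).
assert Decimal('1.23456').quantize(Decimal('.0001'), rounding=ROUND_DOWN) == Decimal('1.2345')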
def create_empty(self, name=None, renderers=None, RootNetworkList=None, verbose=False):
    PARAMS = set_param(["name", "renderers", "RootNetworkList"],
                       [name, renderers, RootNetworkList])
    response = api(url=self.__url + "/create empty", PARAMS=PARAMS,
                   method="POST", verbose=verbose)
    return response

Create a new, empty network. The new network may be created as part of an existing network collection or a new network collection.

:param name (string, optional): Enter the name of the new network.
:param renderers (string, optional): Select the renderer to use for the new network view. By default, the standard Cytoscape 2D renderer (Ding) will be used = [''],
:param RootNetworkList (string, optional): Choose the network collection the new network should be part of. If no network collection is selected, a new network collection is created. = [' -- Create new network collection --', 'cy:command_documentation_generation']
:param verbose: print more
def deserialize(klass, data):
    handler = DESERIALIZE_REGISTRY.get(klass)
    if handler:
        return handler(data)
    raise TypeError("There is no deserializer registered to handle "
                    "instances of '{}'".format(klass.__name__))
Helper function to access a method that creates objects of a given `klass` with the received `data`.
def cmd_async(self, low):
    fun = low.pop('fun')
    return self.asynchronous(fun, low)

Execute a function asynchronously; eauth is respected

This function requires that :conf_master:`external_auth` is configured and the user is authorized

.. code-block:: python

    >>> wheel.cmd_async({
        'fun': 'key.finger',
        'match': 'jerry',
        'eauth': 'auto',
        'username': 'saltdev',
        'password': 'saltdev',
    })
    {'jid': '20131219224744416681', 'tag': 'salt/wheel/20131219224744416681'}
def runjava(self, classpath, main, jvm_options=None, args=None, workunit_name=None,
            workunit_labels=None, workunit_log_config=None, dist=None):
    executor = self.create_java_executor(dist=dist)
    create_synthetic_jar = self.execution_strategy != self.NAILGUN
    try:
        return util.execute_java(classpath=classpath,
                                 main=main,
                                 jvm_options=jvm_options,
                                 args=args,
                                 executor=executor,
                                 workunit_factory=self.context.new_workunit,
                                 workunit_name=workunit_name,
                                 workunit_labels=workunit_labels,
                                 workunit_log_config=workunit_log_config,
                                 create_synthetic_jar=create_synthetic_jar,
                                 synthetic_jar_dir=self._executor_workdir)
    except executor.Error as e:
        raise TaskError(e)

Runs the java main using the given classpath and args.

If --execution-strategy=subprocess is specified then the java main is run in a freshly spawned subprocess, otherwise a persistent nailgun server dedicated to this Task subclass is used to speed up amortized run times.

:API: public
def format_exc(*exc_info):
    typ, exc, tb = exc_info or sys.exc_info()
    error = traceback.format_exception(typ, exc, tb)
    return "".join(error)
Show exception with traceback.
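Typical use is inside an except block, where sys.exc_info() supplies the default arguments (a sketch, assuming the format_exc helper above is in scope):

try:
    1 / 0
except ZeroDivisionError:
    text = format_exc()
assert 'ZeroDivisionError' in text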
def get_requests_for_local_unit(relation_name=None):
    local_name = local_unit().replace('/', '_')
    raw_certs_key = '{}.processed_requests'.format(local_name)
    relation_name = relation_name or 'certificates'
    bundles = []
    for rid in relation_ids(relation_name):
        for unit in related_units(rid):
            data = relation_get(rid=rid, unit=unit)
            if data.get(raw_certs_key):
                bundles.append({
                    'ca': data['ca'],
                    'chain': data.get('chain'),
                    'certs': json.loads(data[raw_certs_key])})
    return bundles

Extract any certificates data targeted at this unit down relation_name.

:param relation_name: str Name of relation to check for data.
:returns: List of bundles of certificates.
:rtype: List of dicts
def disconnect(self, client):
    self.clients.remove(client)
    del self.connect_args[client]
    client.disconnect()
Remove client from pool.
def dict_to_path(as_dict):
    result = as_dict.copy()
    loaders = {'Arc': Arc, 'Line': Line}
    entities = [None] * len(as_dict['entities'])
    for entity_index, entity in enumerate(as_dict['entities']):
        entities[entity_index] = loaders[entity['type']](
            points=entity['points'],
            closed=entity['closed'])
    result['entities'] = entities
    return result

Turn a pure dict into a dict containing entity objects that can be sent directly to a Path constructor.

Parameters
-----------
as_dict : dict
    Has keys: 'vertices', 'entities'

Returns
------------
kwargs : dict
    Has keys: 'vertices', 'entities'
def _depr(fn, usage, stacklevel=3):
    warn('{0} is deprecated. Use {1} instead'.format(fn, usage),
         stacklevel=stacklevel, category=DeprecationWarning)
Internal convenience function for deprecation warnings
def get_errors_for_state_name(self, name):
    return_value = None
    for state_id, name_outcome_tuple in self.child_errors.items():
        if name_outcome_tuple[0] == name:
            return_value = name_outcome_tuple[1]
            break
    return return_value

Returns the error message of the child state specified by name.

Note: This is a utility function that is used by the programmer to make a decision based on the final outcome of its child states. A state is not uniquely specified by the name, but as the programmer normally does not want to use state-ids in his code this utility function was defined.

:param name: The name of the state to get the error message for
:return:
def add_class(self, className):
    if className not in self._dom_classes:
        self._dom_classes = list(self._dom_classes) + [className]
    return self
Adds a class to the top level element of the widget. Doesn't add the class if it already exists.
async def on_raw_join(self, message):
    nick, metadata = self._parse_user(message.source)
    self._sync_user(nick, metadata)

    channels = message.params[0].split(',')
    if self.is_same_nick(self.nickname, nick):
        for channel in channels:
            if not self.in_channel(channel):
                self._create_channel(channel)
                await self.rawmsg('MODE', channel)
    else:
        for channel in channels:
            if self.in_channel(channel):
                self.channels[channel]['users'].add(nick)

    for channel in channels:
        await self.on_join(channel, nick)
JOIN command.
def cublasStrmm(handle, side, uplo, trans, diag, m, n, alpha, A, lda, B, ldb, C, ldc):
    status = _libcublas.cublasStrmm_v2(handle,
                                       _CUBLAS_SIDE_MODE[side],
                                       _CUBLAS_FILL_MODE[uplo],
                                       _CUBLAS_OP[trans],
                                       _CUBLAS_DIAG[diag],
                                       m, n,
                                       ctypes.byref(ctypes.c_float(alpha)),
                                       int(A), lda, int(B), ldb, int(C), ldc)
    cublasCheckStatus(status)
Matrix-matrix product for real triangular matrix.
def file_client(self):
    if not self._file_client:
        self._file_client = salt.fileclient.get_file_client(
            self.opts, self.pillar_rend)
    return self._file_client
Return a file client. Instantiates on first call.
def set_image(self, image, add_to_canvas=True):
    if not isinstance(image, BaseImage.BaseImage):
        raise ValueError("Wrong type of object to load: %s" % (
            str(type(image))))

    canvas_img = self.get_canvas_image()
    old_image = canvas_img.get_image()
    self.make_callback('image-unset', old_image)

    with self.suppress_redraw:
        canvas_img.set_image(image)

        if add_to_canvas:
            try:
                self.canvas.get_object_by_tag(self._canvas_img_tag)
            except KeyError:
                self.canvas.add(canvas_img, tag=self._canvas_img_tag)
            self.canvas.lower_object(canvas_img)

Set an image to be displayed.

If there is no error, the ``'image-unset'`` and ``'image-set'`` callbacks will be invoked.

Parameters
----------
image : `~ginga.AstroImage.AstroImage` or `~ginga.RGBImage.RGBImage`
    Image object.

add_to_canvas : bool
    Add image to canvas.
def serie(self, serie):
    return dict(
        plot=self.node(
            self.graph.nodes['plot'],
            class_='series serie-%d color-%d' % (serie.index, serie.index)),
        overlay=self.node(
            self.graph.nodes['overlay'],
            class_='series serie-%d color-%d' % (serie.index, serie.index)),
        text_overlay=self.node(
            self.graph.nodes['text_overlay'],
            class_='series serie-%d color-%d' % (serie.index, serie.index)))
Make serie node
def get_values(self):
    attrs = vars(self).copy()
    attrs.pop('_server_config')
    attrs.pop('_fields')
    attrs.pop('_meta')
    if '_path_fields' in attrs:
        attrs.pop('_path_fields')
    return attrs

Return a copy of field values on the current object.

This method is almost identical to ``vars(self).copy()``. However, only instance attributes that correspond to a field are included in the returned dict.

:return: A dict mapping field names to user-provided values.
def getAnalysisServicesVocabulary(self):
    bsc = getToolByName(self, 'bika_setup_catalog')
    brains = bsc(portal_type='AnalysisService', is_active=True)
    items = [(b.UID, b.Title) for b in brains]
    items.insert(0, ("", ""))
    items.sort(lambda x, y: cmp(x[1], y[1]))
    return DisplayList(list(items))
Get all active Analysis Services from Bika Setup and return them as Display List.
def correct_rates(rates, opt_qes, combs):
    corrected_rates = np.array([
        rate / opt_qes[comb[0]] / opt_qes[comb[1]]
        for rate, comb in zip(rates, combs)
    ])
    return corrected_rates

Applies optimal qes to rates. Should be closer to fitted_rates afterwards.

Parameters
----------
rates: numpy array of rates of all PMT combinations
opt_qes: numpy array of optimal qe values for all PMTs
combs: pmt combinations used to correct

Returns
-------
corrected_rates: numpy array of corrected rates for all PMT combinations
def plot(values, mode_names, title, (xlabel, ylabel), out_file):
    matplotlib.pyplot.clf()
    for mode, mode_name in mode_names.iteritems():
        vals = values[mode]
        matplotlib.pyplot.plot(
            [x for x, _ in vals],
            [y for _, y in vals],
            label=mode_name
        )
    matplotlib.pyplot.title(title)
    matplotlib.pyplot.xlabel(xlabel)
    matplotlib.pyplot.ylabel(ylabel)
    if len(mode_names) > 1:
        matplotlib.pyplot.legend()
    matplotlib.pyplot.savefig(out_file)
Plot a diagram
def _get_seo_content_types(seo_models):
    try:
        return [ContentType.objects.get_for_model(m).id for m in seo_models]
    except Exception:
        return []
Returns a list of content types from the models defined in settings.
def awaitTermination(self, timeout=None):
    if timeout is not None:
        IOLoop.current().call_later(timeout, self.stop)
    IOLoop.current().start()
    IOLoop.clear_current()

Wait for context to stop.

:param float timeout: in seconds
def starts(self, layer):
    starts = []
    for data in self[layer]:
        starts.append(data[START])
    return starts

Retrieve start positions of elements of the given layer.
def get_header_as_text(file_content, reference_id):
    res = _CONTENT_PATTERN.findall(file_content)
    if len(res) == 2:
        content = res[0]
    elif len(res) == 1:
        return ''
    else:
        raise ValueError('Unexpected <code><pre> sections: "%r"' % res)
    return _clean_html(content)

Returns the cable's header as text.

`file_content`
    The HTML file content, c.f. `get_file_content`.
def decrease_posts_count_after_post_deletion(sender, instance, **kwargs):
    if not instance.approved:
        return

    try:
        assert instance.poster_id is not None
        poster = User.objects.get(pk=instance.poster_id)
    except AssertionError:
        return
    except ObjectDoesNotExist:
        return

    profile, dummy = ForumProfile.objects.get_or_create(user=poster)
    if profile.posts_count:
        profile.posts_count = F('posts_count') - 1
        profile.save()
Decreases the member's post count after a post deletion. This receiver handles the deletion of a forum post: the posts count related to the post's author is decreased.
def _get_peer_connection(self, blacklist=None):
    blacklist = blacklist or set()

    peer = None
    connection = None

    while connection is None:
        peer = self._choose(blacklist)

        if not peer:
            raise NoAvailablePeerError(
                "Can't find an available peer for '%s'" % self.service
            )

        try:
            connection = yield peer.connect()
        except NetworkError as e:
            log.info(
                'Failed to connect to %s. Trying a different host.',
                peer.hostport,
                exc_info=e,
            )
            connection = None
            blacklist.add(peer.hostport)

    raise gen.Return((peer, connection))

Find a peer and connect to it.

Returns a ``(peer, connection)`` tuple. Raises ``NoAvailablePeerError`` if no healthy peers are found.

:param blacklist: If given, a set of hostports for peers that we must not try.
def set(self, path, item, replace):
    if len(path) == 0:
        if self._item is None or replace:
            self._item = item
        return self._item
    else:
        head, tail = path[0], path[1:]
        if head.startswith(':'):
            default = (head[1:], self.__class__())
            _, rtree = self._subtrees.setdefault(self._WILDCARD, default)
            return rtree.set(tail, item, replace)
        else:
            rtree = self._subtrees.setdefault(head, self.__class__())
            return rtree.set(tail, item, replace)

Sets item for `path` and returns the item. Replaces existing item with `item` when `replace` is true.

:param path: Path for item
:param item: New item
:param replace: Updating mode
:type path: list
:type item: object
:type replace: bool
def execute(self):
    self.output = ""
    if self._decode_output:
        with Popen(self.command, shell=True, stdout=PIPE) as process:
            self._output = [i.decode("utf-8").strip() for i in process.stdout]
        self._success = True
    else:
        os.system(self.command)
        self._success = True
    return self
Execute a system command.
def shell(state, host, commands, chdir=None):
    if isinstance(commands, six.string_types):
        commands = [commands]

    for command in commands:
        if chdir:
            yield 'cd {0} && ({1})'.format(chdir, command)
        else:
            yield command

Run raw shell code.

+ commands: command or list of commands to execute on the remote server
+ chdir: directory to cd into before executing commands
def gtpswd(prompt, confirmPassword):
    try:
        return util.getPassword(prompt=prompt,
                                confirmPrompt=confirmPassword,
                                confirm=True)
    except TypeError:
        return util.getPassword(prompt=prompt, confirm=True)
Temporary wrapper for Twisted's getPassword until a version that supports customizing the 'confirm' prompt is released.
def trace_module(module, tracer=tracer, pattern=r".*", flags=0):
    if is_traced(module):
        return False

    global REGISTERED_MODULES

    for name, function in inspect.getmembers(module, inspect.isfunction):
        if name not in module.__all__ or not re.search(pattern, name, flags=flags):
            continue
        trace_function(module, function, tracer)

    for name, cls in inspect.getmembers(module, inspect.isclass):
        if name not in module.__all__ or not re.search(pattern, name, flags=flags):
            continue
        trace_class(cls, tracer, pattern, flags)

    REGISTERED_MODULES.add(module)
    set_traced(module)
    return True

Traces given module members using given tracer.

:param module: Module to trace.
:type module: ModuleType
:param tracer: Tracer.
:type tracer: object
:param pattern: Matching pattern.
:type pattern: unicode
:param flags: Matching regex flags.
:type flags: int
:return: Definition success.
:rtype: bool

:note: Only members exported by **__all__** attribute will be traced.
def get_value(cls, object_version, key):
    obj = cls.get(object_version, key)
    return obj.value if obj else None
Get the tag value.
def matrix_to_images(data_matrix, mask):
    if data_matrix.ndim > 2:
        data_matrix = data_matrix.reshape(data_matrix.shape[0], -1)

    numimages = len(data_matrix)
    numVoxelsInMatrix = data_matrix.shape[1]
    numVoxelsInMask = (mask >= 0.5).sum()
    if numVoxelsInMask != numVoxelsInMatrix:
        raise ValueError('Num masked voxels %i must match data matrix %i'
                         % (numVoxelsInMask, numVoxelsInMatrix))

    imagelist = []
    for i in range(numimages):
        img = mask.clone()
        img[mask >= 0.5] = data_matrix[i, :]
        imagelist.append(img)
    return imagelist

Unmasks rows of a matrix and writes as images

ANTsR function: `matrixToImages`

Arguments
---------
data_matrix : numpy.ndarray
    each row corresponds to an image
    array should have number of columns equal to non-zero voxels in the mask

mask : ANTsImage
    image containing a binary mask. Rows of the matrix are unmasked and written as images. The mask defines the output image space

Returns
-------
list of ANTsImage types
def extract_symbol_app(parser, _, args):
    parser.add_argument('file', help='ELF file to extract a symbol from')
    parser.add_argument('symbol', help='the symbol to extract')
    args = parser.parse_args(args)
    return ELF(args.file).get_symbol(args.symbol).content
Extract a symbol from an ELF file.
def error_state(self):
    self.buildstate.state.lasttime = time()
    self.buildstate.commit()
    return self.buildstate.state.error
Set the error condition
def copy(self):
    parser_copy = self.__class__(self.argument_class, self.namespace_class)
    parser_copy.args = deepcopy(self.args)
    parser_copy.trim = self.trim
    parser_copy.bundle_errors = self.bundle_errors
    return parser_copy
Creates a copy of this RequestParser with the same set of arguments
def fastaReadHeaders(fasta):
    headers = []
    fileHandle = open(fasta, 'r')
    line = fileHandle.readline()
    while line != '':
        assert line[-1] == '\n'
        if line[0] == '>':
            headers.append(line[1:-1])
        line = fileHandle.readline()
    fileHandle.close()
    return headers

Returns a list of fasta header lines, excluding the leading '>' character and the trailing newline.
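A quick round-trip check of the reader above (uses a temporary file; cleanup is omitted for brevity and the behavior assumes a POSIX-style filesystem):

import tempfile

with tempfile.NamedTemporaryFile('w', suffix='.fa', delete=False) as fh:
    fh.write('>seq1 description\nACGT\n')
assert fastaReadHeaders(fh.name) == ['seq1 description']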
def community_neighbors(c_j, reverse_index_rows, unavailable_communities, unavailable_communities_counter):
    indices = list()
    extend = indices.extend
    for node in c_j:
        extend(reverse_index_rows[node])
    indices = np.array(indices)
    indices = np.setdiff1d(indices, unavailable_communities[:unavailable_communities_counter + 1])
    return indices

Finds communities with shared nodes to a seed community. Called by mroc.

Inputs:
    - c_j: The seed community for which we want to find which communities overlap.
    - reverse_index_rows: A node to community indicator matrix.
    - unavailable_communities: A set of communities that have already either been merged or failed to merge.
    - unavailable_communities_counter: The number of such communities.

Outputs:
    - indices: An array containing the communities that exhibit overlap with the seed community.
def tmatrix_cov(C, row=None):
    if row is None:
        alpha = C + 1.0
        alpha0 = alpha.sum(axis=1)
        norm = alpha0 ** 2 * (alpha0 + 1.0)
        Z = -alpha[:, :, np.newaxis] * alpha[:, np.newaxis, :]
        ind = np.diag_indices(C.shape[0])
        Z[:, ind[0], ind[1]] += alpha0[:, np.newaxis] * alpha
        cov = Z / norm[:, np.newaxis, np.newaxis]
        return cov
    else:
        alpha = C[row, :] + 1.0
        return dirichlet_covariance(alpha)

Covariance tensor for the non-reversible transition matrix ensemble

Normally the covariance tensor cov(p_ij, p_kl) would carry four indices (i,j,k,l). In the non-reversible case rows are independent so that cov(p_ij, p_kl)=0 for i not equal to k. Therefore the function will only return cov(p_ij, p_ik).

Parameters
----------
C : (M, M) ndarray
    Count matrix
row : int (optional)
    If row is given return covariance matrix for specified row only

Returns
-------
cov : (M, M, M) ndarray
    Covariance tensor
def find_ruuvitags(bt_device=''):
    log.info('Finding RuuviTags. Stop with Ctrl+C.')

    datas = dict()

    for new_data in RuuviTagSensor._get_ruuvitag_datas(bt_device=bt_device):
        if new_data[0] in datas:
            continue
        datas[new_data[0]] = new_data[1]
        log.info(new_data[0])
        log.info(new_data[1])

    return datas

Find all RuuviTags. Function will print the mac and the state of the sensors when found. Function will execute until it is stopped. Stop execution with Ctrl+C.

Returns:
    dict: MAC and state of found sensors
def nodes(**kwargs):
    cfg = _setup_conn(**kwargs)
    try:
        api_instance = kubernetes.client.CoreV1Api()
        api_response = api_instance.list_node()

        return [k8s_node['metadata']['name'] for k8s_node in
                api_response.to_dict().get('items')]
    except (ApiException, HTTPError) as exc:
        if isinstance(exc, ApiException) and exc.status == 404:
            return None
        else:
            log.exception('Exception when calling CoreV1Api->list_node')
            raise CommandExecutionError(exc)
    finally:
        _cleanup(**cfg)

Return the names of the nodes composing the kubernetes cluster

CLI Examples::

    salt '*' kubernetes.nodes
    salt '*' kubernetes.nodes kubeconfig=/etc/salt/k8s/kubeconfig context=minikube
def _range_from_slice(myslice, start=None, stop=None, step=None, length=None):
    assert isinstance(myslice, slice)
    step = myslice.step if myslice.step is not None else step
    if step is None:
        step = 1
    start = myslice.start if myslice.start is not None else start
    if start is None:
        start = 0
    stop = myslice.stop if myslice.stop is not None else stop
    if length is not None:
        stop_inferred = floor(start + step * length)
        if stop is not None and stop < stop_inferred:
            raise ValueError("'stop' ({stop}) and ".format(stop=stop) +
                             "'length' ({length}) ".format(length=length) +
                             "are not compatible.")
        stop = stop_inferred
    if stop is None and length is None:
        raise ValueError("'stop' and 'length' cannot be both unspecified.")
    myrange = np.arange(start, stop, step)
    if length is not None:
        assert len(myrange) == length
    return myrange
Convert a slice to an array of integers.
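For example, a slice that only carries a step still yields a concrete range once start and length are supplied (a sketch calling the helper above; numpy is only needed for the comparison):

import numpy as np

# stop is inferred as floor(start + step * length) = floor(1 + 2 * 3) = 7,
# so the result is np.arange(1, 7, 2).
result = _range_from_slice(slice(None, None, 2), start=1, length=3)
assert np.array_equal(result, np.array([1, 3, 5]))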
def cliques(self, xg):
    g = nx.DiGraph()
    for (x, y) in self.merged_ontology.get_graph().edges():
        g.add_edge(x, y)
    for (x, y) in xg.edges():
        g.add_edge(x, y)
        g.add_edge(y, x)
    return list(strongly_connected_components(g))

Return all equivalence set cliques, assuming each edge in the xref graph is treated as equivalent, and all edges in ontology are subClassOf

Arguments
---------
xg : Graph
    an xref graph

Returns
-------
list of sets
def _add_resources(data, runtime):
    if "config" not in data:
        data["config"] = {}
    resources = data.get("resources", {}) or {}
    if isinstance(resources, six.string_types) and resources.startswith(("{", "[")):
        resources = json.loads(resources)
        data["resources"] = resources
    assert isinstance(resources, dict), (resources, data)
    data["config"]["resources"] = resources

    memory = int(float(runtime["ram"]) / float(runtime["cores"]))
    data["config"]["resources"].update({"default": {"cores": int(runtime["cores"]),
                                                    "memory": "%sM" % memory,
                                                    "jvm_opts": ["-Xms%sm" % min(1000, memory // 2),
                                                                 "-Xmx%sm" % memory]}})
    data["config"]["algorithm"]["num_cores"] = int(runtime["cores"])
    return data
Merge input resources with current CWL runtime parameters.
def demote(self, amount_, *queries, **kw):
    q = Q()
    for query in queries:
        q += query
    q += Q(**kw)
    return self._clone(next_step=('demote', (amount_, q)))

Returns a new S instance with boosting query and demotion.

You can demote documents that match query criteria::

    q = (S().query(title='trucks')
            .demote(0.5, description__match='gross'))

    q = (S().query(title='trucks')
            .demote(0.5, Q(description__match='gross')))

This is implemented using the boosting query in Elasticsearch. Anything you specify with ``.query()`` goes into the positive section. The negative query and negative boost portions are specified as the first and second arguments to ``.demote()``.

.. Note::

   Calling this again will overwrite previous ``.demote()`` calls.
def get_shell(self):
    if (not hasattr(self.shell, 'get_doc') or
            (hasattr(self.shell, 'is_running') and
             not self.shell.is_running())):
        self.shell = None
        if self.main.ipyconsole is not None:
            shell = self.main.ipyconsole.get_current_shellwidget()
            if shell is not None and shell.kernel_client is not None:
                self.shell = shell
        if self.shell is None:
            self.shell = self.internal_shell
    return self.shell
Return shell which is currently bound to Help, or another running shell if it has been terminated
def encode_message(self):
    if not self._message:
        raise ValueError("No message data to encode.")
    cloned_data = self._message.clone()
    self._populate_message_attributes(cloned_data)
    encoded_data = []
    c_uamqp.get_encoded_message_size(cloned_data, encoded_data)
    return b"".join(encoded_data)

Encode message to AMQP wire-encoded bytearray.

:rtype: bytearray
def to_json(self):
    json_data = dict()
    for field_name, field_obj in self._fields.items():
        if isinstance(field_obj, NestedDocumentField):
            nested_document = field_obj.__get__(self, self.__class__)
            value = None if nested_document is None else nested_document.to_json()
        elif isinstance(field_obj, BaseField):
            value = field_obj.__get__(self, self.__class__)
            value = field_obj.to_json(value)
        else:
            continue
        if value is None:
            continue
        json_data[field_name] = value
    return json_data
Converts given document to JSON dict.
def _copy(self):
    ins = copy.copy(self)
    ins._fire_page_number(self.page_number + 1)
    return ins

Copy the instance, advancing the page number by one.
def raw_name(self):
    parts = self.raw_parts
    if self.is_absolute():
        parts = parts[1:]
        if not parts:
            return ""
        else:
            return parts[-1]
    else:
        return parts[-1]
The last part of raw_parts.
async def georadiusbymember(self, name, member, radius, unit=None,
                            withdist=False, withcoord=False, withhash=False,
                            count=None, sort=None, store=None, store_dist=None):
    return await self._georadiusgeneric('GEORADIUSBYMEMBER',
                                        name, member, radius, unit=unit,
                                        withdist=withdist, withcoord=withcoord,
                                        withhash=withhash, count=count,
                                        sort=sort, store=store,
                                        store_dist=store_dist)
This command is exactly like ``georadius`` with the sole difference that instead of taking, as the center of the area to query, a longitude and latitude value, it takes the name of a member already existing inside the geospatial index represented by the sorted set.
def _req_lixian_task_lists(self, page=1):
    url = 'http://115.com/lixian/'
    params = {'ct': 'lixian', 'ac': 'task_lists'}
    self._load_signatures()
    data = {
        'page': page,
        'uid': self.user_id,
        'sign': self._signatures['offline_space'],
        'time': self._lixian_timestamp,
    }
    req = Request(method='POST', url=url, params=params, data=data)
    res = self.http.send(req)
    if res.state:
        self._task_count = res.content['count']
        self._task_quota = res.content['quota']
        return res.content['tasks']
    else:
        msg = 'Failed to get tasks.'
        raise RequestFailure(msg)
This request will cause the system to create a default downloads directory if it does not exist
def fetch(self, endpoint, data=None):
    payload = {
        "lastServerChangeId": "-1",
        "csrf": self.__csrf,
        "apiClient": "WEB",
    }
    if data is not None:
        payload.update(data)
    return self.post(endpoint, payload)

For getting data after logging in.
def execute(self, cmd, cwd):
    self.output = ""
    env = os.environ.copy()
    env.update(self.env)
    if six.PY2:
        if self._stdout == self.DEVNULL:
            self._stdout = open(os.devnull, 'w+b')
        if self._stderr == self.DEVNULL:
            self._stderr = open(os.devnull, 'w+b')
    proc = subprocess.Popen(
        cmd,
        stdout=self._stdout,
        stderr=self._stderr,
        bufsize=0,
        universal_newlines=True,
        cwd=cwd,
        env=env,
        close_fds=ON_POSIX
    )
    for line in self._unbuffered(proc):
        self.line_handler(line)
    return_code = proc.poll()
    if return_code:
        logger.error(self.output)
        raise subprocess.CalledProcessError(
            return_code, cmd, output=str(self.output)
        )
    return self.output

Execute commands and output this

:param cmd: -- list of cmd command and arguments
:type cmd: list
:param cwd: -- workdir for executions
:type cwd: str,unicode
:return: -- string with full output
:rtype: str
def delete_attribute_group(group_id, **kwargs):
    user_id = kwargs['user_id']

    try:
        group_i = db.DBSession.query(AttrGroup).filter(AttrGroup.id == group_id).one()
        group_i.project.check_write_permission(user_id)
        db.DBSession.delete(group_i)
        db.DBSession.flush()
        log.info("Group %s in project %s deleted", group_i.id, group_i.project_id)
    except NoResultFound:
        raise HydraError('No Attribute Group %s was found' % group_id)

    return 'OK'
Delete an attribute group.
def _parse_dtype(self, space):
    if isinstance(space, gym.spaces.Discrete):
        return tf.int32
    if isinstance(space, gym.spaces.Box):
        return tf.float32
    raise NotImplementedError()

Get a tensor dtype from an OpenAI Gym space.

Args:
    space: Gym space.

Raises:
    NotImplementedError: For spaces other than Box and Discrete.

Returns:
    TensorFlow data type.
def create(self, name, kind, **kwargs):
    kindpath = self.kindpath(kind)
    self.post(kindpath, name=name, **kwargs)

    name = UrlEncoded(name, encode_slash=True)
    path = _path(
        self.path + kindpath,
        '%s:%s' % (kwargs['restrictToHost'], name)
        if 'restrictToHost' in kwargs else name
    )
    return Input(self.service, path, kind)

Creates an input of a specific kind in this collection, with any arguments you specify.

:param `name`: The input name.
:type name: ``string``
:param `kind`: The kind of input:
    - "ad": Active Directory
    - "monitor": Files and directories
    - "registry": Windows Registry
    - "script": Scripts
    - "splunktcp": TCP, processed
    - "tcp": TCP, unprocessed
    - "udp": UDP
    - "win-event-log-collections": Windows event log
    - "win-perfmon": Performance monitoring
    - "win-wmi-collections": WMI
:type kind: ``string``
:param `kwargs`: Additional arguments (optional). For more about the available parameters, see `Input parameters <http://dev.splunk.com/view/SP-CAAAEE6#inputparams>`_ on Splunk Developer Portal.
:type kwargs: ``dict``
:return: The new :class:`Input`.
def _maybe_repeat(self, x):
    if isinstance(x, list):
        assert len(x) == self.n
        return x
    else:
        return [x] * self.n

Utility function for processing arguments that are singletons or lists.

Args:
    x: either a list of self.n elements, or not a list.

Returns:
    a list of self.n elements.
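A standalone equivalent with n fixed, to show both branches (the name maybe_repeat is illustrative, not the method above):

def maybe_repeat(x, n=3):
    # Lists are passed through; singletons are repeated n times.
    return x if isinstance(x, list) else [x] * n

assert maybe_repeat(7) == [7, 7, 7]
assert maybe_repeat([1, 2, 3]) == [1, 2, 3]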
def get_v_total_stress_at_depth(self, z):
    if not hasattr(z, "__len__"):
        return self.one_vertical_total_stress(z)
    else:
        sigma_v_effs = []
        for value in z:
            sigma_v_effs.append(self.one_vertical_total_stress(value))
        return np.array(sigma_v_effs)
Determine the vertical total stress at depth z, where z can be a number or an array of numbers.
def parse_changes():
    with open('CHANGES') as changes:
        for match in re.finditer(RE_CHANGES, changes.read(1024), re.M):
            if len(match.group(1)) != len(match.group(3)):
                error('incorrect underline in CHANGES')
            date = datetime.datetime.strptime(match.group(4), '%Y-%m-%d').date()
            if date != datetime.date.today():
                error('release date is not today')
            return match.group(2)
    error('invalid release entry in CHANGES')
grab version from CHANGES and validate entry
def add_node(self, agent_type=None, state=None, name='network_process', **state_params):
    agent_id = int(len(self.global_topology.nodes()))
    agent = agent_type(self.env, agent_id=agent_id, state=state, name=name, **state_params)
    self.global_topology.add_node(agent_id, {'agent': agent})
    return agent_id

Add a new node to the current network

Parameters
----------
agent_type : NetworkAgent subclass
    Agent in the new node will be instantiated using this agent class
state : object
    State of the Agent, this may be an integer or string or any other
name : str, optional
    Descriptive name of the agent
state_params : keyword arguments, optional
    Key-value pairs of other state parameters for the agent

Return
------
int
    Agent ID of the new node
def hash_str(data, hasher=None):
    hasher = hasher or hashlib.sha1()
    hasher.update(data)
    return hasher
Checksum hash a string.
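Because the hasher object itself is returned, digests are read off afterwards; a sketch of the default SHA-1 path using hashlib directly:

import hashlib

hasher = hashlib.sha1()
hasher.update(b"hello")
assert hasher.hexdigest() == 'aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d'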
def add_parent(self, node):
    if not isinstance(node, (CondorDAGNode, CondorDAGManNode)):
        raise CondorDAGNodeError, "Parent must be a CondorDAGNode or a CondorDAGManNode"
    self.__parents.append(node)

Add a parent to this node. This node will not be executed until the parent node has run successfully.

@param node: CondorDAGNode to add as a parent.
def get_next_application_id(nodes):
    used = set([n.application_id for n in nodes])
    pool = set(range(1, 512))
    try:
        return (pool - used).pop()
    except KeyError:
        raise IOUError("Cannot create a new IOU VM (limit of 512 VMs on one host reached)")

Calculates a free application_id from the given nodes.

:param nodes:
:raises IOUError: when the number of VMs exceeds the limit
:return: integer first free id
def user(self, username=None, pk=None, **kwargs):
    _users = self.users(username=username, pk=pk, **kwargs)

    if len(_users) == 0:
        raise NotFoundError("No user criteria matches")
    if len(_users) != 1:
        raise MultipleFoundError("Multiple users fit criteria")

    return _users[0]

User of KE-chain.

Provides single user of :class:`User` of KE-chain. You can filter on username or id or an advanced filter.

:param username: (optional) username to filter
:type username: basestring or None
:param pk: (optional) id of the user to filter
:type pk: basestring or None
:param kwargs: Additional filtering keyword=value arguments
:type kwargs: dict or None
:return: List of :class:`User`
:raises NotFoundError: when a user could not be found
:raises MultipleFoundError: when more than a single user can be found
def project_layout(proposal, user=None, repo=None, log=None):
    proposal = proposal.lower()
    try:
        os.mkdir(proposal)
    except FileExistsError:
        log.info('Skip directory structure, as project seems to already exist')

    with open('.gitignore', 'w') as f:
        f.write(
        )

    with open('/'.join([proposal, '__init__.py']), 'w') as f:
        f.write(
        )

    travis_yml()

    log.info('Working in %s', os.getcwd())
    os.listdir('.')

    subprocess.call(['git', 'add', '.'])
    subprocess.call(['git', 'commit', "-am'initial commit of %s'" % proposal])
    subprocess.call(['git', "push", "origin", "master:master"])

generate the project template

proposal is the name of the project, user is an object containing some information about the user:

- full name,
- github username
- email
def get_agent(self,
              parent,
              retry=google.api_core.gapic_v1.method.DEFAULT,
              timeout=google.api_core.gapic_v1.method.DEFAULT,
              metadata=None):
    if 'get_agent' not in self._inner_api_calls:
        self._inner_api_calls[
            'get_agent'] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.get_agent,
                default_retry=self._method_configs['GetAgent'].retry,
                default_timeout=self._method_configs['GetAgent'].timeout,
                client_info=self._client_info,
            )

    request = agent_pb2.GetAgentRequest(parent=parent, )
    return self._inner_api_calls['get_agent'](
        request, retry=retry, timeout=timeout, metadata=metadata)

Retrieves the specified agent.

Example:
    >>> import dialogflow_v2
    >>>
    >>> client = dialogflow_v2.AgentsClient()
    >>>
    >>> parent = client.project_path('[PROJECT]')
    >>>
    >>> response = client.get_agent(parent)

Args:
    parent (str): Required. The project that the agent to fetch is associated with. Format: ``projects/<Project ID>``.
    retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried.
    timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt.
    metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method.

Returns:
    A :class:`~google.cloud.dialogflow_v2.types.Agent` instance.

Raises:
    google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason.
    google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed.
    ValueError: If the parameters are invalid.