Dataset columns: Unnamed: 0 (int64, values 0 to 389k); code (string, lengths 26 to 79.6k); docstring (string, lengths 1 to 46.9k)
378,300
def readColorLUT(infile, distance_modulus, mag_1, mag_2, mag_err_1, mag_err_2):
    # FITS extension and field names below are assumed; the string literals
    # were lost in extraction.
    reader = pyfits.open(infile)
    distance_modulus_array = reader['DISTANCE_MODULUS'].data.field('DISTANCE_MODULUS')
    if not numpy.any(numpy.fabs(distance_modulus_array - distance_modulus) < 1.e-3):
        logger.warning("Distance modulus %.2f not available in file %s" % (distance_modulus, infile))
        logger.warning('Available distance moduli: ' + str(distance_modulus_array))
        return False
    distance_modulus_key = '%.2f' % (distance_modulus_array[numpy.argmin(numpy.fabs(distance_modulus_array - distance_modulus))])
    bins_mag_err = reader['BINS_MAG_ERR'].data.field('BINS_MAG_ERR')
    bins_mag_1 = reader['BINS_MAG_1'].data.field('BINS_MAG_1')
    bins_mag_2 = reader['BINS_MAG_2'].data.field('BINS_MAG_2')
    index_mag_err_1 = numpy.clip(numpy.digitize(mag_err_1, bins_mag_err) - 1, 0, len(bins_mag_err) - 2)
    index_mag_err_2 = numpy.clip(numpy.digitize(mag_err_2, bins_mag_err) - 1, 0, len(bins_mag_err) - 2)
    u_color = numpy.zeros(len(mag_1))
    for index_mag_err_1_select in range(0, len(bins_mag_err) - 1):
        for index_mag_err_2_select in range(0, len(bins_mag_err) - 1):
            cut = numpy.logical_and(index_mag_err_1 == index_mag_err_1_select,
                                    index_mag_err_2 == index_mag_err_2_select)
            if numpy.sum(cut) < 1:
                continue
            histo = reader[distance_modulus_key].data.field('%i%i' % (index_mag_err_1_select, index_mag_err_2_select))
            u_color[cut] = ugali.utils.binning.take2D(histo, mag_2[cut], mag_1[cut], bins_mag_2, bins_mag_1)
    reader.close()
    return u_color
Take in a color look-up table and return the signal color evaluated for each object. Consider making the argument a Catalog object rather than magnitudes and uncertainties.
378,301
def Group(params, name=None, type=None):
    atts = {}
    if name:
        atts['name'] = name
    if type:
        atts['type'] = type
    # element tag assumed to be 'Group'; literal lost in extraction
    g = objectify.Element('Group', attrib=atts)
    for p in params:
        g.append(p)
    return g
Groups together Params for adding under the 'What' section. Args: params(list of :func:`Param`): Parameter elements to go in this group. name(str): Group name. NB ``None`` is valid, since the group may be best identified by its type. type(str): Type of group, e.g. 'complex' (for real and imaginary).
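A minimal usage sketch; the Param helper is assumed to exist alongside Group (it is not shown in this record) and the names are illustrative:

# Hypothetical usage of Group with an assumed Param(...) element builder.
amp = Param(name='amplitude', value='1.3e-7')
phase = Param(name='phase', value='0.4')
g = Group([amp, phase], name='lightcurve', type='complex')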
378,302
def opendocx(file):
    mydoc = zipfile.ZipFile(file)
    # the main document part of a .docx package
    xmlcontent = mydoc.read('word/document.xml')
    document = etree.fromstring(xmlcontent)
    return document
Open a docx file, return a document XML tree
378,303
def new_from_list(cls, content, fill_title=True, **kwargs):
    obj = cls(**kwargs)
    obj.append_from_list(content, fill_title)
    return obj
Populates the Table with a list of tuples of strings. Args: content (list): list of tuples of strings. Each tuple is a row. fill_title (bool): if true, the first tuple in the list will be set as title
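A hedged usage sketch; Table is assumed to be the widget class this classmethod is defined on:

# Hypothetical usage of the classmethod above.
rows = [('name', 'age'), ('alice', '31'), ('bob', '27')]
table = Table.new_from_list(rows, fill_title=True)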
378,304
def block(self, event, emptyEvents=()):
    q = self.tree.matchfirst(event)
    q.block(event)
    self.blockEvents[event] = q
    for ee in emptyEvents:
        ee.queue.waitForEmpty()
Return a recently popped event to the queue, and block all later events until unblock. Only the sub-queue directly containing the event is blocked, so events in other queues may still be processed. It is illegal to call block and unblock in different queues with the same event. :param event: the returned event. When the queue is unblocked later, this event will be popped again. :param emptyEvents: reactivate the QueueIsEmptyEvents
378,305
def get_interpolated_data(df: pd.DataFrame, e_min=np.nan, e_max=np.nan, e_step=np.nan):
    nbr_point = int((e_max - e_min) / e_step + 1)
    x_axis = np.linspace(e_min, e_max, nbr_point).round(6)
    # column names and interpolation kind assumed; literals lost in extraction
    y_axis_function = interp1d(x=df['x_axis'], y=df['y_axis'], kind='linear')
    y_axis = y_axis_function(x_axis)
    return {'x_axis': x_axis, 'y_axis': y_axis}
return the interpolated x and y axis for the given x range [e_min, e_max] with step defined :param df: input data frame :type df: pandas.DataFrame :param e_min: left energy range in eV of new interpolated data :type e_min: float :param e_max: right energy range in eV of new interpolated data :type e_max: float :param e_step: energy step in eV for interpolation :type e_step: float :return: x_axis and y_axis of interpolated data over specified range :rtype: dict
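A usage sketch under the assumed column names above ('x_axis'/'y_axis' are not confirmed by the source):

# Hypothetical usage with a two-column frame matching the assumed schema.
df = pd.DataFrame({'x_axis': [1.0, 2.0, 3.0], 'y_axis': [10.0, 20.0, 15.0]})
out = get_interpolated_data(df, e_min=1.0, e_max=3.0, e_step=0.5)
print(out['x_axis'])  # [1.  1.5 2.  2.5 3. ]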
378,306
def choose_language(self, lang, request):
    if not lang:
        lang = get_language_from_request(request)
    if lang not in [key for (key, value) in settings.PAGE_LANGUAGES]:
        raise Http404
    if lang and translation.check_for_language(lang):
        translation.activate(lang)
    return lang
Deal with the multiple corner cases of choosing the language.
378,307
def _apply_shadow_vars(avg_grads):
    ps_var_grads = []
    for grad, var in avg_grads:
        # 'tower' prefix and '/' separator assumed; literals lost in extraction
        assert var.name.startswith('tower'), var.name
        my_name = '/'.join(var.name.split('/')[1:])
        my_name = get_op_tensor_name(my_name)[0]
        new_v = tf.get_variable(my_name, dtype=var.dtype.base_dtype,
                                initializer=var.initial_value,
                                trainable=True)
        ps_var_grads.append((grad, new_v))
    return ps_var_grads
Create shadow variables on PS, and replace variables in avg_grads by these shadow variables. Args: avg_grads: list of (grad, var) tuples
378,308
async def reinvoke(self, *, call_hooks=False, restart=True):
    cmd = self.command
    view = self.view
    if cmd is None:
        # error message assumed; literal lost in extraction
        raise ValueError('This context is not valid.')
    index, previous = view.index, view.previous
    invoked_with = self.invoked_with
    invoked_subcommand = self.invoked_subcommand
    subcommand_passed = self.subcommand_passed
    if restart:
        to_call = cmd.root_parent or cmd
        view.index = len(self.prefix)
        view.previous = 0
        view.get_word()
    else:
        to_call = cmd
    try:
        await to_call.reinvoke(self, call_hooks=call_hooks)
    finally:
        self.command = cmd
        view.index = index
        view.previous = previous
        self.invoked_with = invoked_with
        self.invoked_subcommand = invoked_subcommand
        self.subcommand_passed = subcommand_passed
|coro| Calls the command again. This is similar to :meth:`~.Context.invoke` except that it bypasses checks, cooldowns, and error handlers. .. note:: If you want to bypass :exc:`.UserInputError` derived exceptions, it is recommended to use the regular :meth:`~.Context.invoke` as it will work more naturally. After all, this will end up using the old arguments the user has used and will thus just fail again. Parameters ------------ call_hooks: :class:`bool` Whether to call the before and after invoke hooks. restart: :class:`bool` Whether to start the call chain from the very beginning or where we left off (i.e. the command that caused the error). The default is to start where we left off.
378,309
def install_json_params(self, ij=None):
    if self._install_json_params is None or ij is not None:
        self._install_json_params = {}
        if ij is None:
            ij = self.install_json
        for p in ij.get('params') or []:
            self._install_json_params.setdefault(p.get('name'), p)
    return self._install_json_params
Return install.json params in a dict with name param as key. Args: ij (dict, optional): Defaults to None. The install.json contents. Returns: dict: A dictionary containing the install.json input params with name as key.
378,310
def authenticate(self, driver):
    events = [driver.press_return_re, driver.password_re, self.device.prompt_re, pexpect.TIMEOUT]
    transitions = [
        (driver.press_return_re, [0, 1], 1, partial(a_send, "\r\n"), 10),
        # timeout key into _C assumed; literal lost in extraction
        (driver.password_re, [0], 1, partial(a_send_password, self._acquire_password()), _C['first_pause']),
        (driver.password_re, [1], -1, a_authentication_error, 0),
        (self.device.prompt_re, [0, 1], -1, None, 0),
        (pexpect.TIMEOUT, [1], -1,
         ConnectionError("Error getting device prompt") if self.device.is_target else partial(a_send, "\r\n"), 0)
    ]
    self.log("EXPECTED_PROMPT={}".format(pattern_to_str(self.device.prompt_re)))
    fsm = FSM("SSH-AUTH", self.device, events, transitions,
              init_pattern=self.last_pattern, timeout=30)
    return fsm.run()
Authenticate using the SSH protocol specific FSM.
378,311
def ensure_local_repo(self):
    if os.path.exists(self.managed_path):
        # log messages assumed; literals lost in extraction
        logging.debug('Repo at {} already exists'.format(self.remote_path))
        return
    logging.info('Initiating local clone of {}'.format(self.remote_path))
    repo_path_parent = parent_dir(self.managed_path)
    if not os.path.exists(repo_path_parent):
        os.makedirs(repo_path_parent)
    with git_error_handling():
        git.Repo.clone_from(self.assemble_remote_path(), self.managed_path)
Given a Dusty repo object, clone the remote into Dusty's local repos directory if it does not already exist.
378,312
def _dict_merge(dct, merge_dct):
    for k, v in merge_dct.items():
        # collections.Mapping moved to collections.abc.Mapping in Python 3.10+
        if (k in dct and isinstance(dct[k], dict)
                and isinstance(merge_dct[k], collections.Mapping)):
            _dict_merge(dct[k], merge_dct[k])
        else:
            dct[k] = merge_dct[k]
Recursive dict merge. Inspired by :meth:``dict.update()``, instead of updating only top-level keys, dict_merge recurses down into dicts nested to an arbitrary depth, updating keys. The ``merge_dct`` is merged into ``dct``. From https://gist.github.com/angstwad/bf22d1822c38a92ec0a9 Arguments: dct: dict onto which the merge is executed merge_dct: dct merged into dct
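A worked example of the merge semantics described above:

base = {'a': 1, 'b': {'x': 1, 'y': 2}}
_dict_merge(base, {'b': {'y': 20, 'z': 30}, 'c': 3})
print(base)  # {'a': 1, 'b': {'x': 1, 'y': 20, 'z': 30}, 'c': 3}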
378,313
def shell(name=None, **attrs):
    attrs.setdefault('cls', Shell)
    return click.command(name, **attrs)
Creates a new :class:`Shell` with a function as callback. This works otherwise the same as :func:`command` just that the `cls` parameter is set to :class:`Shell`.
378,314
def register_monitors(self, *monitors):
    for key, node in monitors:
        if key not in self._registered_monitors:
            node *= 1.0
            self.training_monitors.append((key, node))
            self.testing_monitors.append((key, node))
            self._registered_monitors.add(key)
Register monitors. Each monitor should be a tuple of a name and a Theano variable.
378,315
def generate(self, state, size, dataset, backward=False):
    if isinstance(state, str):
        state = self.split_state(state)
    state = self.get_state(state, size)
    dataset = self.get_dataset(dataset)
    while True:
        link, state = self.random_link(dataset, state, backward)
        # end-of-sequence sentinel assumed to be the empty string; literal lost in extraction
        if link is None or backward and link == '':
            return
        yield link
Generate a sequence. Parameters ---------- state : `str` or `iterable` of `str` Initial state. size : `int` State size. dataset : `str` Dataset key. backward : `bool`, optional Link direction. Returns ------- `generator` of `str` Node value generator.
378,316
def mutator(*cache_names):
    def deco(fn):
        @wraps(fn)
        def _fn(self, *args, **kwargs):
            try:
                return fn(self, *args, **kwargs)
            finally:
                for cache_name in cache_names:
                    setattr(self, cache_name, None)
        return _fn
    return deco
Decorator for ``Document`` methods that change the document. This decorator ensures that the object's caches are kept in sync when changes are made.
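A minimal usage sketch, assuming a Document-like class whose cached attribute is recomputed lazily (the class and attribute names here are illustrative, not from the source):

class Document:
    def __init__(self, lines):
        self.lines = lines
        self._word_count = None  # cache; reset by any @mutator method

    @mutator('_word_count')
    def append(self, line):
        self.lines.append(line)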
378,317
def delete_dcnm_out_part(self, tenant_id, fw_dict, is_fw_virt=False):
    res = fw_const.DCNM_OUT_PART_DEL_SUCCESS
    tenant_name = fw_dict.get('tenant_name')  # key assumed; literal lost in extraction
    ret = True
    try:
        self._delete_partition(tenant_id, tenant_name)
    except Exception as exc:
        LOG.error("deletion of Out Partition failed for tenant "
                  "%(tenant)s, Exception %(exc)s",
                  {'tenant': tenant_id, 'exc': str(exc)})
        res = fw_const.DCNM_OUT_PART_DEL_FAIL
        ret = False
    self.update_fw_db_result(tenant_id, dcnm_status=res)
    LOG.info("Out partition deleted")
    return ret
Delete the DCNM OUT partition and update the result.
378,318
def _init(self, clnt):
    assert clnt, "clnt is None"
    self._clnt = clnt
    self._apikey = clnt.apikey()
    self._version = clnt.conf(YP_VERSION, defval=VERSION_V2)
    self._charset = clnt.conf(HTTP_CHARSET, defval=CHARSET_UTF8)
    self._name = self.__class__.__module__.split('.')[-1]
initialize api by YunpianClient
378,319
def anonymize_user(doc):
    # field names, encodings, and format strings below are assumed
    # (invenio-stats style); the string literals were lost in extraction.
    ip = doc.pop('ip_address', None)
    if ip:
        doc.update({'country': get_geoip(ip)})
    user_id = doc.pop('user_id', '')
    session_id = doc.pop('session_id', '')
    user_agent = doc.pop('user_agent', '')
    timestamp = arrow.get(doc.get('timestamp'))
    timeslice = timestamp.strftime('%Y%m%d%H')
    salt = get_anonymization_salt(timestamp)
    visitor_id = hashlib.sha224(salt.encode('utf-8'))
    if user_id:
        visitor_id.update(user_id.encode('utf-8'))
    elif session_id:
        visitor_id.update(session_id.encode('utf-8'))
    elif ip and user_agent:
        vid = '{}|{}|{}'.format(ip, user_agent, timeslice)
        visitor_id.update(vid.encode('utf-8'))
    else:
        pass
    unique_session_id = hashlib.sha224(salt.encode('utf-8'))
    if user_id:
        sid = '{}|{}'.format(user_id, timeslice)
        unique_session_id.update(sid.encode('utf-8'))
    elif session_id:
        sid = '{}|{}'.format(session_id, timeslice)
        unique_session_id.update(sid.encode('utf-8'))
    elif ip and user_agent:
        sid = '{}|{}|{}'.format(ip, user_agent, timeslice)
        unique_session_id.update(sid.encode('utf-8'))
    doc.update(dict(
        visitor_id=visitor_id.hexdigest(),
        unique_session_id=unique_session_id.hexdigest()
    ))
    return doc
Preprocess an event by anonymizing user information. The anonymization is done by removing fields that can uniquely identify a user, such as the user's ID, session ID, IP address and User Agent, and hashing them to produce a ``visitor_id`` and ``unique_session_id``. To further secure the method, a randomly generated 32-byte salt is used, that expires after 24 hours and is discarded. The salt values are stored in Redis (or whichever backend Invenio-Cache uses). The ``unique_session_id`` is calculated in the same way as the ``visitor_id``, with the only difference that it also takes into account the hour of the event. All of these rules effectively mean that a user can have a unique ``visitor_id`` for each day and a unique ``unique_session_id`` for each hour of a day. This session ID generation process was designed according to the `Project COUNTER Code of Practice <https://www.projectcounter.org/code-of-practice-sections/general-information/>`_. In addition to that, the country of the user is extracted from the IP address as an ISO 3166-1 alpha-2 two-letter country code (e.g. "CH" for Switzerland).
378,320
def closest(self, dt1, dt2, *dts):
    dt1 = pendulum.instance(dt1)
    dt2 = pendulum.instance(dt2)
    dts = [dt1, dt2] + [pendulum.instance(x) for x in dts]
    dts = [(abs(self - dt), dt) for dt in dts]
    return min(dts)[1]
Get the closest date from the instance. :type dt1: datetime.datetime :type dt2: datetime.datetime :type dts: list[datetime.datetime,] :rtype: DateTime
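A usage sketch with pendulum datetimes (the method returns whichever candidate has the smallest absolute distance from the instance):

import pendulum
base = pendulum.datetime(2020, 1, 15)
a = pendulum.datetime(2020, 1, 1)
b = pendulum.datetime(2020, 1, 14)
print(base.closest(a, b))  # 2020-01-14, the nearer of the two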
378,321
async def start_pipe_server(client_connected_cb, *, path, loop=None, limit=DEFAULT_LIMIT):
    # separator replacement assumed (forward slashes to backslashes for the
    # \\.\pipe\ namespace); literals lost in extraction
    path = path.replace('/', '\\')
    loop = loop or asyncio.get_event_loop()

    def factory():
        reader = asyncio.StreamReader(limit=limit, loop=loop)
        protocol = asyncio.StreamReaderProtocol(reader, client_connected_cb, loop=loop)
        return protocol

    server, *_ = await loop.start_serving_pipe(factory, address=path)
    closed = asyncio.Event(loop=loop)
    original_close = server.close

    def close():
        original_close()
        closed.set()

    server.close = close
    server.wait_closed = closed.wait
    return server
Start listening for connection using Windows named pipes.
378,322
def to_pytime(self):
    nanoseconds = self._nsec
    hours = nanoseconds // 1000000000 // 60 // 60
    nanoseconds -= hours * 60 * 60 * 1000000000
    minutes = nanoseconds // 1000000000 // 60
    nanoseconds -= minutes * 60 * 1000000000
    seconds = nanoseconds // 1000000000
    nanoseconds -= seconds * 1000000000
    return datetime.time(hours, minutes, seconds, nanoseconds // 1000)
Converts a SQL time object into Python's time object; this will truncate nanoseconds to microseconds. @return: naive time
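A worked example of the unit arithmetic: 3 723 000 001 000 ns is 1 h 2 min 3 s plus 1000 ns, and the trailing 1000 ns truncates to 1 microsecond:

import datetime
nsec = 3_723_000_001_000  # 1 h + 2 min + 3 s + 1000 ns
h, rem = divmod(nsec, 3_600_000_000_000)
m, rem = divmod(rem, 60_000_000_000)
s, rem = divmod(rem, 1_000_000_000)
print(datetime.time(h, m, s, rem // 1000))  # 01:02:03.000001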
378,323
def make_wrapper(self, callable_):
    assert callable(callable_)

    def wrapper(*args, **kw):
        return self.call_and_report_errors(callable_, *args, **kw)
    return wrapper
Given a free-standing function 'callable', return a new callable that will call 'callable' and report all exceptions, using 'call_and_report_errors'.
378,324
def identify_hosting_service(repo_url, hosting_services=HOSTING_SERVICES):
    repo_url = unicode(repo_url)  # Python 2 API
    for service in hosting_services:
        if service in repo_url:
            return service
    raise UnknownHostingService
Determines the hosting service of `repo_url`. :param repo_url: Repo URL of unknown type. :returns: Hosting service or raises UnknownHostingService exception.
378,325
def series2cat(df: DataFrame, *col_names):
    "Categorifies the columns `col_names` in `df`."
    for c in listify(col_names):
        df[c] = df[c].astype('category').cat.as_ordered()
Categorifies the columns `col_names` in `df`.
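A usage sketch with a plain pandas frame (assumes the fastai helper listify is in scope, as in the function above):

import pandas as pd
df = pd.DataFrame({'grade': ['b', 'a', 'c'], 'score': [2, 1, 3]})
series2cat(df, 'grade')
print(df['grade'].dtype)  # category (ordered)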
378,326
def get_folder_id(folder_name, auth, url):
    object_list = get_cfg_template(auth=auth, url=url)
    for template in object_list:
        # dict key names assumed; literals lost in extraction
        if template['confFolderName'] == folder_name:
            return int(template['confFolderId'])
    return "Folder not found"
Helper function takes str input of folder name and returns the numerical id of the folder. :param folder_name: str name of the folder :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass :return: numerical id of the folder :rtype: int >>> from pyhpeimc.auth import * >>> from pyhpeimc.plat.icc import * >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin") >>> default_folder_id = get_folder_id('Default Folder', auth.creds, auth.url) >>> assert type(default_folder_id) is int
378,327
def hex_digit(coord, digit=1):
    if digit not in [1, 2]:
        # error message assumed; literal lost in extraction
        raise ValueError('digit must be 1 or 2, got {}'.format(digit))
    return int(hex(coord)[1 + digit], 16)
Returns either the first or second digit of the hexadecimal representation of the given coordinate. :param coord: hexadecimal coordinate, int :param digit: 1 or 2, meaning either the first or second digit of the hexadecimal :return: int, either the first or second digit
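A worked example: hex(0xAB) is the string '0xab', so indices 2 and 3 pick out the two hex digits:

print(hex_digit(0xAB, digit=1))  # 10  ('a')
print(hex_digit(0xAB, digit=2))  # 11  ('b')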
378,328
def delete_country_by_id(cls, country_id, **kwargs):
    # kwarg names assumed (swagger-codegen style); literals lost in extraction
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async'):
        return cls._delete_country_by_id_with_http_info(country_id, **kwargs)
    else:
        (data) = cls._delete_country_by_id_with_http_info(country_id, **kwargs)
        return data
Delete Country Delete an instance of Country by its ID. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.delete_country_by_id(country_id, async=True) >>> result = thread.get() :param async bool :param str country_id: ID of country to delete. (required) :return: None If the method is called asynchronously, returns the request thread.
378,329
def search_full(self, regex, return_string=True, advance_pointer=True):
    regex = get_regex(regex)
    self.match = regex.search(self.string, self.pos)
    if not self.match:
        return
    start_pos = self.pos
    if advance_pointer:
        self.pos = self.match.end()
    if return_string:
        return self.string[start_pos:self.match.end()]
    return (self.match.end() - start_pos)
Search from the current position. If `return_string` is false and a match is found, returns the number of characters matched (from the current position *up to* the end of the match). >>> s = Scanner("test string") >>> s.search_full(r' ') 'test ' >>> s.pos 5 >>> s.search_full(r'i', advance_pointer=False) 'stri' >>> s.pos 5 >>> s.search_full(r'i', return_string=False, advance_pointer=False) 4 >>> s.pos 5
378,330
def get_validation_fields(self):
    return [("name", 1, str),
            ("time", 1, int),
            ("url", 1, str),
            ("description", 1, str),
            ("instructions", 1, str),
            ("exp_id", 1, str),
            ("install", 0, list),
            ("contributors", 0, list),
            ("reference", 0, list),
            ("cognitive_atlas_task_id", 0, str),
            ("template", 0, str)]
get_validation_fields returns a list of tuples, each describing a field as (name, level, type). We only require the exp_id to coincide with the folder name, for the sake of reproducibility (given that all are served from a sample image or Github organization); all other fields are optional. To specify runtime variables, add them to "experiment_variables". Levels: 0: not required, no warning; 1: required, not valid; 2: not required, warning. type indicates the variable type.
378,331
def to_list(var):
    if var is None:
        return []
    if isinstance(var, str):
        var = var.split()
    elif not isinstance(var, list):
        try:
            var = list(var)
        except TypeError:
            raise ValueError("{} cannot be converted to a list.".format(var))
    return var
Checks if the given value is a list and tries to convert it if it is not.
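Behavior of the three branches, sketched:

print(to_list(None))        # []
print(to_list('a b c'))     # ['a', 'b', 'c']  (whitespace split)
print(to_list(('x', 'y')))  # ['x', 'y']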
378,332
def dispatch(self, request, *args, **kwargs):
    if request.method.lower() in self.http_method_names:
        handler = getattr(self, request.method.lower(), self.http_method_not_allowed)
    else:
        handler = self.http_method_not_allowed
    return handler(*args, **kwargs)
Redefine parent's method. Called on each new request from user. Main difference between Django's approach and ours - we don't push a 'request' to a method call. We use 'self.request' instead.
378,333
def _configure(self, target, conf=None, logger=None, callconf=None, keepstate=None, modules=None):
    result = target
    self.loadmodules(modules=modules)
    modules = []
    if conf is None:
        conf = self.conf
    if logger is None:
        logger = self.logger
    if callconf is None:
        callconf = self.callparams
    if keepstate is None:
        keepstate = self.keepstate
    subcats = {}
    params = []
    sub_conf_prefix = Configurable.SUB_CONF_PREFIX
    for cat in conf.values():
        cname = cat.name
        if cname.startswith(sub_conf_prefix):
            subcnames = cname.split(sub_conf_prefix)
            pname = subcnames[1]
            fcname = cname[1 + len(pname):]
            if not fcname:
                fcname = str(random())
            fcat = cat.copy(name=fcname)
            if pname in subcats:
                subcats[pname].append(fcat)
            else:
                subcats[pname] = [fcat]
        else:
            cparams = cat.params
            params += cparams.values()
    if callconf and callable(target):
        conf = self._toconf(params)
        args, kwargs = self.getcallparams(conf=conf, target=target)
        result = target = target(*args, **kwargs)
    for param in params:
        value, pname = param.value, param.name
        if pname in subcats:
            subcallconf = True
            if keepstate and hasattr(target, pname):
                subcallconf = False
                value = getattr(target, pname)
            cats = subcats[pname]
            subconf = configuration(*cats)
            targets = applyconfiguration(
                targets=[value], conf=subconf, callconf=subcallconf,
                keepstate=keepstate, modules=modules
            )
            value = targets[0]
        if param.error:
            continue
        elif self.foreigns or param.local:
            try:
                setattr(target, pname, value)
            except Exception:
                if logger is not None:
                    # error message assumed; literal lost in extraction
                    logger.error(
                        'Error while setting {0} = {1} on {2}: {3}'.format(
                            pname, value, target, format_exc()
                        )
                    )
    return result
Configure this class with input conf only if auto_conf or configure is true. This method should be overriden for specific conf :param target: object to configure. self targets by default. :param Configuration conf: configuration model to configure. Default is this conf. :param Logger logger: logger to use. :param bool callconf: if True, use conf in target __call__ parameters. :param bool keepstate: if True recreate sub objects if they already exist. :param list modules: modules to reload before. :return: configured target.
378,334
def process_streamer(self, streamer, callback=None):
    index = streamer.index
    if index in self._in_progress_streamers:
        raise InternalError("You cannot add a streamer again until it has finished streaming.")
    queue_item = QueuedStreamer(streamer, callback)
    self._in_progress_streamers.add(index)
    self._logger.debug("Streamer %d: queued to send %d readings", index, queue_item.initial_count)
    self._queue.put_nowait(queue_item)
Start streaming a streamer. Args: streamer (DataStreamer): The streamer itself. callback (callable): An optional callable that will be called as: callable(index, success, highest_id_received_from_other_side)
378,335
def _ensure_like_indices(time, panels):
    n_time = len(time)
    n_panel = len(panels)
    u_panels = np.unique(panels)
    u_time = np.unique(time)
    if len(u_time) == n_time:
        time = np.tile(u_time, len(u_panels))
    if len(u_panels) == n_panel:
        panels = np.repeat(u_panels, len(u_time))
    return time, panels
Makes sure that time and panels are conformable.
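A worked example of the broadcasting: with 3 unique times and 2 unique panels, time is tiled and panels are repeated so both end up with length 6 (assumes the private helper is importable):

import numpy as np
time = np.array([1, 2, 3])
panels = np.array(['a', 'b'])
t, p = _ensure_like_indices(time, panels)
print(t)  # [1 2 3 1 2 3]
print(p)  # ['a' 'a' 'a' 'b' 'b' 'b']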
378,336
def get_stop_words(self, language, fail_safe=False):
    try:
        language = self.language_codes[language]
    except KeyError:
        pass
    collection = self.LOADED_LANGUAGES_CACHE.get(language)
    if collection is None:
        try:
            collection = self._get_stop_words(language)
            self.LOADED_LANGUAGES_CACHE[language] = collection
        except StopWordError as error:
            if not fail_safe:
                raise error
            collection = []
    stop_words = StopWord(language, collection)
    return stop_words
Returns a StopWord object initialized with the stop words collection requested by ``language``. If the requested language is not available a StopWordError is raised. If ``fail_safe`` is set to True, an empty StopWord object is returned.
378,337
def _to_r(o, as_data=False, level=0):
    if o is None:
        return "NA"
    if isinstance(o, basestring):  # Python 2 API
        return o
    if hasattr(o, "r"):
        return o.r
    elif isinstance(o, bool):
        return "TRUE" if o else "FALSE"
    elif isinstance(o, (list, tuple)):
        inner = ",".join([_to_r(x, True, level + 1) for x in o])
        return "c({})".format(inner) if as_data else inner
    elif isinstance(o, dict):
        inner = ",".join(["{}={}".format(k, _to_r(v, True, level + 1))
                          for k, v in sorted(o.iteritems(), key=lambda x: x[0])])
        return "list({})".format(inner) if as_data else inner
    return str(o)
Helper function to convert python data structures to R equivalents TODO: a single model for transforming to r to handle * function args * lists as function args
378,338
async def register(self, service):
    response = await self._api.put("/v1/agent/service/register", data=service)
    return response.status == 200
Registers a new local service. Returns: bool: ``True`` on success The register endpoint is used to add a new service, with an optional health check, to the local agent. The request body must look like:: { "ID": "redis1", "Name": "redis", "Tags": [ "master", "v1" ], "Address": "127.0.0.1", "Port": 8000, "EnableTagOverride": False, "Check": { "DeregisterCriticalServiceAfter": timedelta(seconds=90), "Script": "/usr/local/bin/check_redis.py", "HTTP": "http://localhost:5000/health", "Interval": timedelta(seconds=10), "TTL": timedelta(seconds=15) } } The **Name** field is mandatory. If an **ID** is not provided, it is set to **Name**. You cannot have duplicate **ID** entries per agent, so it may be necessary to provide an **ID** in the case of a collision. **Tags**, **Address**, **Port**, **Check** and **EnableTagOverride** are optional. If **Address** is not provided or left empty, then the agent's address will be used as the address for the service during DNS queries. When querying for services using HTTP endpoints such as service health or service catalog and encountering an empty **Address** field for a service, use the **Address** field of the agent node associated with that instance of the service, which is returned alongside the service information. If **Check** is provided, only one of **Script**, **HTTP**, **TCP** or **TTL** should be specified. **Script** and **HTTP** also require **Interval**. The created check will be named "service:<ServiceId>". Checks that are associated with a service may also contain an optional **DeregisterCriticalServiceAfter** field, which is a timeout in the same format as **Interval** and **TTL**. If a check is in the critical state for more than this configured value, then its associated service (and all of its associated checks) will automatically be deregistered. The minimum timeout is 1 minute, and the process that reaps critical services runs every 30 seconds, so it may take slightly longer than the configured timeout to trigger the deregistration. This should generally be configured with a timeout that's much, much longer than any expected recoverable outage for the given service. **EnableTagOverride** can optionally be specified to disable the anti-entropy feature for this service's tags. If **EnableTagOverride** is set to ``True`` then external agents can update this service in the catalog and modify the tags. Subsequent local sync operations by this agent will ignore the updated tags. For instance, if an external agent modified both the tags and the port for this service and **EnableTagOverride** was set to true then after the next sync cycle the service's port would revert to the original value but the tags would maintain the updated value. As a counter example, if an external agent modified both the tags and port for this service and **EnableTagOverride** was set to false then after the next sync cycle the service's port and the tags would revert to the original value and all modifications would be lost. It's important to note that this applies only to the locally registered service. If you have multiple nodes all registering the same service their **EnableTagOverride** configuration and all other service configuration items are independent of one another. Updating the tags for the service registered on one node is independent of the same service (by name) registered on another node. If **EnableTagOverride** is not specified the default value is ``False``.
378,339
def display(self):
    # Section text and table header labels below are assumed (ffn-style);
    # the string literals were lost in extraction.
    print('Stats for %s from %s - %s' % (self.name, self.start, self.end))
    if type(self.rf) is float:
        print('Annual risk-free rate considered: %s' % (fmtp(self.rf)))
    print('Summary:')
    data = [[fmtp(self.total_return), fmtn(self.daily_sharpe),
             fmtp(self.cagr), fmtp(self.max_drawdown)]]
    print(tabulate(data, headers=['Total Return', 'Sharpe', 'CAGR', 'Max Drawdown']))
    print('\nAnnualized Returns:')
    data = [[fmtp(self.mtd), fmtp(self.three_month), fmtp(self.six_month),
             fmtp(self.ytd), fmtp(self.one_year), fmtp(self.three_year),
             fmtp(self.five_year), fmtp(self.ten_year), fmtp(self.incep)]]
    print(tabulate(data, headers=['mtd', '3m', '6m', 'ytd', '1y', '3y', '5y', '10y', 'incep.']))
    print('\nPeriodic:')
    data = [
        ['sharpe', fmtn(self.daily_sharpe), fmtn(self.monthly_sharpe), fmtn(self.yearly_sharpe)],
        ['mean', fmtp(self.daily_mean), fmtp(self.monthly_mean), fmtp(self.yearly_mean)],
        ['vol', fmtp(self.daily_vol), fmtp(self.monthly_vol), fmtp(self.yearly_vol)],
        ['skew', fmtn(self.daily_skew), fmtn(self.monthly_skew), fmtn(self.yearly_skew)],
        ['kurt', fmtn(self.daily_kurt), fmtn(self.monthly_kurt), fmtn(self.yearly_kurt)],
        ['best', fmtp(self.best_day), fmtp(self.best_month), fmtp(self.best_year)],
        ['worst', fmtp(self.worst_day), fmtp(self.worst_month), fmtp(self.worst_year)]]
    print(tabulate(data, headers=['daily', 'monthly', 'yearly']))
    print('\nDrawdowns:')
    data = [
        [fmtp(self.max_drawdown), fmtp(self.avg_drawdown), fmtn(self.avg_drawdown_days)]]
    print(tabulate(data, headers=['max', 'avg', '# days']))
    print('\nMisc:')
    data = [['avg. up month', fmtp(self.avg_up_month)],
            ['avg. down month', fmtp(self.avg_down_month)],
            ['up year %', fmtp(self.win_year_perc)],
            ['12m up %', fmtp(self.twelve_month_win_perc)]]
    print(tabulate(data))
Displays an overview containing descriptive stats for the Series provided.
378,340
def get_serialize_format(self, mimetype):
    format = self.formats.get(mimetype, None)
    if format is None:
        format = formats.get(mimetype, None)
    return format
Get the serialization format for the given mimetype
378,341
def users(self, params):
    resource = self.RESOURCE_USERS.format(account_id=self.account.id, id=self.id)
    # header, HTTP method, and response body keys assumed; literals lost in extraction
    headers = {'Content-Type': 'application/json'}
    response = Request(self.account.client, 'post', resource,
                       headers=headers, body=json.dumps(params)).perform()
    success_count = response.body['data']['success_count']
    total_count = response.body['data']['total_count']
    return (success_count, total_count)
This is a private API and requires whitelisting from Twitter. This endpoint will allow partners to add, update and remove users from a given tailored_audience_id. The endpoint will also accept multiple user identifier types per user as well.
378,342
def get_method_by_idx(self, idx):
    if self.__cached_methods_idx is None:
        self.__cached_methods_idx = {}
        for i in self.classes.class_def:
            for j in i.get_methods():
                self.__cached_methods_idx[j.get_method_idx()] = j
    try:
        return self.__cached_methods_idx[idx]
    except KeyError:
        return None
Return a specific method by using an index :param idx: the index of the method :type idx: int :rtype: None or an :class:`EncodedMethod` object
378,343
def save_process():
    from MAVProxy.modules.lib import wx_processguard
    from MAVProxy.modules.lib.wx_loader import wx
    from MAVProxy.modules.lib.wxgrapheditor import GraphDialog
    app = wx.App(False)
    # dialog title assumed; literal lost in extraction
    frame = GraphDialog('Graph Editor', mestate.last_graph, save_callback)
    frame.ShowModal()
    frame.Destroy()
process for saving a graph
378,344
def p_always(self, p):
    p[0] = Always(p[2], p[3], lineno=p.lineno(1))
    p.set_lineno(0, p.lineno(1))
always : ALWAYS senslist always_statement
378,345
def simple_profile(self, sex=None):
    SEX = ["F", "M"]
    if sex not in SEX:
        sex = self.random_element(SEX)
    if sex == 'F':
        name = self.generator.name_female()
    elif sex == 'M':
        name = self.generator.name_male()
    return {
        "username": self.generator.user_name(),
        "name": name,
        "sex": sex,
        "address": self.generator.address(),
        "mail": self.generator.free_email(),
        "birthdate": self.generator.date_of_birth(),
    }
Generates a basic profile with personal information
378,346
def get_taskfileinfo_selection(self):
    sel = OptionSelector(self.reftrack)
    sel.exec_()
    return sel.selected
Return a taskfileinfo that the user chose from the available options :returns: the chosen taskfileinfo :rtype: :class:`jukeboxcore.filesys.TaskFileInfo` :raises: None
378,347
def _get_all_data(self, start_date, end_date):
    return [self._get_input_data(var, start_date, end_date)
            for var in _replace_pressure(self.variables, self.dtype_in_vert)]
Get the needed data from all of the vars in the calculation.
378,348
def view_count(self, id, **kwargs):
    "https://developer.zendesk.com/rest_api/docs/core/views#get-view-count"
    api_path = "/api/v2/views/{id}/count.json"
    api_path = api_path.format(id=id)
    return self.call(api_path, **kwargs)
https://developer.zendesk.com/rest_api/docs/core/views#get-view-count
378,349
def set_buffer(library, session, mask, size):
    return library.viSetBuf(session, mask, size)
Sets the size for the formatted I/O and/or low-level I/O communication buffer(s). Corresponds to viSetBuf function of the VISA library. :param library: the visa library wrapped by ctypes. :param session: Unique logical identifier to a session. :param mask: Specifies the type of buffer. (Constants.READ_BUF, .WRITE_BUF, .IO_IN_BUF, .IO_OUT_BUF) :param size: The size to be set for the specified buffer(s). :return: return value of the library call. :rtype: :class:`pyvisa.constants.StatusCode`
378,350
def get(self, mac):
    # format value and response encoding assumed; literals lost in extraction
    data = {self._FORMAT_F: 'json', self._SEARCH_F: mac}
    response = self.__decode_str(self.__call_api(self.__url, data), 'utf-8')
    if len(response) > 0:
        return self.__parse(response)
    raise EmptyResponseException()
Get data from API as instance of ResponseModel. Keyword arguments: mac -- MAC address or OUI for searching
378,351
def path(self, goal):
    if goal == self.name:
        return [self]
    if goal not in self.routes:
        raise ValueError("Unknown {}".format(goal))
    obj = self
    path = [obj]
    while True:
        obj = obj.routes[goal].direction
        path.append(obj)
        if obj.name == goal:
            break
    return path
Get the shortest way between two nodes of the graph Args: goal (str): Name of the targeted node Return: list of Node
378,352
def read_files(path):
    template = {}
    for file_name in os.listdir(path):
        with open(os.path.join(path, file_name), 'r') as f:
            template[file_name] = replace_whitespace(f.read(), insert=True)
    return template
For a directory full of files, retrieve it as a dict with file_name:text
378,353
def delete_edge(self, ind_node, dep_node):
    graph = self.graph
    if dep_node not in graph.get(ind_node, []):
        raise KeyError(
            "No edge exists between %s and %s." % (ind_node, dep_node)
        )
    graph[ind_node].remove(dep_node)
Delete an edge from the graph. Args: ind_node (str): The independent node to delete an edge from. dep_node (str): The dependent node that has a dependency on the ind_node. Raises: KeyError: Raised when the edge doesn't already exist.
378,354
def dump_all_keys_or_addrs(wallet_obj):
    # Several user-facing message strings were lost in extraction; the
    # placeholder texts below are assumptions, not the original wording.
    print_traversal_warning()
    puts()
    if not confirm(user_prompt=DEFAULT_PROMPT, default=False):
        puts(colored.red('Quitting'))
        return
    mpub = wallet_obj.serialize_b58(private=False)
    if wallet_obj.private_key:
        desc_str = 'private keys'
    else:
        desc_str = 'addresses'
    puts()
    puts()
    priv_to_display = '%s...' % first4mprv_from_mpub(mpub=mpub)
    print_bcwallet_basic_priv_opening(priv_to_display=priv_to_display)
    puts('How many %s to dump?' % desc_str)
    puts()
    num_keys = get_int(
        user_prompt=DEFAULT_PROMPT,
        max_int=10**5,
        default_input='5',
        show_default=True,
        quit_ok=True,
    )
    if num_keys is False:
        return
    if wallet_obj.private_key:
        print_childprivkey_warning()
    puts('-' * 70)
    for chain_int in (0, 1):
        for current in range(0, num_keys):
            path = "m/%d/%d" % (chain_int, current)
            if current == 0:
                if chain_int == 0:
                    print_external_chain()
                    print_key_path_header()
                elif chain_int == 1:
                    print_internal_chain()
                    print_key_path_header()
            child_wallet = wallet_obj.get_child_for_path(path)
            if wallet_obj.private_key:
                wif_to_use = child_wallet.export_to_wif()
            else:
                wif_to_use = None
            print_path_info(
                address=child_wallet.to_address(),
                path=path,
                wif=wif_to_use,
                coin_symbol=coin_symbol_from_mkey(mpub),
            )
    puts(colored.blue('Done.'))
Offline-enabled mechanism to dump addresses
378,355
def _replace_auth_key(user, key, enc='ssh-rsa', comment='', options=None, config='.ssh/authorized_keys'):
    # default values assumed (salt-style); literals lost in extraction
    auth_line = _format_auth_line(key, enc, comment, options or [])
    lines = []
    full = _get_config_file(user, config)
    try:
        with salt.utils.files.fopen(full, 'r') as _fh:
            for line in _fh:
                ...  # remainder of the function was truncated in the source
Replace an existing key
378,356
def render(opts, functions, states=None, proxy=None, context=None):
    # dunder pack keys and loader tags assumed (Salt style); literals lost in extraction
    if context is None:
        context = {}
    pack = {'__salt__': functions,
            '__grains__': opts.get('grains', {}),
            '__context__': context}
    if states:
        pack['__states__'] = states
    pack['__proxy__'] = proxy or {}
    ret = LazyLoader(
        _module_dirs(
            opts,
            'renderers',
            'render',
            ext_type_dirs='render_dirs',
        ),
        opts,
        tag='render',
        pack=pack,
    )
    rend = FilterDictWrapper(ret, '.render')
    if not check_render_pipe_str(opts['renderer'], rend,
                                 opts['renderer_blacklist'], opts['renderer_whitelist']):
        err = ('The renderer {0} is unavailable, this error is often because '
               'the needed software is unavailable'.format(opts['renderer']))
        log.critical(err)
        raise LoaderError(err)
    return rend
Returns the render modules
378,357
def build(self):
    self.load_builtins()
    self.load_functions(self.tree)
    self.visit(self.tree)
Generic entrypoint of `SymbolTableBuilder` class.
378,358
def wait(self, timeout=None):
    self.__stopped.wait(timeout)
    return self.__stopped.is_set()
Waits for the client to stop its loop
378,359
def check(self, val):
    if self.type is None:
        return True
    is_list = isinstance(val, list)
    if not self.listable and is_list:
        return False
    if self.type == KEY_TYPES.NUMERIC and not is_number(val):
        return False
    elif (self.type == KEY_TYPES.TIME and not is_number(val)
          and '-' not in val and '/' not in val):  # date separators assumed; literals lost in extraction
        return False
    elif self.type == KEY_TYPES.STRING:
        if is_list:
            if not isinstance(val[0], basestring):
                return False
        elif not isinstance(val, basestring):
            return False
    elif self.type == KEY_TYPES.BOOL:
        if is_list and not isinstance(val[0], bool):
            return False
        elif not isinstance(val, bool):
            return False
    return True
Make sure given value is consistent with this `Key` specification. NOTE: if `type` is 'None', then `listable` also is *not* checked.
378,360
def onStart(self, *args, **kwarg):
    with transactUI(self):
        config = self.navbar.getActiveConfig()
        config.resetErrors()
        if config.isValid():
            self.clientRunner.run(self.buildCliString())
            self.showConsole()
        else:
            config.displayErrors()
            self.Layout()
Verify user input and kick off the client's program if valid
378,361
def get_datarect(self):
    x1, y1, x2, y2 = self._org_x1, self._org_y1, self._org_x2, self._org_y2
    return (x1, y1, x2, y2)
Get the approximate bounding box of the displayed image. Returns ------- rect : tuple Bounding box in data coordinates in the form of ``(x1, y1, x2, y2)``.
378,362
def timezone(zone):
    if zone.upper() == 'UTC':
        return utc
    try:
        zone = zone.encode('US-ASCII')  # all valid zone names are ASCII
    except UnicodeEncodeError:
        raise UnknownTimeZoneError(zone)
    zone = _unmunge_zone(zone)
    if zone not in _tzinfo_cache:
        if resource_exists(zone):
            _tzinfo_cache[zone] = build_tzinfo(zone, open_resource(zone))
        else:
            raise UnknownTimeZoneError(zone)
    return _tzinfo_cache[zone]
r''' Return a datetime.tzinfo implementation for the given timezone >>> from datetime import datetime, timedelta >>> utc = timezone('UTC') >>> eastern = timezone('US/Eastern') >>> eastern.zone 'US/Eastern' >>> timezone(u'US/Eastern') is eastern True >>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc) >>> loc_dt = utc_dt.astimezone(eastern) >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)' >>> loc_dt.strftime(fmt) '2002-10-27 01:00:00 EST (-0500)' >>> (loc_dt - timedelta(minutes=10)).strftime(fmt) '2002-10-27 00:50:00 EST (-0500)' >>> eastern.normalize(loc_dt - timedelta(minutes=10)).strftime(fmt) '2002-10-27 01:50:00 EDT (-0400)' >>> (loc_dt + timedelta(minutes=10)).strftime(fmt) '2002-10-27 01:10:00 EST (-0500)' Raises UnknownTimeZoneError if passed an unknown zone. >>> timezone('Asia/Shangri-La') Traceback (most recent call last): ... UnknownTimeZoneError: 'Asia/Shangri-La' >>> timezone(u'\N{TRADE MARK SIGN}') Traceback (most recent call last): ... UnknownTimeZoneError: u'\u2122'
378,363
def symmetric_difference_update(self, other):
    other = set(other)
    ivs = list(self)
    for iv in ivs:
        if iv in other:
            self.remove(iv)
            other.remove(iv)
    self.update(other)
Throws out all intervals except those only in self or other, not both.
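The set semantics, sketched with plain ints (an actual tree stores Interval objects; this illustrates the behavior, not the library API):

self_ivs = {1, 2, 3}
other_ivs = {3, 4}
self_ivs.symmetric_difference_update(other_ivs)
print(self_ivs)  # {1, 2, 4} -- the shared interval 3 is discarded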
378,364
def _update_model(self, completions):
    cc_model = QtGui.QStandardItemModel()
    self._tooltips.clear()
    for completion in completions:
        # completion dict keys assumed; literals lost in extraction
        name = completion['name']
        item = QtGui.QStandardItem()
        item.setData(name, QtCore.Qt.DisplayRole)
        if 'tooltip' in completion and completion['tooltip']:
            self._tooltips[name] = completion['tooltip']
        if 'icon' in completion:
            icon = completion['icon']
            if isinstance(icon, list):
                icon = QtGui.QIcon.fromTheme(icon[0], QtGui.QIcon(icon[1]))
            else:
                icon = QtGui.QIcon(icon)
            item.setData(QtGui.QIcon(icon), QtCore.Qt.DecorationRole)
        cc_model.appendRow(item)
    try:
        self._completer.setModel(cc_model)
    except RuntimeError:
        self._create_completer()
        self._completer.setModel(cc_model)
    return cc_model
Creates a QStandardItemModel that holds the suggestions from the completion models for the QCompleter. :param completions: list of completion dicts
378,365
def delete_subscription(self):
    url = self._build_url('subscription', base_url=self._api)
    return self._boolean(self._delete(url), 204, 404)
Delete subscription for this thread. :returns: bool
378,366
def _parse_metadata_and_message_count(response):
    metadata = _parse_metadata(response)
    # header name assumed (Azure queue style); literal lost in extraction
    metadata.approximate_message_count = _to_int(
        response.headers.get('x-ms-approximate-messages-count'))
    return metadata
Extracts the approximate message count header.
378,367
def get_chunk_ranges(self, symbol, chunk_range=None, reverse=False):
    sym = self._get_symbol_info(symbol)
    if not sym:
        raise NoDataFoundException("Symbol does not exist.")
    c = CHUNKER_MAP[sym[CHUNKER]]
    spec = {SYMBOL: symbol, SEGMENT: 0}
    if chunk_range is not None:
        spec.update(CHUNKER_MAP[sym[CHUNKER]].to_mongo(chunk_range))
    for x in self._collection.find(spec, projection=[START, END],
                                   sort=[(START, pymongo.ASCENDING if not reverse else pymongo.DESCENDING)]):
        yield (c.chunk_to_str(x[START]), c.chunk_to_str(x[END]))
Returns a generator of (Start, End) tuples for each chunk in the symbol Parameters ---------- symbol: str the symbol for the given item in the DB chunk_range: None, or a range object allows you to subset the chunks by range reverse: boolean return the chunk ranges in reverse order Returns ------- generator
378,368
def TimeField(formatter=types.DEFAULT_TIME_FORMAT, default=NOTHING, required=True,
              repr=True, cmp=True, key=None):
    default = _init_fields.init_default(required, default, None)
    validator = _init_fields.init_validator(required, time)
    converter = converters.to_time_field(formatter)
    return attrib(default=default, converter=converter, validator=validator,
                  repr=repr, cmp=cmp, metadata=dict(formatter=formatter, key=key))
Create new time field on a model. :param formatter: time formatter string (default: "%H:%M:%S") :param default: any time or string that can be converted to a time value :param bool required: whether or not the object is invalid if not provided. :param bool repr: include this field should appear in object's repr. :param bool cmp: include this field in generated comparison. :param string key: override name of the value when converted to dict.
378,369
def newCatalog(sgml):
    ret = libxml2mod.xmlNewCatalog(sgml)
    if ret is None:
        raise treeError('xmlNewCatalog() failed')
    return catalog(_obj=ret)
create a new Catalog.
378,370
def is_valid(self):
    self.cleaned_data = {}
    self.changed_fields = []
    self.validated = False
    self._errors = {}
    self._named_errors = {}
    cleaned_data = {}
    changed_fields = []
    errors = {}
    named_errors = {}
    for name, subform in self._forms.items():
        if not subform.is_valid():
            errors[name] = subform._errors
            named_errors.update(subform._named_errors)
            continue
        if subform.has_changed:
            changed_fields.append(name)
    for name, formset in self._sets.items():
        if not formset.is_valid():
            errors[name] = formset._errors
            named_errors.update(formset._named_errors)
            continue
        if formset.has_changed:
            changed_fields.append(name)
    for name, field in self._fields.items():
        field.error = None
        py_value = field.validate(self)
        if field.error:
            errors[name] = field.error
            named_errors[field.name] = field.error
            continue
        cleaned_data[name] = py_value
        if hasattr(field, '_clear_value'):  # attribute name assumed; literal lost in extraction
            cleaned_data[name] = None
            field.has_changed = True
        if field.has_changed:
            changed_fields.append(name)
    for name, field in self._fields.items():
        field.validate(self, cleaned_data)
        if field.error:
            errors[name] = field.error
            named_errors[field.name] = field.error
            continue
    if errors:
        self._errors = errors
        self._named_errors = named_errors
        return False
    self.changed_fields = changed_fields
    self.cleaned_data = self.clean(cleaned_data)
    self.validated = True
    return True
Return whether the current values of the form fields are all valid.
378,371
def readValuesPyBigWig(self, reference, start, end):
    if not self.checkReference(reference):
        raise exceptions.ReferenceNameNotFoundException(reference)
    if start < 0:
        start = 0
    bw = pyBigWig.open(self._sourceFile)
    referenceLen = bw.chroms(reference)
    if referenceLen is None:
        raise exceptions.ReferenceNameNotFoundException(reference)
    if end > referenceLen:
        end = referenceLen
    if start >= end:
        raise exceptions.ReferenceRangeErrorException(reference, start, end)
    data = protocol.Continuous()
    curStart = start
    curEnd = curStart + self._INCREMENT
    while curStart < end:
        if curEnd > end:
            curEnd = end
        for i, val in enumerate(bw.values(reference, curStart, curEnd)):
            if not math.isnan(val):
                if len(data.values) == 0:
                    data.start = curStart + i
                data.values.append(val)
                if len(data.values) == self._MAX_VALUES:
                    yield data
                    data = protocol.Continuous()
            elif len(data.values) > 0:
                yield data
                data = protocol.Continuous()
        curStart = curEnd
        curEnd = curStart + self._INCREMENT
    bw.close()
    if len(data.values) > 0:
        yield data
Use pyBigWig package to read a BigWig file for the given range and return a protocol object. pyBigWig returns an array of values that fill the query range. Not sure if it is possible to get the step and span. This method trims NaN values from the start and end. pyBigWig throws an exception if end is outside of the reference range. This function checks the query range and throws its own exceptions to avoid the ones thrown by pyBigWig.
378,372
def closeEvent(self, event):
    if self.inimodel.get_edited():
        r = self.doc_modified_prompt()
        if r == QtGui.QMessageBox.Yes:
            event.accept()
        else:
            event.ignore()
    else:
        event.accept()
Handles closing of the window. If configs were edited, ask user to continue. :param event: the close event :type event: QCloseEvent :returns: None :rtype: None :raises: None
378,373
def on_trial_result(self, trial_runner, trial, result):
    bracket, _ = self._trial_info[trial]
    bracket.update_trial_stats(trial, result)
    if bracket.continue_trial(trial):
        return TrialScheduler.CONTINUE
    action = self._process_bracket(trial_runner, bracket, trial)
    return action
If bracket is finished, all trials will be stopped. If a given trial finishes and bracket iteration is not done, the trial will be paused and resources will be given up. This scheduler will not start trials but will stop trials. The current running trial will not be handled, as the trialrunner will be given control to handle it.
378,374
def _upload_file_aws_cli(local_fname, bucket, keyname, config=None, mditems=None):
    s3_fname = "s3://%s/%s" % (bucket, keyname)
    args = ["--sse", "--expected-size", str(os.path.getsize(local_fname))]
    if config:
        if config.get("region"):
            args += ["--region", config.get("region")]
        if config.get("reduced_redundancy"):
            args += ["--storage-class", "REDUCED_REDUNDANCY"]
    cmd = [os.path.join(os.path.dirname(sys.executable), "aws"), "s3", "cp"] + args + \
          [local_fname, s3_fname]
    do.run(cmd, "Upload to s3: %s %s" % (bucket, keyname))
Streaming upload via the standard AWS command line interface.
378,375
def get_mopheader(expnum, ccd, version='p', prefix=None):
    # The default version, file extension, and several header key names below
    # are assumed (OSSOS style); the string literals were lost in extraction.
    prefix = prefix is None and "" or prefix
    mopheader_uri = dbimages_uri(expnum=expnum, ccd=ccd, version=version,
                                 prefix=prefix, ext='mopheader')
    if mopheader_uri in mopheaders:
        return mopheaders[mopheader_uri]
    filename = os.path.basename(mopheader_uri)
    if os.access(filename, os.F_OK):
        logger.debug("File already on disk: {}".format(filename))
        mopheader_fpt = StringIO(open(filename, 'r').read())
    else:
        mopheader_fpt = StringIO(open_vos_or_local(mopheader_uri).read())
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', AstropyUserWarning)
        mopheader = fits.open(mopheader_fpt)
        header = mopheader[0].header
        try:
            header['FWHM'] = get_fwhm(expnum, ccd)
        except IOError:
            header['FWHM'] = 10
        header['SCALE'] = mopheader[0].header['PIXSCALE']
        header['NAX1'] = header['NAXIS1']
        header['NAX2'] = header['NAXIS2']
        header['MOPversion'] = header['MOP_VER']
        header['MJD_OBS_CENTER'] = str(Time(header['MJD-OBSC'], format='mjd',
                                            scale='utc', precision=5).replicate(format='mpc'))
        header['MAXCOUNT'] = MAXCOUNT
    mopheaders[mopheader_uri] = header
    mopheader.close()
    return mopheaders[mopheader_uri]
Retrieve the mopheader, either from cache or from vospace @param expnum: @param ccd: @param version: @param prefix: @return: Header
378,376
def define_charset(self, code, mode):
    if code in cs.MAPS:
        if mode == "(":
            self.g0_charset = cs.MAPS[code]
        elif mode == ")":
            self.g1_charset = cs.MAPS[code]
Define ``G0`` or ``G1`` charset. :param str code: character set code, should be a character from ``"B0UK"``, otherwise ignored. :param str mode: if ``"("`` ``G0`` charset is defined, if ``")"`` -- we operate on ``G1``. .. warning:: User-defined charsets are currently not supported.
378,377
def get_patient_mhc_haplotype(job, patient_dict):
    # dict key and file names assumed (ProTECT style); literals lost in extraction
    haplotype_archive = job.fileStore.readGlobalFile(patient_dict['hla_haplotype_files'])
    haplotype_archive = untargz(haplotype_archive, os.getcwd())
    output_dict = {}
    for filename in 'mhci_alleles.list', 'mhcii_alleles.list':
        output_dict[filename] = job.fileStore.writeGlobalFile(
            os.path.join(haplotype_archive, filename))
    return output_dict
Convenience function to get the mhc haplotype from the patient dict :param dict patient_dict: dict of patient info :return: The MHCI and MHCII haplotypes :rtype: toil.fileStore.FileID
378,378
def _compute_error(self):
    self._err = np.sum(np.multiply(self._R_k, self._C_k.T), axis=0) - self._d
Evaluate the absolute error of the Nystroem approximation for each column
378,379
def aliased_slot_names(self, slot_names: List[SlotDefinitionName]) -> Set[str]:
    return {self.aliased_slot_name(sn) for sn in slot_names}
Return the aliased slot names for all members of the list @param slot_names: actual slot names @return: aliases w/ duplicates removed
378,380
def read(self):
    found = Client.read(self)
    if self.needs_distribute_ready():
        self.distribute_ready()
    return found
Read some number of messages
378,381
def associate(self, id_option_vip, id_environment_vip):
    # error messages and URL template assumed (GloboNetworkAPI style);
    # literals lost in extraction
    if not is_valid_int_param(id_option_vip):
        raise InvalidParameterError(
            u'The identifier of Option VIP is invalid or was not informed.')
    if not is_valid_int_param(id_environment_vip):
        raise InvalidParameterError(
            u'The identifier of Environment VIP is invalid or was not informed.')
    url = 'optionvip/' + str(id_option_vip) + '/environmentvip/' + str(id_environment_vip) + '/'
    code, xml = self.submit(None, 'PUT', url)
    return self.response(code, xml)
Create a relationship of OptionVip with EnvironmentVip. :param id_option_vip: Identifier of the Option VIP. Integer value and greater than zero. :param id_environment_vip: Identifier of the Environment VIP. Integer value and greater than zero. :return: Following dictionary :: {'opcoesvip_ambiente_xref': {'id': < id_opcoesvip_ambiente_xref >} } :raise InvalidParameterError: Option VIP/Environment VIP identifier is null and/or invalid. :raise OptionVipNotFoundError: Option VIP not registered. :raise EnvironmentVipNotFoundError: Environment VIP not registered. :raise OptionVipError: Option vip is already associated with the environment vip. :raise UserNotAuthorizedError: User does not have authorization to make this association. :raise DataBaseError: Networkapi failed to access the database. :raise XMLError: Networkapi failed to generate the XML response.
378,382
def cleanup(self):
    log.debug('Removing sockets on shutdown')  # message assumed; literal lost in extraction
    if os.path.exists(self.cache_sock):
        os.remove(self.cache_sock)
    if os.path.exists(self.update_sock):
        os.remove(self.update_sock)
    if os.path.exists(self.upd_t_sock):
        os.remove(self.upd_t_sock)
remove sockets on shutdown
378,383
def validate(self, result, spec):
    ...  # function body lost in extraction; signature preserved as-is
Validate that the result has the correct structure.
378,384
def sourcehook(self, newfile, encoding='utf-8'):  # default encoding assumed; literal lost in extraction
    "Hook called on a filename to be sourced."
    from codecs import open
    if newfile[0] == '"':
        newfile = newfile[1:-1]
    # This implements cpp-like semantics for relative-path inclusion.
    if isinstance(self.infile, basestring) and not os.path.isabs(newfile):
        newfile = os.path.join(os.path.dirname(self.infile), newfile)
    return (newfile, open(newfile, "r", encoding))
Hook called on a filename to be sourced.
378,385
def setup_logging(fail_silently=False):
    # config filename, env var, and config keys assumed; literals lost in extraction
    config = None
    paths = list(get_config_paths(filename='logging.yaml', reversed=True))
    for path in paths:
        if not os.path.exists(path):
            continue
        with open(path, 'r') as f:
            config = yaml.safe_load(f.read())
        LOG_LEVEL = os.environ.get('LOG_LEVEL')
        if LOG_LEVEL:
            config['root']['level'] = LOG_LEVEL.upper()
            config['handlers']['console']['level'] = LOG_LEVEL.upper()
        logging.config.dictConfig(config)
        break
    else:
        if not fail_silently:
            raise LogconfigError('No logging config found in: {}'.format(paths))
    return config
Set up logging configuration. Finds the most user-facing log config on disk and uses it.
378,386
def check(self, password: str) -> bool:
    return (
        pbkdf2_sha512.verify(password, self.password)
        or pbkdf2_sha512.verify(password, pbkdf2_sha512.encrypt(self.api_key))
    )
Checks the given password with the one stored in the database
378,387
def add_trial(self, trial):
    trial.set_verbose(self._verbose)
    self._trials.append(trial)
    with warn_if_slow("scheduler.on_trial_add"):
        self._scheduler_alg.on_trial_add(self, trial)
    self.trial_executor.try_checkpoint_metadata(trial)
Adds a new trial to this TrialRunner. Trials may be added at any time. Args: trial (Trial): Trial to queue.
378,388
def summary_plot(pymc_obj, name='model', format='png', suffix='-summary', path='./',
                 alpha=0.05, chain=None, quartiles=True, hpd=True, rhat=True,
                 main=None, xlab=None, x_range=None, custom_labels=None,
                 chain_spacing=0.05, vline_pos=0):
    # Default argument values, plot-format strings, and message texts are
    # assumed (PyMC2 style); the string literals were lost in extraction.
    if not gridspec:
        print_('Your installation of matplotlib is not recent enough to '
               'support summary_plot; this function is disabled until '
               'matplotlib is updated.')
        return
    quantiles = [100 * alpha / 2, 50, 100 * (1 - alpha / 2)]
    if quartiles:
        quantiles = [100 * alpha / 2, 25, 50, 75, 100 * (1 - alpha / 2)]
    plotrange = None
    gs = None
    interval_plot = None
    rhat_plot = None
    try:
        vars = pymc_obj._variables_to_tally
    except AttributeError:
        try:
            vars = pymc_obj._traces
        except AttributeError:
            if isinstance(pymc_obj, Variable):
                vars = [pymc_obj]
            else:
                vars = pymc_obj
    from .diagnostics import gelman_rubin
    if rhat:
        try:
            R = {}
            for variable in vars:
                R[variable.__name__] = gelman_rubin(variable)
        except (ValueError, TypeError):
            print('Could not calculate Gelman-Rubin statistics. '
                  'Requires multiple chains of equal length.')
            rhat = False
    labels = []
    var = 1
    if all([v._plot == False for v in vars]):
        print_('No variables to plot')
        return
    for variable in vars:
        if variable._plot == False:
            continue
        varname = variable.__name__
        if chain is not None:
            chains = 1
            traces = [variable.trace(chain=chain)]
        else:
            chains = variable.trace.db.chains
            traces = [variable.trace(chain=i) for i in range(chains)]
        if gs is None:
            if rhat and chains > 1:
                gs = gridspec.GridSpec(1, 2, width_ratios=[3, 1])
            else:
                gs = gridspec.GridSpec(1, 1)
            interval_plot = subplot(gs[0])
        data = [calc_quantiles(d, quantiles) for d in traces]
        if hpd:
            for i, d in enumerate(traces):
                hpd_interval = calc_hpd(d, alpha)
                data[i][quantiles[0]] = hpd_interval[0]
                data[i][quantiles[-1]] = hpd_interval[1]
        data = [[d[q] for q in quantiles] for d in data]
        if plotrange:
            plotrange = [min(plotrange[0], nmin(data)), max(plotrange[1], nmax(data))]
        else:
            plotrange = [nmin(data), nmax(data)]
        try:
            value = variable.get_stoch_value()
        except AttributeError:
            value = variable.value
        k = size(value)
        if k > 1:
            names = var_str(varname, shape(value)[int(shape(value)[0] == 1):])
            labels += names
        else:
            labels.append(varname)
        e = [0] + [(chain_spacing * ((i + 2) / 2)) * (-1) ** i for i in range(chains - 1)]
        for j, quants in enumerate(data):
            if k > 1:
                ravelled_quants = list(map(ravel, quants))
                for i, quant in enumerate(transpose(ravelled_quants)):
                    q = ravel(quant)
                    y = -(var + i) + e[j]
                    if quartiles:
                        pyplot(q[2], y, 'bo', markersize=4)
                        errorbar(x=(q[1], q[3]), y=(y, y), linewidth=2, color="blue")
                    else:
                        pyplot(q[1], y, 'bo', markersize=4)
                    errorbar(x=(q[0], q[-1]), y=(y, y), linewidth=1, color="blue")
            else:
                y = -var + e[j]
                if quartiles:
                    pyplot(quants[2], y, 'bo', markersize=4)
                    errorbar(x=(quants[1], quants[3]), y=(y, y), linewidth=2, color="blue")
                else:
                    pyplot(quants[1], y, 'bo', markersize=4)
                errorbar(x=(quants[0], quants[-1]), y=(y, y), linewidth=1, color="blue")
        var += k
    if custom_labels is not None:
        labels = custom_labels
    left_margin = max([len(x) for x in labels]) * 0.015
    gs.update(left=left_margin, right=0.95, top=0.9, bottom=0.05)
    ylim(-var + 0.5, -0.5)
    datarange = plotrange[1] - plotrange[0]
    xlim(plotrange[0] - 0.05 * datarange, plotrange[1] + 0.05 * datarange)
    yticks([-(l + 1) for l in range(len(labels))], labels)
    if main is not False:
        plot_title = main or str(int((1 - alpha) * 100)) + "% Credible Intervals"
        title(plot_title)
    if xlab is not None:
        xlabel(xlab)
    if x_range is not None:
        xlim(*x_range)
    for ticks in interval_plot.yaxis.get_major_ticks():
        ticks.tick1On = False
        ticks.tick2On = False
    for loc, spine in six.iteritems(interval_plot.spines):
        if loc in ['bottom', 'left']:
            pass
        elif loc in ['top', 'right']:
            spine.set_color('none')
    savefig("%s%s%s.%s" % (path, name, suffix, format))
Model summary plot Generates a "forest plot" of 100*(1-alpha)% credible intervals for either the set of nodes in a given model, or a specified set of nodes. :Arguments: pymc_obj: PyMC object, trace or array A trace from an MCMC sample or a PyMC object with one or more traces. name (optional): string The name of the object. format (optional): string Graphic output format (defaults to png). suffix (optional): string Filename suffix. path (optional): string Specifies location for saving plots (defaults to local directory). alpha (optional): float Alpha value for (1-alpha)*100% credible intervals (defaults to 0.05). chain (optional): int Where there are multiple chains, specify a particular chain to plot. If not specified (chain=None), all chains are plotted. quartiles (optional): bool Flag for plotting the interquartile range, in addition to the (1-alpha)*100% intervals (defaults to True). hpd (optional): bool Flag for plotting the highest probability density (HPD) interval instead of the central (1-alpha)*100% interval (defaults to True). rhat (optional): bool Flag for plotting Gelman-Rubin statistics. Requires 2 or more chains (defaults to True). main (optional): string Title for main plot. Passing False results in titles being suppressed; passing None (default) results in default titles. xlab (optional): string Label for x-axis. Defaults to no label. x_range (optional): list or tuple Range for x-axis. Defaults to matplotlib's best guess. custom_labels (optional): list User-defined labels for each node. If not provided, the node __name__ attributes are used. chain_spacing (optional): float Plot spacing between chains (defaults to 0.05). vline_pos (optional): numeric Location of vertical reference line (defaults to 0).
378,389
def calibrate_counts(array, attributes, index): offset = np.float32(attributes["corrected_counts_offsets"][index]) scale = np.float32(attributes["corrected_counts_scales"][index]) array = (array - offset) * scale return array
Calibration for counts channels.
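A tiny worked example of the linear (counts - offset) * scale transform; the attribute values below are made up for illustration:

import numpy as np

attributes = {
    "corrected_counts_offsets": [100.0, 120.0],  # made-up offsets
    "corrected_counts_scales": [0.5, 0.25],      # made-up scales
}
raw = np.array([110.0, 130.0, 150.0], dtype=np.float32)
print(calibrate_counts(raw, attributes, index=0))  # (raw - 100.0) * 0.5 -> [ 5. 15. 25.]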
378,390
def addresses(self):
    if self._addresses is None:
        self._addresses = AddressList(self._version, account_sid=self._solution['account_sid'], )
    return self._addresses
Access the addresses :returns: twilio.rest.api.v2010.account.address.AddressList :rtype: twilio.rest.api.v2010.account.address.AddressList
378,391
def unpickle(pickle_file): pickle = None with open(pickle_file, "rb") as pickle_f: pickle = dill.load(pickle_f) if not pickle: LOG.error("Could not load python object from file") return pickle
Unpickle a python object from the given path.
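A round-trip sketch with a hypothetical file name; dill is used here precisely because it can serialize objects the stdlib pickle cannot, such as lambdas:

import dill

payload = {"square": lambda x: x ** 2, "name": "demo"}  # lambdas need dill
with open("demo.pkl", "wb") as fh:
    dill.dump(payload, fh)

restored = unpickle("demo.pkl")
print(restored["square"](3))  # 9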
378,392
def interactions_iter(self, nbunch=None, t=None): seen = {} if nbunch is None: nodes_nbrs = self._adj.items() else: nodes_nbrs = ((n, self._adj[n]) for n in self.nbunch_iter(nbunch)) for n, nbrs in nodes_nbrs: for nbr in nbrs: if t is not None: if nbr not in seen and self.__presence_test(n, nbr, t): yield (n, nbr, {"t": [t]}) else: if nbr not in seen: yield (n, nbr, self._adj[n][nbr]) seen[n] = 1 del seen
Return an iterator over the interactions present in a given snapshot.

Edges are returned as tuples in the order (node, neighbor).

Parameters
----------
nbunch : iterable container, optional (default= all nodes)
    A container of nodes. The container will be iterated through once.
t : snapshot id (default=None)
    If None the method returns an iterator over the edges of the flattened graph.

Returns
-------
edge_iter : iterator
    An iterator of (u,v) tuples of interactions.

See Also
--------
interactions : return a list of interactions

Notes
-----
Nodes in nbunch that are not in the graph will be (quietly) ignored. For directed graphs this returns the out-interactions.

Examples
--------
>>> G = dn.DynGraph()
>>> G.add_path([0,1,2], 0)
>>> G.add_interaction(2,3,1)
>>> [e for e in G.interactions_iter(t=0)]
[(0, 1), (1, 2)]
>>> list(G.interactions_iter())
[(0, 1), (1, 2), (2, 3)]
378,393
def appendMissingSignatures(self): missing_signatures = self.get("missing_signatures", []) for pub in missing_signatures: wif = self.blockchain.wallet.getPrivateKeyForPublicKey(pub) if wif: self.appendWif(wif)
Store which accounts/keys are supposed to sign the transaction This method is used for an offline-signer!
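A hedged sketch of the offline-signing flow this method supports; everything below is illustrative and the exact builder API may differ:

# Illustrative only: 'tx' stands for a transaction built on an online machine
# whose "missing_signatures" field lists the public keys that still need to sign.
# On the signing machine, the wallet is unlocked so matching WIFs can be found.
tx.blockchain.wallet.unlock("wallet-passphrase")  # hypothetical passphrase
tx.appendMissingSignatures()  # look up WIFs for the listed public keys
tx.sign()                     # sign with the appended WIFs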
378,394
def select_good_pixel_region(hits, col_span, row_span, min_cut_threshold=0.2, max_cut_threshold=2.0):
    hits = np.sum(hits, axis=(-1)).astype('u8')  # collapse extra dimensions to an occupancy map
    mask = np.ones(shape=(80, 336), dtype=np.uint8)
    mask[min(col_span):max(col_span) + 1, min(row_span):max(row_span) + 1] = 0
    ma = np.ma.masked_where(mask, hits)
    if max_cut_threshold is not None:
        return np.ma.masked_where(np.logical_or(ma < min_cut_threshold * np.ma.median(ma), ma > max_cut_threshold * np.ma.median(ma)), ma)
    else:
        return np.ma.masked_where(ma < min_cut_threshold * np.ma.median(ma), ma)
Takes the hit array and masks all pixels whose occupancy falls outside the given thresholds.

Parameters
----------
hits : array like
    If dim > 2 the additional dimensions are summed up.
min_cut_threshold : float
    Minimum occupancy threshold. Pixels are masked if occupancy < min_cut_threshold * np.ma.median(occupancy). 0 means that no pixels are masked.
max_cut_threshold : float
    Maximum occupancy threshold. Pixels are masked if occupancy > max_cut_threshold * np.ma.median(occupancy). Can be set to None so that no pixels are masked by max_cut_threshold.

Returns
-------
numpy.ma.array, shape=(80,336)
    The hits array with masked pixels.
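A synthetic sketch of the occupancy masking on the 80 x 336 pixel matrix; the Poisson occupancy and the injected noisy/dead pixels are made up:

import numpy as np

hits = np.random.poisson(lam=50, size=(80, 336, 1))  # synthetic occupancy
hits[10, 10, 0] = 5000  # noisy pixel, caught by max_cut_threshold
hits[20, 20, 0] = 0     # dead pixel, caught by min_cut_threshold
masked = select_good_pixel_region(hits, col_span=(5, 70), row_span=(5, 330))
print(masked.mask[10, 10], masked.mask[20, 20])  # True True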
378,395
def get_default(self, ctx): if callable(self.default): rv = self.default() else: rv = self.default return self.type_cast_value(ctx, rv)
Given a context variable this calculates the default value.
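A short sketch of why callable defaults matter in click: the callable is invoked at lookup time and its result is then cast by the parameter's type (the option name below is illustrative):

import click

@click.command()
@click.option('--retries', default=lambda: "3", type=click.INT)
def run(retries):
    # The lambda returns the string "3"; type_cast_value turns it into int 3.
    click.echo(retries + 1)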
378,396
def cleanup_sweep_threads():
    for dict_name, obj in globals().items():
        if isinstance(obj, (TimedDict,)):
            logging.info(
                'Stopping sweep thread for TimedDict {dict_name}'.format(
                    dict_name=dict_name))
            obj.stop_sweep()
Not used. Keeping this function in case we decide not to use daemonized threads and it becomes necessary to clean up the running threads upon exit.
378,397
def explain_feature(featurename):
    import os
    import featuremonkey
    import importlib
    import subprocess

    def guess_version(feature_module):
        if hasattr(feature_module, '__version__'):
            return feature_module.__version__
        if hasattr(feature_module, 'get_version'):
            return feature_module.get_version()
        return (
            'unknown - feature module defines neither __version__ nor get_version'
        )

    def git_rev(module):
        stdout, stderr = subprocess.Popen(
            ["git", "rev-parse", "HEAD"],
            cwd=os.path.dirname(module.__file__),
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        ).communicate()
        if 'fatal' in stderr:
            return 'not inside a git repository'
        else:
            return stdout.strip()

    def git_changes(module):
        stdout = subprocess.Popen(
            ["git", "diff", "--name-only"],
            cwd=os.path.dirname(module.__file__),
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        ).communicate()[0]
        return stdout.strip() or 'none'

    if featurename in featuremonkey.get_features_from_equation_file(
            os.environ['PRODUCT_EQUATION_FILENAME']):
        print()
        print(featurename)
        print('-' * 60)
        print()
        is_subfeature = '.' in featurename
        try:
            feature_module = importlib.import_module(featurename)
        except ImportError:
            print('Cannot import feature %s!' % featurename)
            return
        print('Location: %s' % os.path.dirname(feature_module.__file__))
        print()
        if is_subfeature:
            print('This is a subfeature.')
            print('Version and git information are reported on the parent feature.')
        else:
            print('Version: %s' % str(guess_version(feature_module)))
            print()
            print('git revision: %s' % git_rev(feature_module))
            print()
            print('Locally modified files: %s' % ', '.join(
                git_changes(feature_module).split()))
    else:
        print('No such feature in the current product: ' + featurename)
Print the location of a single feature and its version. If the feature is located inside a git repository, this will also print the git revision and locally modified files.
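A hypothetical invocation; the equation file path and feature name are placeholders:

import os

os.environ['PRODUCT_EQUATION_FILENAME'] = '/srv/myproduct/product.equation'  # placeholder path
explain_feature('myproduct.features.payment')  # placeholder feature name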
378,398
def update(self, status=values.unset, announce_url=values.unset, announce_method=values.unset):
    data = values.of({
        'Status': status,
        'AnnounceUrl': announce_url,
        'AnnounceMethod': announce_method,
    })

    payload = self._version.update(
        'POST',
        self._uri,
        data=data,
    )

    return ConferenceInstance(
        self._version,
        payload,
        account_sid=self._solution['account_sid'],
        sid=self._solution['sid'],
    )
Update the ConferenceInstance

:param ConferenceInstance.UpdateStatus status: The new status of the resource
:param unicode announce_url: The URL we should call to announce something into the conference
:param unicode announce_method: The HTTP method used to call announce_url

:returns: Updated ConferenceInstance
:rtype: twilio.rest.api.v2010.account.conference.ConferenceInstance
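A usage sketch against the public Twilio Python client; the SIDs are placeholders. Setting status to "completed" ends the conference:

from twilio.rest import Client

client = Client("ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX", "your_auth_token")  # placeholders
conference = client.conferences("CFXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").update(
    status="completed"
)
print(conference.status)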
378,399
def fix_e224(self, result):
    target = self.source[result['line'] - 1]
    offset = result['column'] - 1
    fixed = target[:offset] + target[offset:].replace('\t', ' ')
    self.source[result['line'] - 1] = fixed
Remove extraneous whitespace around operator.
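A before/after sketch of the E224 fix on a stand-in class; the 'result' dict mimics a pycodestyle report entry (1-based line and column):

class _Fixer:
    def __init__(self, source):
        self.source = source

_Fixer.fix_e224 = fix_e224  # attach the function above as a method
fixer = _Fixer(["x = 1 +\t2\n"])  # tab after the '+' operator
fixer.fix_e224({'line': 1, 'column': 8})
print(repr(fixer.source[0]))  # 'x = 1 + 2\n'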