Dataset columns: Unnamed: 0 (row index, int64, 0–389k), code (string, lengths 26–79.6k), docstring (string, lengths 1–46.9k).
11,300
def broadcast(*sinks_): @push def bc(): sinks = [s() for s in sinks_] while True: msg = yield for s in sinks: s.send(msg) return bc
The |broadcast| decorator creates a |push| object that receives a message by ``yield`` and then sends this message on to all the given sinks. .. |broadcast| replace:: :py:func:`broadcast`
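A minimal runnable sketch of how this coroutine-based broadcast might be used. The |push| decorator is not shown in this row, so the version below is an assumed stand-in that simply primes the generator.

```python
import functools

def push(gen_func):
    # Assumed stand-in for the |push| decorator: prime the coroutine on creation.
    @functools.wraps(gen_func)
    def primed(*args, **kwargs):
        gen = gen_func(*args, **kwargs)
        next(gen)
        return gen
    return primed

def broadcast(*sinks_):
    @push
    def bc():
        sinks = [s() for s in sinks_]
        while True:
            msg = yield
            for s in sinks:
                s.send(msg)
    return bc

@push
def printer(prefix):
    # A simple sink that prints every message it receives.
    while True:
        msg = yield
        print(prefix, msg)

# Fan a single message out to two sinks.
bc = broadcast(lambda: printer("a:"), lambda: printer("b:"))()
bc.send("hello")  # prints "a: hello" and "b: hello"
```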
11,301
def _sb_decoder(self): bloc = self.telnet_sb_buffer if len(bloc) > 2: if bloc[0] == TTYPE and bloc[1] == IS: self.terminal_type = bloc[2:] if bloc[0] == NAWS: if len(bloc) != 5: print "Bad length on NAWS SB:", len(bloc) else: self.columns = (256 * ord(bloc[1])) + ord(bloc[2]) self.rows = (256 * ord(bloc[3])) + ord(bloc[4]) self.telnet_sb_buffer = ''
Figures out what to do with a received sub-negotiation block.
11,302
def set_calibrated_weights(self): period = self.period survey_scenario = self.survey_scenario assert survey_scenario.simulation is not None for simulation in [survey_scenario.simulation, survey_scenario.baseline_simulation]: if simulation is None: continue simulation.set_input(self.weight_name, period, self.weight)
Modify the weights to use the calibrated weights
11,303
async def states(self, country: str) -> list: data = await self._request( , , params={: country}) return [d[] for d in data[]]
Return a list of supported states in a country.
11,304
def call_listen(self, chunks, running): listeners = [] crefs = {} for chunk in chunks: crefs[(chunk[], chunk[], chunk[])] = chunk if in chunk: listeners.append({(chunk[], chunk[], chunk[]): chunk[]}) if in chunk: for l_in in chunk[]: for key, val in six.iteritems(l_in): listeners.append({(key, val, ): [{chunk[]: chunk[]}]}) mod_watchers = [] errors = {} for l_dict in listeners: for key, val in six.iteritems(l_dict): for listen_to in val: if not isinstance(listen_to, dict): found = False for chunk in chunks: if chunk[] == listen_to or \ chunk[] == listen_to: listen_to = {chunk[]: chunk[]} found = True if not found: continue for lkey, lval in six.iteritems(listen_to): if not any(lkey == cref[0] and lval in cref for cref in crefs): rerror = {_l_tag(lkey, lval): { : .format(lkey, lval), : .format(lkey, lval), : False, : {} }} errors.update(rerror) continue to_tags = [ _gen_tag(data) for cref, data in six.iteritems(crefs) if lkey == cref[0] and lval in cref ] for to_tag in to_tags: if to_tag not in running: continue if running[to_tag][]: if not any(key[0] == cref[0] and key[1] in cref for cref in crefs): rerror = {_l_tag(key[0], key[1]): {: .format(key[0], key[1]), : .format(key[0], key[1]), : False, : {}}} errors.update(rerror) continue new_chunks = [data for cref, data in six.iteritems(crefs) if key[0] == cref[0] and key[1] in cref] for chunk in new_chunks: low = chunk.copy() low[] = chunk[] low[] = low[] = .format(low[]) for req in STATE_REQUISITE_KEYWORDS: if req in low: low.pop(req) mod_watchers.append(low) ret = self.call_chunks(mod_watchers) running.update(ret) for err in errors: errors[err][] = self.__run_num self.__run_num += 1 running.update(errors) return running
Find all of the listen routines and call the associated mod_watch runs
11,305
def form_valid(self, post_form, attachment_formset, **kwargs): save_attachment_formset = attachment_formset is not None \ and not self.preview if self.preview: return self.render_to_response( self.get_context_data( preview=True, post_form=post_form, attachment_formset=attachment_formset, **kwargs ), ) self.forum_post = post_form.save() if save_attachment_formset: attachment_formset.post = self.forum_post attachment_formset.save() messages.success(self.request, self.success_message) if not self.forum_post.approved: messages.warning(self.request, self.approval_required_message) return HttpResponseRedirect(self.get_success_url())
Processes valid forms. Called if all forms are valid. Creates a Post instance along with associated attachments if required and then redirects to a success page.
11,306
def is_action_available(self, action): temp_state = np.rot90(self._state, action) return self._is_action_available_left(temp_state)
Determines whether action is available. That is, executing it would change the state.
11,307
def _AddStopTimeObjectUnordered(self, stoptime, schedule): stop_time_class = self.GetGtfsFactory().StopTime cursor = schedule._connection.cursor() insert_query = "INSERT INTO stop_times (%s) VALUES (%s);" % ( ','.join(stop_time_class._SQL_FIELD_NAMES), ','.join(['?'] * len(stop_time_class._SQL_FIELD_NAMES))) cursor = schedule._connection.cursor() cursor.execute( insert_query, stoptime.GetSqlValuesTuple(self.trip_id))
Add StopTime object to this trip. The trip isn't checked for duplicate sequence numbers so it must be validated later.
11,308
def ls(args): assert args.path, "MAKESITE HOME not found." print_header("Installed sites:") for site in gen_sites(args.path): LOGGER.debug(site.get_info()) return True
List sites ---------- Show list of installed sites. :: usage: makesite ls [-h] [-v] [-p PATH] Show list of installed sites. optional arguments: -p PATH, --path PATH path to the makesite sites installation dir. You can set it in the $MAKESITE_HOME env variable. Examples: :: makesite ls
11,309
def declare_queue(self, queue_name='', passive=False, durable=False, exclusive=False, auto_delete=False, arguments=None): result = self._channel.queue_declare( queue=queue_name, passive=passive, durable=durable, exclusive=exclusive, auto_delete=auto_delete, arguments=arguments ) return result.method.queue
ε£°ζ˜ŽδΈ€δΈͺι˜Ÿεˆ— :param queue_name: ι˜Ÿεˆ—ε :param passive: :param durable: :param exclusive: :param auto_delete: :param arguments: :return: pika ζ‘†ζžΆη”Ÿζˆηš„ιšζœΊε›žθ°ƒι˜Ÿεˆ—ε
11,310
def _expand_slice(self, indices): keys = list(self.data.keys()) expanded = [] for idx, ind in enumerate(indices): if isinstance(ind, slice) and ind.step is not None: dim_ind = slice(ind.start, ind.stop) if dim_ind == slice(None): condition = self._all_condition() elif dim_ind.start is None: condition = self._upto_condition(dim_ind) elif dim_ind.stop is None: condition = self._from_condition(dim_ind) else: condition = self._range_condition(dim_ind) dim_vals = unique_iterator(k[idx] for k in keys) expanded.append(set([k for k in dim_vals if condition(k)][::int(ind.step)])) else: expanded.append(ind) return tuple(expanded)
Expands slices containing steps into a list.
11,311
def get_grid_points_by_rotations(address_orig, reciprocal_rotations, mesh, is_shift=None, is_dense=False): _set_no_error() if is_shift is None: _is_shift = np.zeros(3, dtype='intc') else: _is_shift = np.array(is_shift, dtype='intc') rot_grid_points = np.zeros(len(reciprocal_rotations), dtype='uintp') spg.grid_points_by_rotations( rot_grid_points, np.array(address_orig, dtype='intc'), np.array(reciprocal_rotations, dtype='intc', order='C'), np.array(mesh, dtype='intc'), _is_shift) if is_dense: return rot_grid_points else: return np.array(rot_grid_points, dtype='intc')
Returns grid points obtained after rotating input grid address Parameters ---------- address_orig : array_like Grid point address to be rotated. dtype='intc', shape=(3,) reciprocal_rotations : array_like Rotation matrices {R} with respect to reciprocal basis vectors. Defined by q'=Rq. dtype='intc', shape=(rotations, 3, 3) mesh : array_like dtype='intc', shape=(3,) is_shift : array_like, optional With (1) or without (0) half grid shifts with respect to grid intervals sampled along reciprocal basis vectors. Default is None, which gives [0, 0, 0]. is_dense : bool, optional rot_grid_points is returned with dtype='uintp' if True. Otherwise its dtype='intc'. Default is False. Returns ------- rot_grid_points : ndarray Grid points obtained after rotating input grid address dtype='intc' or 'uintp', shape=(rotations,)
11,312
def add_task(self, task_id, backend, category, backend_args, archive_args=None, sched_args=None): try: archiving_cfg = self.__parse_archive_args(archive_args) scheduling_cfg = self.__parse_schedule_args(sched_args) self.__validate_args(task_id, backend, category, backend_args) except ValueError as e: raise e try: task = self._tasks.add(task_id, backend, category, backend_args, archiving_cfg=archiving_cfg, scheduling_cfg=scheduling_cfg) except AlreadyExistsError as e: raise e self._scheduler.schedule_task(task.task_id) return task
Add and schedule a task. :param task_id: id of the task :param backend: name of the backend :param category: category of the items to fetch :param backend_args: args needed to initialize the backend :param archive_args: args needed to initialize the archive :param sched_args: scheduling args for this task :returns: the task created
11,313
def kmodels(wordlen: int, k: int, input=None, output=None): assert 0 <= k < 2**wordlen if output is None: output = _fresh() if input is None: input = _fresh() input_names = named_indexes(wordlen, input) atoms = map(aiger.atom, input_names) active = False expr = aiger.atom(False) for atom, bit in zip(atoms, encode_int(wordlen, k, signed=False)): active |= bit if not active: continue expr = (expr | atom) if bit else (expr & atom) return aigbv.AIGBV( aig=expr.aig, input_map=frozenset([(input, tuple(input_names))]), output_map=frozenset([(output, (expr.output,))]), )
Return a circuit taking a wordlen bitvector where only k valuations return True. Uses encoding from [1]. Note that this is equivalent to (~x < k). - TODO: Add automated simplification so that the circuits are equiv. [1]: Chakraborty, Supratik, et al. "From Weighted to Unweighted Model Counting." IJCAI. 2015.
11,314
def arguments_to_lists(function): def l_function(*args, **kwargs): l_args = [_to_list(arg) for arg in args] l_kwargs = {} for key, value in kwargs.items(): l_kwargs[key] = _to_list(value) return function(*l_args, **l_kwargs) return l_function
Decorator for a function that converts all arguments to lists. :param function: target function :return: target function with only lists as parameters
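A usage sketch of the decorator; the _to_list helper is not included in this row, so the version below is an assumed stand-in that wraps non-list values in a one-element list.

```python
def _to_list(value):
    # Assumed helper: pass lists through, wrap anything else in a list.
    return value if isinstance(value, list) else [value]

def arguments_to_lists(function):
    def l_function(*args, **kwargs):
        l_args = [_to_list(arg) for arg in args]
        l_kwargs = {key: _to_list(value) for key, value in kwargs.items()}
        return function(*l_args, **l_kwargs)
    return l_function

@arguments_to_lists
def combine(xs, ys=None):
    return xs + (ys or [])

print(combine(1, ys=2))         # [1, 2]
print(combine([1, 2], ys=[3]))  # [1, 2, 3]
```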
11,315
def find_by_id(self, story, params={}, **options): path = "/stories/%s" % (story) return self.client.get(path, params, **options)
Returns the full record for a single story. Parameters ---------- story : {Id} Globally unique identifier for the story. [params] : {Object} Parameters for the request
11,316
def delete_doc_by_id(self, collection, doc_id, **kwargs): if in doc_id: doc_id = .format(doc_id) temp = {"delete": {"query": .format(doc_id)}} resp, con_inf = self.transport.send_request(method=, endpoint=, collection=collection, data=json.dumps(temp), **kwargs) return resp
:param str collection: The name of the collection for the request :param str doc_id: ID of the document to be deleted. Can specify '*' to delete everything. Deletes items from Solr based on the ID. :: >>> solr.delete_doc_by_id('SolrClient_unittest','changeme')
11,317
def bel_process_belrdf(): if request.method == : return {} response = request.body.read().decode() body = json.loads(response) belrdf = body.get() bp = bel.process_belrdf(belrdf) return _stmts_from_proc(bp)
Process BEL RDF and return INDRA Statements.
11,318
def version(): click.echo( % __version__) click.echo( % CUR_API_VERSION) try: r = client.get() except RequestException as ex: raise exc.TowerCLIError( % six.text_type(ex)) config = r.json() license = config.get(, {}).get(, ) if license == : server_type = else: server_type = click.echo( % (server_type, config[])) click.echo( % config[])
Display full version information.
11,319
def _set_link_local_route_oif_type(self, v, load=False): parent = getattr(self, "_parent", None) if parent is not None and load is False: raise AttributeError("Cannot set keys directly when" + " within an instantiated list") if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u: {: 3}, u: {: 4}, u: {: 6}, u: {: 2}, u: {: 1}, u: {: 7}, u: {: 5}},), is_leaf=True, yang_name="link-local-route-oif-type", rest_name="link-local-route-oif-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u: {u: u}}, is_keyval=True, namespace=, defining_module=, yang_type=, is_config=True) except (TypeError, ValueError): raise ValueError({ : , : "brocade-ipv6-rtm:enumeration", : , }) self.__link_local_route_oif_type = t if hasattr(self, ): self._set()
Setter method for link_local_route_oif_type, mapped from YANG variable /rbridge_id/vrf/address_family/ipv6/unicast/ipv6/route/link_local_static_route_nh/link_local_route_oif_type (enumeration) If this variable is read-only (config: false) in the source YANG file, then _set_link_local_route_oif_type is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_link_local_route_oif_type() directly.
11,320
def number_to_dp(number: Optional[float], dp: int, default: Optional[str] = "", en_dash_for_minus: bool = True) -> str: if number is None: return default if number == float("inf"): return u"∞" if number == float("-inf"): s = u"-∞" else: s = u"{:.{precision}f}".format(number, precision=dp) if en_dash_for_minus: s = s.replace("-", u"–") return s
Format number to ``dp`` decimal places, optionally using a UTF-8 en dash for minus signs.
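The row's code is already complete; restated here in multi-line form with a few example calls to show the en-dash behaviour.

```python
from typing import Optional

def number_to_dp(number: Optional[float], dp: int,
                 default: Optional[str] = "",
                 en_dash_for_minus: bool = True) -> str:
    if number is None:
        return default
    if number == float("inf"):
        return "∞"
    if number == float("-inf"):
        s = "-∞"
    else:
        s = "{:.{precision}f}".format(number, precision=dp)
    if en_dash_for_minus:
        s = s.replace("-", "\u2013")  # en dash
    return s

print(number_to_dp(3.14159, 2))                        # 3.14
print(number_to_dp(-1.5, 1))                           # –1.5 (en-dash minus)
print(number_to_dp(-1.5, 1, en_dash_for_minus=False))  # -1.5
print(number_to_dp(None, 2, default="n/a"))            # n/a
```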
11,321
def refresh_information(self, accept=MEDIA_TYPE_TAXII_V20): response = self.__raw = self._conn.get(self.url, headers={"Accept": accept}) self._populate_fields(**response) self._loaded_information = True
Update the properties of this API Root. This invokes the ``Get API Root Information`` endpoint.
11,322
def _wr_ver_n_key(self, fout_txt, verbose): with open(fout_txt, 'w') as prt: self._prt_ver_n_key(prt, verbose) print('  WROTE: {TXT}'.format(TXT=fout_txt))
Write GO DAG version and key indicating presence of GO ID in a list.
11,323
def pad_chunk_columns(chunk): columns = set() for record in chunk: columns.update(record.keys()) for record in chunk: for column in columns: record.setdefault(column, None) return chunk
Given a set of items to be inserted, make sure they all have the same columns by padding columns with None if they are missing.
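A small worked example of the padding behaviour (the set of columns is unordered, so the order in which missing keys are added may vary).

```python
def pad_chunk_columns(chunk):
    # Union of all keys seen in the chunk, then fill gaps with None.
    columns = set()
    for record in chunk:
        columns.update(record.keys())
    for record in chunk:
        for column in columns:
            record.setdefault(column, None)
    return chunk

rows = [{"id": 1, "name": "a"}, {"id": 2, "city": "Oslo"}]
print(pad_chunk_columns(rows))
# [{'id': 1, 'name': 'a', 'city': None}, {'id': 2, 'city': 'Oslo', 'name': None}]
```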
11,324
def inheritsFrom(self, target_name): for t in self.hierarchy: if t and t.getName() == target_name or target_name in t.description.get('inherits', {}): return True return False
Return true if this target inherits from the named target (directly or indirectly). Also returns true if this target is the named target. Otherwise return false.
11,325
def _to_dict(self): _dict = {} if hasattr(self, 'results') and self.results is not None: _dict['results'] = [x._to_dict() for x in self.results] if hasattr(self, 'count') and self.count is not None: _dict['count'] = self.count return _dict
Return a json dictionary representing this model.
11,326
def IsAllSpent(self): for item in self.Items: if item == CoinState.Confirmed: return False return True
Flag indicating if all balance is spent. Returns: bool:
11,327
def _parse_kexgss_continue(self, m): if not self.transport.server_mode: srv_token = m.get_string() m = Message() m.add_byte(c_MSG_KEXGSS_CONTINUE) m.add_string( self.kexgss.ssh_init_sec_context( target=self.gss_host, recv_token=srv_token ) ) self.transport.send_message(m) self.transport._expect_packet( MSG_KEXGSS_CONTINUE, MSG_KEXGSS_COMPLETE, MSG_KEXGSS_ERROR ) else: pass
Parse the SSH2_MSG_KEXGSS_CONTINUE message. :param `.Message` m: The content of the SSH2_MSG_KEXGSS_CONTINUE message
11,328
def db(self): if self._db is None: if self.tcex.default_args.tc_playbook_db_type == : from .tcex_redis import TcExRedis self._db = TcExRedis( self.tcex.default_args.tc_playbook_db_path, self.tcex.default_args.tc_playbook_db_port, self.tcex.default_args.tc_playbook_db_context, ) elif self.tcex.default_args.tc_playbook_db_type == : from .tcex_key_value import TcExKeyValue self._db = TcExKeyValue(self.tcex) else: err = u.format(self.tcex.default_args.tc_playbook_db_type) raise RuntimeError(err) return self._db
Return the correct KV store for this execution.
11,329
def group_membership_show(self, id, **kwargs): api_path = "/api/v2/group_memberships/{id}.json" api_path = api_path.format(id=id) return self.call(api_path, **kwargs)
https://developer.zendesk.com/rest_api/docs/core/group_memberships#show-membership
11,330
def relabel(self, column_label, new_label): if isinstance(column_label, numbers.Integral): column_label = self._as_label(column_label) if isinstance(column_label, str) and isinstance(new_label, str): column_label, new_label = [column_label], [new_label] if len(column_label) != len(new_label): raise ValueError('Invalid arguments. column_label and new_label must be of equal length.') old_to_new = dict(zip(column_label, new_label)) for label in column_label: if not (label in self.labels): raise ValueError('Invalid labels. Column labels must already exist in table in order to be replaced.') rewrite = lambda s: old_to_new[s] if s in old_to_new else s columns = [(rewrite(s), c) for s, c in self._columns.items()] self._columns = collections.OrderedDict(columns) for label in self._formats: if label in column_label: formatter = self._formats.pop(label) self._formats[old_to_new[label]] = formatter return self
Changes the label(s) of column(s) specified by ``column_label`` to labels in ``new_label``. Args: ``column_label`` -- (single str or array of str) The label(s) of columns to be changed to ``new_label``. ``new_label`` -- (single str or array of str): The label name(s) of columns to replace ``column_label``. Raises: ``ValueError`` -- if ``column_label`` is not in table, or if ``column_label`` and ``new_label`` are not of equal length. ``TypeError`` -- if ``column_label`` and/or ``new_label`` is not ``str``. Returns: Original table with ``new_label`` in place of ``column_label``. >>> table = Table().with_columns( ... 'points', make_array(1, 2, 3), ... 'id', make_array(12345, 123, 5123)) >>> table.relabel('id', 'yolo') points | yolo 1 | 12,345 2 | 123 3 | 5,123 >>> table.relabel(make_array('points', 'yolo'), ... make_array('red', 'blue')) red | blue 1 | 12,345 2 | 123 3 | 5,123 >>> table.relabel(make_array('red', 'green', 'blue'), ... make_array('cyan', 'magenta', 'yellow', 'key')) Traceback (most recent call last): ... ValueError: Invalid arguments. column_label and new_label must be of equal length.
11,331
def get_events(self): header = BASE_HEADERS.copy() header[] = self.__cookie request = requests.post(BASE_URL + , headers=header, timeout=10) if request.status_code != 200: self.__logged_in = False self.login() self.get_events() return try: result = request.json() except ValueError as error: raise Exception( "Not a valid result for getEvent," + " protocol error: " + error) return self._get_events(result)
Return a set of events which have occurred since the last call of this method. This method should be called regularly to get all occurring Events. There are three different Event types/classes which can be returned: - DeviceStateChangedEvent, if any device changed its state due to an applied action or just because of other reasons - CommandExecutionStateChangedEvent, an executed command goes through several phases which can be followed - ExecutionStateChangedEvent, ******** todo :return: an array of Events or empty array :rtype: list raises ValueError in case of protocol issues :Seealso: - apply_actions - launch_action_group - get_history
11,332
def confirm_reservation(self, username, domain, password, email=None): self.set_password(username=username, domain=domain, password=password) if email is not None: self.set_email(username=username, domain=domain, email=email)
Confirm a reservation for a username. The default implementation just calls :py:func:`~xmpp_backends.base.XmppBackendBase.set_password` and optionally :py:func:`~xmpp_backends.base.XmppBackendBase.set_email`.
11,333
def _dict_to_object(desired_type: Type[T], contents_dict: Dict[str, Any], logger: Logger, options: Dict[str, Dict[str, Any]], conversion_finder: ConversionFinder = None, is_dict_of_dicts: bool = False) -> T: constructor_args_types_and_opt = get_constructor_attributes_types(desired_type) try: dict_for_init = dict() for attr_name, provided_attr_value in contents_dict.items(): if attr_name in constructor_args_types_and_opt.keys(): attr_type_required = constructor_args_types_and_opt[attr_name][0] attr_type_required = resolve_forward_ref(attr_type_required) if not is_dict_of_dicts: if is_valid_pep484_type_hint(attr_type_required): full_attr_name = get_pretty_type_str(desired_type) + + attr_name dict_for_init[attr_name] = ConversionFinder.try_convert_value(conversion_finder, full_attr_name, provided_attr_value, attr_type_required, logger, options) else: warn("Constructor for type <{t}> has no valid PEP484 Type hint for attribute {att}, trying to " "use the parsed value in the dict directly".format(t=get_pretty_type_str(desired_type), att=attr_name)) dict_for_init[attr_name] = provided_attr_value else: if isinstance(provided_attr_value, dict): if (attr_type_required is None) or (attr_type_required is Parameter.empty): raise TypeInformationRequiredError.create_for_object_attributes(desired_type, attr_name, attr_type_required) elif not is_valid_pep484_type_hint(attr_type_required): raise InvalidPEP484TypeHint.create_for_object_attributes(desired_type, attr_name, attr_type_required) else: dict_for_init[attr_name] = dict_to_object(attr_type_required, provided_attr_value, logger, options, conversion_finder=conversion_finder) else: raise ValueError( + str(desired_type) + \) else: if is_dict_of_dicts and attr_name is : pass else: raise InvalidAttributeNameForConstructorError.create(desired_type, list(set(constructor_args_types_and_opt.keys()) - {}), attr_name) try: return desired_type(**dict_for_init) except Exception as e: raise ObjectInstantiationException.create(desired_type, dict_for_init, e) except TypeError as e: raise CaughtTypeErrorDuringInstantiation.create(desired_type, contents_dict, e)
Utility method to create an object from a dictionary of constructor arguments. Constructor arguments that don't have the correct type are intelligently converted if possible :param desired_type: :param contents_dict: :param logger: :param options: :param conversion_finder: :param is_dict_of_dicts: :return:
11,334
def set_trace(context): try: import ipdb as pdb except ImportError: import pdb print("For best results, pip install ipdb.") print("Variables that are available in the current context:") render = lambda s: template.Template(s).render(context) availables = get_variables(context) pprint(availables) print() print() print() for var in availables: locals()[var] = context[var] pdb.set_trace() return
Start a pdb set_trace inside of the template with the context available as 'context'. Uses ipdb if available.
11,335
def fix_version(context): if not prerequisites_ok(): return lines = codecs.open('metadata.txt', 'r', 'utf-8').readlines() for index, line in enumerate(lines): if line.startswith('version='): new_line = 'version=%s\n' % context['new_version'] lines[index] = new_line time.sleep(1) codecs.open('metadata.txt', 'w', 'utf-8').writelines(lines)
Fix the version in metadata.txt Relevant context dict item for both prerelease and postrelease: ``new_version``.
11,336
def _remove(self, obj): for idx, item in enumerate(self._queue): if item == obj: del self._queue[idx] break
Python 2.4 compatibility.
11,337
def conditionally_create_profile(role_name, service_type): if service_type not in INSTANCE_PROFILE_SERVICE_TYPES: print_if_verbose("service type: {} not eligible for instance profile".format(service_type)) return instance_profile = get_instance_profile(role_name) if not instance_profile: print("Create instance profile: {}".format(role_name)) if CONTEXT.commit: try: instance_profile = CLIENTS["iam"].create_instance_profile(InstanceProfileName=role_name) except ClientError as error: fail("Exception creating instance profile named: {} {}".format(role_name, sys.exc_info(), error)) else: print_if_verbose("instance profile already exists: {}".format(role_name)) if instance_profile and not instance_profile_contains_role(instance_profile, role_name): print("Add role: {} to instance profile: {}".format(role_name, role_name)) if CONTEXT.commit: try: CLIENTS["iam"].add_role_to_instance_profile(InstanceProfileName=role_name, RoleName=role_name) except ClientError as error: fail("Exception adding role to instance profile: {} {}".format(role_name, sys.exc_info(), error)) else: print_if_verbose("instance profile already contains role: {}".format(role_name))
Check that there is a 1:1 correspondence with an InstanceProfile having the same name as the role, and that the role is contained in it. Create InstanceProfile and attach to role if needed.
11,338
def build_dated_queryset(self): qs = self.get_dated_queryset() years = self.get_date_list(qs) [self.build_year(dt) for dt in years]
Build pages for all years in the queryset.
11,339
def init_logger(self): self.logger.setLevel(logging.DEBUG) self.logger.propagate = False if self.min_log_level_to_print: level = self.min_log_level_to_print handler_class = logging.StreamHandler self._create_handler(handler_class, level) if self.min_log_level_to_save: level = self.min_log_level_to_save handler_class = logging.handlers.TimedRotatingFileHandler self._create_handler(handler_class, level) if self.min_log_level_to_syslog: level = self.min_log_level_to_syslog handler_class = logging.handlers.SysLogHandler self._create_handler(handler_class, level) if self.min_log_level_to_mail: level = self.min_log_level_to_mail handler_class = AlkiviEmailHandler self._create_handler(handler_class, level) return
Create configuration for the root logger.
11,340
def basicConfig(level=logging.WARNING, transient_level=logging.NOTSET): fmt = "%(asctime)s [%(levelname)s] [%(name)s:%(lineno)d] %(message)s" logging.root.setLevel(transient_level) hand = TransientStreamHandler(level=level) hand.setFormatter(logging.Formatter(fmt)) logging.root.addHandler(hand)
Shortcut for setting up transient logging I am a replica of ``logging.basicConfig`` which installs a transient logging handler to stderr.
11,341
def create(self, user_id, name, department=None, position=None, mobile=None, gender=0, tel=None, email=None, weixin_id=None, extattr=None): user_data = optionaldict() user_data[] = user_id user_data[] = name user_data[] = gender user_data[] = department user_data[] = position user_data[] = mobile user_data[] = tel user_data[] = email user_data[] = weixin_id user_data[] = extattr return self._post( , data=user_data )
εˆ›ε»Ίζˆε‘˜ https://work.weixin.qq.com/api/doc#90000/90135/90195
11,342
def get_api_versions(call=None, kwargs=None): if kwargs is None: kwargs = {} if not in kwargs: raise SaltCloudSystemExit( ) if not in kwargs: raise SaltCloudSystemExit( ) api_versions = [] try: resconn = get_conn(client_type=) provider_query = resconn.providers.get( resource_provider_namespace=kwargs[] ) for resource in provider_query.resource_types: if six.text_type(resource.resource_type) == kwargs[]: resource_dict = resource.as_dict() api_versions = resource_dict[] except CloudError as exc: __utils__[](, exc.message) return api_versions
Get the API versions for a resource type.
11,343
def get_transfer_role(chain_state: ChainState, secrethash: SecretHash) -> Optional[str]: task = chain_state.payment_mapping.secrethashes_to_task.get(secrethash) if not task: return None return role_from_transfer_task(task)
Returns 'initiator', 'mediator' or 'target' to signify the role the node has in a transfer. If a transfer task is not found for the secrethash then the function returns None
11,344
def compile_highstate(self): err = [] top = self.get_top() err += self.verify_tops(top) matches = self.top_matches(top) high, errors = self.render_highstate(matches) err += errors if err: return err return high
Return just the highstate or the errors
11,345
def eeg_microstates_plot(method, path="", extension=".png", show_sensors_position=False, show_sensors_name=False, plot=True, save=True, dpi=150, contours=0, colorbar=False, separate=False): figures = [] names = [] try: microstates = method["microstates_good_fit"] except KeyError: microstates = method["microstates"] for microstate in set(microstates): if microstate != "Bad": values = np.mean(method["data"][np.where(microstates == microstate)], axis=0) values = np.array(values, ndmin=2).T evoked = mne.EvokedArray(values, method["raw.info_example"], 0) fig = evoked.plot_topomap(times=0, title=microstate, size=6, contours=contours, time_format="", show=plot, colorbar=colorbar, show_names=show_sensors_name, sensors=show_sensors_position) figures.append(fig) name = path + "microstate_%s_%s%s%s_%s%i_%s%s" %(microstate, method["data_scale"], method["data_normalize"], method["data_smoothing"], method["feature_reduction_method"], method["n_features"], method["clustering_method"], extension) fig.savefig(name, dpi=dpi) names.append(name) if save is True: image_template = PIL.Image.open(names[0]) X, Y = image_template.size image_template.close() combined = PIL.Image.new(, (int(X*len(set(microstates))/2), int( Y*len(set(microstates))/2))) fig = 0 for x in np.arange(0, len(set(microstates))/2*int(X), int(X)): for y in np.arange(0, len(set(microstates))/2*int(Y), int(Y)): try: newfig = PIL.Image.open(names[fig]) combined.paste(newfig, (int(x), int(y))) newfig.close() except: pass fig += 1 combined_name = path + "microstates_%s%s%s_%s%i_%s%s" %(method["data_scale"], method["data_normalize"], method["data_smoothing"], method["feature_reduction_method"], method["n_features"], method["clustering_method"], extension) combined.save(combined_name) if separate is False or save is False: for name in names: os.remove(name) return(figures)
Plot the microstates.
11,346
def analytic_file(self, new_status, old_status=None): if not old_status: old_status = self.domain_status if "file_to_test" in PyFunceble.INTERN and PyFunceble.INTERN["file_to_test"]: output = ( self.output_parent_dir + PyFunceble.OUTPUTS["analytic"]["directories"]["parent"] + "%s%s" ) if new_status.lower() in PyFunceble.STATUS["list"]["up"]: output = output % ( PyFunceble.OUTPUTS["analytic"]["directories"]["up"], PyFunceble.OUTPUTS["analytic"]["filenames"]["up"], ) Generate("HTTP_Active").info_files() elif new_status.lower() in PyFunceble.STATUS["list"]["potentially_up"]: output = output % ( PyFunceble.OUTPUTS["analytic"]["directories"]["potentially_up"], PyFunceble.OUTPUTS["analytic"]["filenames"]["potentially_up"], ) Generate("potentially_up").info_files() elif new_status.lower() in PyFunceble.STATUS["list"]["suspicious"]: output = output % ( PyFunceble.OUTPUTS["analytic"]["directories"]["suspicious"], PyFunceble.OUTPUTS["analytic"]["filenames"]["suspicious"], ) Generate("suspicious").info_files() else: output = output % ( PyFunceble.OUTPUTS["analytic"]["directories"]["potentially_down"], PyFunceble.OUTPUTS["analytic"]["filenames"]["potentially_down"], ) Generate("potentially_down").info_files() Prints( [ self.tested, old_status, PyFunceble.INTERN["http_code"], PyFunceble.CURRENT_TIME, ], "HTTP", output, True, ).data()
Generate :code:`Analytic/*` files based on the given old and new statuses. :param new_status: The new status of the domain. :type new_status: str :param old_status: The old status of the domain. :type old_status: str
11,347
def com_daltonmaag_check_ufolint(font): import subprocess ufolint_cmd = ["ufolint", font] try: subprocess.check_output(ufolint_cmd, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: yield FAIL, ("ufolint failed the UFO source. Output follows :" "\n\n{}\n").format(e.output.decode()) except OSError: yield ERROR, "ufolint is not available!" else: yield PASS, "ufolint passed the UFO source."
Run ufolint on UFO source directory.
11,348
def calculate_query_times(**kwargs): return { "total_time_avg": round(numpy.mean(kwargs["total_times"]), 1), "total_time_min": round(numpy.min(kwargs["total_times"]), 1), "total_time_max": round(numpy.max(kwargs["total_times"]), 1), "total_time_85": round(numpy.percentile(kwargs["total_times"], 85), 1), "execution_time_avg": round(numpy.mean(kwargs["execution_times"]), 1), "execution_time_min": round(numpy.min(kwargs["execution_times"]), 1), "execution_time_max": round(numpy.max(kwargs["execution_times"]), 1), "execution_time_85": round( numpy.percentile(kwargs["execution_times"], 85), 1 ), "execution_time_25": round( numpy.percentile(kwargs["execution_times"], 25), 1 ), "execution_time_std": round(numpy.std(kwargs["execution_times"]), 1), "connect_time_avg": round(numpy.mean(kwargs["connect_times"]), 1), "connect_time_min": round(numpy.min(kwargs["connect_times"]), 1), "connect_time_max": round(numpy.max(kwargs["connect_times"]), 1), "connect_time_85": round( numpy.percentile(kwargs["connect_times"], 85), 1 ), "results_iter_time_avg": round( numpy.mean(kwargs["results_iter_times"]), 1 ), "results_iter_time_min": round( numpy.min(kwargs["results_iter_times"]), 1 ), "results_iter_time_max": round( numpy.max(kwargs["results_iter_times"]), 1 ), "results_iter_time_85": round( numpy.percentile(kwargs["results_iter_times"], 85), 1 ), }
Calculates aggregate query times from all iteration times Kwargs: total_times(list): List of total time calculations execution_times(list): List of execution_time calculations results_iter_times(list): List of results_iter_time calculations connect_times(list): List of connect_time calculations Returns: query_execution(dict): Query times False(bool): The query failed. Exception should be logged.
11,349
def intersection_update(self, other): if not isinstance(other, Set): raise ValueError() if self is other: return for item in list(self.items): if item not in other.items: self.items.remove(item)
Update the set, removing any elements from other which are not in both sets. @param other: the collection of items with which to update the set @type other: Set object
11,350
def parents( self, node ): if in node: index = node[]() parents = list(meliaeloader.children( node, index, )) return parents return []
Retrieve/calculate the set of parents for the given node
11,351
def init_db(drop_all=False, bind=engine): try: if drop_all: Base.metadata.drop_all(bind=bind) Base.metadata.create_all(bind=bind) except OperationalError as err: msg = if msg in err.message: sys.stderr.write(db_user_warning) raise return session
Initialize the database, optionally dropping existing tables.
11,352
def choose_database_name(metadata, config): if config.database_name is not None: return config.database_name if metadata.testing: return f"{metadata.name}_test_db" return f"{metadata.name}_db"
Choose the database name to use. As a default, databases should be named after the service that uses them. In addition, database names should be different between unit testing and runtime so that there is no chance of a unit test dropping a real database by accident.
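A minimal illustration of the naming convention; Metadata and Config here are stand-in classes, since the real ones are not shown in this row.

```python
from dataclasses import dataclass
from typing import Optional

@dataclass
class Metadata:
    name: str
    testing: bool = False

@dataclass
class Config:
    database_name: Optional[str] = None

def choose_database_name(metadata, config):
    if config.database_name is not None:
        return config.database_name
    if metadata.testing:
        return f"{metadata.name}_test_db"
    return f"{metadata.name}_db"

print(choose_database_name(Metadata("orders"), Config()))                 # orders_db
print(choose_database_name(Metadata("orders", testing=True), Config()))   # orders_test_db
print(choose_database_name(Metadata("orders"), Config("legacy_db")))      # legacy_db
```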
11,353
def hashable_to_uuid(hashable_): bytes_ = _ensure_hashable_bytes(hashable_) try: bytes_sha1 = hashlib.sha1(bytes_) except TypeError: print( % (hashable_,)) print( % (bytes_,)) raise hashbytes_20 = bytes_sha1.digest() hashbytes_16 = hashbytes_20[0:16] uuid_ = uuid.UUID(bytes=hashbytes_16) return uuid_
TODO: ensure that python2 and python3 agree on hashes of the same information Args: hashable_ (hashable): hashables are bytes-like objects An object that supports the Buffer Protocol, like bytes, bytearray or memoryview. Bytes-like objects can be used for various operations that expect binary data, such as compression, saving to a binary file or sending over a socket. Some operations need the binary data to be mutable, in which case not all bytes-like objects can apply. Returns: UUID: uuid_ CommandLine: python -m utool.util_hash --test-hashable_to_uuid Example: >>> # ENABLE_DOCTEST >>> from utool.util_hash import * # NOQA >>> import utool as ut >>> hashables = [ >>> 'foobar', >>> 'foobar'.encode('utf-8'), >>> u'foobar', >>> 10, >>> [1, 2, 3], >>> ] >>> uuids = [] >>> for hashable_ in hashables: >>> uuid_ = hashable_to_uuid(hashable_) >>> uuids.append(uuid_) >>> result = ut.repr4(ut.lmap(str, uuids), strvals=True, nobr=True) >>> print(result) 8843d7f9-2416-211d-e9eb-b963ff4ce281, 8843d7f9-2416-211d-e9eb-b963ff4ce281, 8843d7f9-2416-211d-e9eb-b963ff4ce281, e864ece8-8880-43b6-7277-c8b2cefe96ad, a01eda32-e4e0-b139-3274-e91d1b3e9ecf,
11,354
def number_observer(t=None, targets=None): from ecell4_base.core import NumberObserver, FixedIntervalNumberObserver, TimingNumberObserver if t is None: return NumberObserver(targets) elif isinstance(t, numbers.Number): return FixedIntervalNumberObserver(t, targets) elif hasattr(t, '__iter__'): if targets is not None: return TimingNumberObserver(t, targets) else: return TimingNumberObserver(t) else: raise TypeError("An invalid type was given. Either number or iterable is required.")
Return a number observer. If t is None, return NumberObserver. If t is a number, return FixedIntervalNumberObserver. If t is an iterable (a list of numbers), return TimingNumberObserver. Parameters ---------- t : float, list or tuple, optional. default None A timing of the observation. See above. targets : list or tuple, optional. default None A list of strings suggesting Species observed. Returns ------- obs : NumberObserver, FixedIntervalNumberObserver or TimingNumberObserver
11,355
def save(self, *args, **kwargs): self.uid = 'electiontype:{}'.format(self.slug) super(ElectionType, self).save(*args, **kwargs)
**uid**: :code:`electiontype:{name}`
11,356
def gen_salt_and_hash(val=None): if not val: val = random_str() str_salt = random_str() str_hash = hashlib.sha256(val + str_salt).hexdigest() return str_salt, str_hash
Generate a salt & hash If no string is provided then a random string will be used to hash & referred to as `val`. The salt will always be randomly generated & the hash will be a sha256 hex value of the `val` & the salt as a concatenated string. It follows the guidance here: crackstation.net/hashing-security.htm#properhashing :param val: str :return: tuple of strings (salt, hash)
11,357
def rectwv_coeff_add_longslit_model(rectwv_coeff, geometry, debugplot=0): logger = logging.getLogger(__name__) grism_name = rectwv_coeff.tags[] logger.info( + grism_name) filter_name = rectwv_coeff.tags[] logger.info( + filter_name) list_valid_islitlets = list(range(1, EMIR_NBARS + 1)) for idel in rectwv_coeff.missing_slitlets: list_valid_islitlets.remove(idel) if abs(debugplot) >= 10: print(, list_valid_islitlets) csu_bar_slit_center_list = [] for islitlet in list_valid_islitlets: csu_bar_slit_center_list.append( rectwv_coeff.contents[islitlet - 1][] ) if abs(debugplot) >= 10: logger.debug() summary(np.array(csu_bar_slit_center_list), debug=True) pause_debugplot(debugplot) poldeg_refined_list = [] for islitlet in list_valid_islitlets: poldeg_refined_list.append( len(rectwv_coeff.contents[islitlet - 1][]) - 1 ) poldeg_refined_list = list(set(poldeg_refined_list)) if len(poldeg_refined_list) != 1: raise ValueError( + str(poldeg_refined_list)) poldeg_refined = poldeg_refined_list[0] list_poly = [] for i in range(poldeg_refined + 1): xp = [] yp = [] for islitlet in list_valid_islitlets: tmp_dict = rectwv_coeff.contents[islitlet - 1] wpoly_coeff = tmp_dict[] if wpoly_coeff is not None: xp.append(tmp_dict[]) yp.append(wpoly_coeff[i]) poly, yres, reject = polfit_residuals_with_sigma_rejection( x=np.array(xp), y=np.array(yp), deg=2, times_sigma_reject=5, xlabel=, ylabel= + str(i) + , title="Fit to refined wavelength calibration coefficients", geometry=geometry, debugplot=debugplot ) list_poly.append(poly) for islitlet in list_valid_islitlets: tmp_dict = rectwv_coeff.contents[islitlet - 1] y0_reference_middle = tmp_dict[] list_new_coeff = [] for i in range(poldeg_refined + 1): new_coeff = list_poly[i](y0_reference_middle) list_new_coeff.append(new_coeff) tmp_dict[] = list_new_coeff order_fmap_list = [] for islitlet in list_valid_islitlets: order_fmap_list.append( rectwv_coeff.contents[islitlet - 1][] ) order_fmap_list = list(set(order_fmap_list)) if len(order_fmap_list) != 1: raise ValueError() order_fmap = order_fmap_list[0] list_poly_ttd_aij = [] list_poly_ttd_bij = [] list_poly_tti_aij = [] list_poly_tti_bij = [] ncoef_ttd = ncoef_fmap(order_fmap) for i in range(ncoef_ttd): xp = [] yp_ttd_aij = [] yp_ttd_bij = [] yp_tti_aij = [] yp_tti_bij = [] for islitlet in list_valid_islitlets: tmp_dict = rectwv_coeff.contents[islitlet - 1] ttd_aij = tmp_dict[] ttd_bij = tmp_dict[] tti_aij = tmp_dict[] tti_bij = tmp_dict[] if ttd_aij is not None: xp.append(tmp_dict[]) yp_ttd_aij.append(ttd_aij[i]) yp_ttd_bij.append(ttd_bij[i]) yp_tti_aij.append(tti_aij[i]) yp_tti_bij.append(tti_bij[i]) poly, yres, reject = polfit_residuals_with_sigma_rejection( x=np.array(xp), y=np.array(yp_ttd_aij), deg=5, times_sigma_reject=5, xlabel=, ylabel= + str(i) + , geometry=geometry, debugplot=debugplot ) list_poly_ttd_aij.append(poly) poly, yres, reject = polfit_residuals_with_sigma_rejection( x=np.array(xp), y=np.array(yp_ttd_bij), deg=5, times_sigma_reject=5, xlabel=, ylabel= + str(i) + , geometry=geometry, debugplot=debugplot ) list_poly_ttd_bij.append(poly) poly, yres, reject = polfit_residuals_with_sigma_rejection( x=np.array(xp), y=np.array(yp_tti_aij), deg=5, times_sigma_reject=5, xlabel=, ylabel= + str(i) + , geometry=geometry, debugplot=debugplot ) list_poly_tti_aij.append(poly) poly, yres, reject = polfit_residuals_with_sigma_rejection( x=np.array(xp), y=np.array(yp_tti_bij), deg=5, times_sigma_reject=5, xlabel=, ylabel= + str(i) + , geometry=geometry, debugplot=debugplot ) list_poly_tti_bij.append(poly) for islitlet in 
list_valid_islitlets: tmp_dict = rectwv_coeff.contents[islitlet - 1] y0_reference_middle = tmp_dict[] tmp_dict[] = order_fmap ttd_aij_longslit_model = [] ttd_bij_longslit_model = [] tti_aij_longslit_model = [] tti_bij_longslit_model = [] for i in range(ncoef_ttd): new_coeff = list_poly_ttd_aij[i](y0_reference_middle) ttd_aij_longslit_model.append(new_coeff) new_coeff = list_poly_ttd_bij[i](y0_reference_middle) ttd_bij_longslit_model.append(new_coeff) new_coeff = list_poly_tti_aij[i](y0_reference_middle) tti_aij_longslit_model.append(new_coeff) new_coeff = list_poly_tti_bij[i](y0_reference_middle) tti_bij_longslit_model.append(new_coeff) tmp_dict[] = ttd_aij_longslit_model tmp_dict[] = ttd_bij_longslit_model tmp_dict[] = tti_aij_longslit_model tmp_dict[] = tti_bij_longslit_model rectwv_coeff.uuid = str(uuid4()) rectwv_coeff.meta_info[] = datetime.now().isoformat() return rectwv_coeff
Compute longslit_model coefficients for RectWaveCoeff object. Parameters ---------- rectwv_coeff : RectWaveCoeff instance Rectification and wavelength calibration coefficients for a particular CSU configuration corresponding to a longslit observation. geometry : TBD debugplot : int Debugging level for messages and plots. For details see 'numina.array.display.pause_debugplot.py'. Returns ------- rectwv_coeff : RectWaveCoeff instance Updated object with longslit_model coefficients computed.
11,358
def mark_quality(self, start_time, length, qual_name): y_pos = BARS[][] height = 10 old_score = self.scene.itemAt(start_time + length / 2, y_pos + height - 1, self.transform()) if old_score is not None and old_score.pen() == NoPen: lg.debug(.format(start_time)) self.scene.removeItem(old_score) self.idx_annot.remove(old_score) if qual_name == : rect = QGraphicsRectItem(start_time, y_pos, length, height) rect.setPen(NoPen) rect.setBrush(Qt.black) self.scene.addItem(rect) self.idx_annot.append(rect)
Mark signal quality, only add the new ones. Parameters ---------- start_time : int start time in s of the epoch being scored. length : int duration in s of the epoch being scored. qual_name : str one of the stages defined in global stages.
11,359
def to_representation(self, instance): request = self.context[] enterprise_customer = instance.enterprise_customer representation = super(EnterpriseCustomerCatalogDetailSerializer, self).to_representation(instance) paginated_content = instance.get_paginated_content(request.GET) count = paginated_content[] search_results = paginated_content[] for item in search_results: content_type = item[] marketing_url = item.get() if marketing_url: item[] = utils.update_query_parameters( marketing_url, utils.get_enterprise_utm_context(enterprise_customer) ) if content_type == : item[] = instance.get_course_enrollment_url(item[]) if content_type == : item[] = instance.get_course_run_enrollment_url(item[]) if content_type == : item[] = instance.get_program_enrollment_url(item[]) previous_url = None next_url = None page = int(request.GET.get(, )) request_uri = request.build_absolute_uri() if paginated_content[]: previous_url = utils.update_query_parameters(request_uri, {: page - 1}) if paginated_content[]: next_url = utils.update_query_parameters(request_uri, {: page + 1}) representation[] = count representation[] = previous_url representation[] = next_url representation[] = search_results return representation
Serialize the EnterpriseCustomerCatalog object. Arguments: instance (EnterpriseCustomerCatalog): The EnterpriseCustomerCatalog to serialize. Returns: dict: The EnterpriseCustomerCatalog converted to a dict.
11,360
def base_taskname(taskname, packagename=None): if not isinstance(taskname, str): return taskname indx = taskname.rfind('.') if indx >= 0: base_taskname = taskname[(indx+1):] pkg_name = taskname[:indx] else: base_taskname = taskname pkg_name = '' assert(True if packagename is None else (packagename == pkg_name)) return base_taskname
Extract the base name of the task. Many tasks in the `drizzlepac` have "compound" names such as 'drizzlepac.sky'. This function will search for the presence of a dot in the input `taskname` and if found, it will return the string to the right of the right-most dot. If a dot is not found, it will return the input string. Parameters ---------- taskname : str, None Full task name. If it is `None`, :py:func:`base_taskname` will return `None`\ . packagename : str, None (Default = None) Package name. It is assumed that a compound task name is formed by concatenating `packagename` + '.' + `taskname`\ . If `packagename` is not `None`, :py:func:`base_taskname` will check that the string to the left of the right-most dot matches `packagename` and will raise an `AssertionError` if the package name derived from the input `taskname` does not match the supplied `packagename`\ . This is intended as a check for discrepancies that may arise during the development of the tasks. If `packagename` is `None`, no such check will be performed. Raises ------ AssertionError Raised when package name derived from the input `taskname` does not match the supplied `packagename`
11,361
def __get_query_filters(cls, filters={}, inverse=False): query_filters = [] for name in filters: if name[0] == '*' and not inverse: continue if name[0] != '*' and inverse: continue field_name = name[1:] if name[0] == '*' else name params = {field_name: filters[name]} query_filters.append(Q('match_phrase', **params)) return query_filters
Convert a dict with the filters to be applied ({"name1":"value1", "name2":"value2"}) to a list of query objects which can be used together in a query using boolean combination logic. :param filters: dict with the filters to be applied :param inverse: if True include all the inverse filters (the one starting with *) :return: a list of es_dsl 'MatchPhrase' Query objects Ex: [MatchPhrase(name1="value1"), MatchPhrase(name2="value2"), ..] Dict representation of the object: {'match_phrase': {'field': 'home'}}
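A standalone sketch of the same dict-to-query conversion using elasticsearch_dsl's Q helper, to show the intended shape of the result; the function name and the '*' prefix convention follow the docstring above.

```python
from elasticsearch_dsl import Q

def get_query_filters(filters=None, inverse=False):
    filters = filters or {}
    query_filters = []
    for name in filters:
        # Names prefixed with '*' are "inverse" filters; keep only the requested kind.
        if name.startswith("*") != inverse:
            continue
        field_name = name[1:] if name.startswith("*") else name
        query_filters.append(Q("match_phrase", **{field_name: filters[name]}))
    return query_filters

positive = get_query_filters({"name1": "value1", "*name2": "value2"})
negative = get_query_filters({"name1": "value1", "*name2": "value2"}, inverse=True)
print(positive)  # e.g. [MatchPhrase(name1='value1')]
print(negative)  # e.g. [MatchPhrase(name2='value2')]
```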
11,362
def blit(self, surface, pos=(0, 0)): for x in range(surface.width): for y in range(surface.height): point = (x + pos[0], y + pos[1]) if self.point_on_screen(point): self.matrix[point[0]][point[1]] = surface.matrix[x][y]
Blits a surface on the screen at pos :param surface: Surface to blit :param pos: Top left corner to start blitting :type surface: Surface :type pos: tuple
11,363
def disable_metrics_collection(self, as_group, metrics=None): params = {'AutoScalingGroupName': as_group} if metrics: self.build_list_params(params, metrics, 'Metrics.member') return self.get_status('DisableMetricsCollection', params)
Disables monitoring of group metrics for the Auto Scaling group specified in AutoScalingGroupName. You can specify the list of affected metrics with the Metrics parameter.
11,364
def router_main(self): while True: gotpkt = True try: timestamp,dev,pkt = self.net.recv_packet(timeout=1.0) except NoPackets: log_debug("No packets available in recv_packet") gotpkt = False except Shutdown: log_debug("Got shutdown signal") break if gotpkt: log_debug("Got a packet: {}".format(str(pkt)))
Main method for router; we stay in a loop in this method, receiving packets until the end of time.
11,365
def _validate_pending_children(self): for n in self.pending_children: assert n.state in (NODE_PENDING, NODE_EXECUTING), \ (str(n), StateString[n.state]) assert len(n.waiting_parents) != 0, (str(n), len(n.waiting_parents)) for p in n.waiting_parents: assert p.ref_count > 0, (str(n), str(p), p.ref_count)
Validate the content of the pending_children set. Assert if an internal error is found. This function is used strictly for debugging the taskmaster by checking that no invariants are violated. It is not used in normal operation. The pending_children set is used to detect cycles in the dependency graph. We call a "pending child" a child that is found in the "pending" state when checking the dependencies of its parent node. A pending child can occur when the Taskmaster completes a loop through a cycle. For example, let's imagine a graph made of three nodes (A, B and C) making a cycle. The evaluation starts at node A. The Taskmaster first considers whether node A's child B is up-to-date. Then, recursively, node B needs to check whether node C is up-to-date. This leaves us with a dependency graph looking like:: Next candidate \ \ Node A (Pending) --> Node B(Pending) --> Node C (NoState) ^ | | | +-------------------------------------+ Now, when the Taskmaster examines the Node C's child Node A, it finds that Node A is in the "pending" state. Therefore, Node A is a pending child of node C. Pending children indicate that the Taskmaster has potentially loop back through a cycle. We say potentially because it could also occur when a DAG is evaluated in parallel. For example, consider the following graph:: Node A (Pending) --> Node B(Pending) --> Node C (Pending) --> ... | ^ | | +----------> Node D (NoState) --------+ / Next candidate / The Taskmaster first evaluates the nodes A, B, and C and starts building some children of node C. Assuming, that the maximum parallel level has not been reached, the Taskmaster will examine Node D. It will find that Node C is a pending child of Node D. In summary, evaluating a graph with a cycle will always involve a pending child at one point. A pending child might indicate either a cycle or a diamond-shaped DAG. Only a fraction of the nodes ends-up being a "pending child" of another node. This keeps the pending_children set small in practice. We can differentiate between the two cases if we wait until the end of the build. At this point, all the pending children nodes due to a diamond-shaped DAG will have been properly built (or will have failed to build). But, the pending children involved in a cycle will still be in the pending state. The taskmaster removes nodes from the pending_children set as soon as a pending_children node moves out of the pending state. This also helps to keep the pending_children set small.
11,366
def import_locations(self, cells_file): self._cells_file = cells_file field_names = (, , , , , , , , , , ) parse_date = lambda s: datetime.datetime.strptime(s, ) field_parsers = (int, float, float, int, int, int, int, int, int, parse_date, parse_date) data = utils.prepare_csv_read(cells_file, field_names) for row in data: try: cell = dict((n, p(row[n])) for n, p in zip(field_names, field_parsers)) except ValueError: if r"\N" in row.values(): logging.debug( % row) break else: raise utils.FileFormatError() else: self[row[]] = Cell(**cell)
Parse OpenCellID.org data files. ``import_locations()`` returns a dictionary with keys containing the OpenCellID.org_ database identifier, and values consisting of a ``Cell`` objects. It expects cell files in the following format:: 22747,52.0438995361328,-0.2246370017529,234,33,2319,647,0,1, 2008-04-05 21:32:40,2008-04-05 21:32:40 22995,52.3305015563965,-0.2255620062351,234,10,20566,4068,0,1, 2008-04-05 21:32:59,2008-04-05 21:32:59 23008,52.3506011962891,-0.2234109938145,234,10,10566,4068,0,1, 2008-04-05 21:32:59,2008-04-05 21:32:59 The above file processed by ``import_locations()`` will return the following ``dict`` object:: {23008: Cell(23008, 52.3506011963, -0.223410993814, 234, 10, 10566, 4068, 0, 1, datetime.datetime(2008, 4, 5, 21, 32, 59), datetime.datetime(2008, 4, 5, 21, 32, 59)), 22747: Cell(22747, 52.0438995361, -0.224637001753, 234, 33, 2319, 647, 0, 1, datetime.datetime(2008, 4, 5, 21, 32, 40), datetime.datetime(2008, 4, 5, 21, 32, 40)), 22995: Cell(22995, 52.3305015564, -0.225562006235, 234, 10, 20566, 4068, 0, 1, datetime.datetime(2008, 4, 5, 21, 32, 59), datetime.datetime(2008, 4, 5, 21, 32, 59))} Args: cells_file (iter): Cell data to read Returns: dict: Cell data with their associated database identifier .. _OpenCellID.org: http://opencellid.org/
11,367
def doesnt_have(self, relation, boolean='and', extra=None): return self.has(relation, '<', 1, boolean, extra)
Add a relationship count to the query. :param relation: The relation to count :type relation: str :param boolean: The boolean value :type boolean: str :param extra: The extra query :type extra: Builder or callable :rtype: Builder
11,368
def entity(self, entity_id, get_files=False, channel=None, include_stats=True, includes=None): if includes is None: includes = self.DEFAULT_INCLUDES[:] if get_files and 'manifest' not in includes: includes.append('manifest') if not include_stats and 'stats' in includes: includes.remove('stats') return self._meta(entity_id, includes, channel=channel)
Get the default data for any entity (e.g. bundle or charm). @param entity_id The entity's id either as a reference or a string @param get_files Whether to fetch the files for the charm or not. @param channel Optional channel name. @param include_stats Optionally disable stats collection. @param includes An optional list of meta info to include, as a sequence of strings. If None, the default include list is used.
11,369
def to_even_columns(data, headers=None): result = "" col_width = max(len(word) for row in data for word in row) + 2 if headers: header_width = max(len(word) for row in headers for word in row) + 2 if header_width > col_width: col_width = header_width result += "".join(word.ljust(col_width) for word in headers) + "\n" result += "-" * col_width * len(headers) + "\n" for row in data: result += "".join(word.ljust(col_width) for word in row) + "\n" return result
Nicely format the 2-dimensional list into evenly spaced columns
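A self-contained variant of the same idea for illustration; it assumes the missing literals in the row's code are an empty accumulator string and a '-' divider.

```python
def to_even_columns(data, headers=None):
    # Pad every cell to the widest cell plus two spaces.
    rows = ([headers] if headers else []) + data
    col_width = max(len(word) for row in rows for word in row) + 2
    lines = []
    if headers:
        lines.append("".join(word.ljust(col_width) for word in headers))
        lines.append("-" * col_width * len(headers))
    for row in data:
        lines.append("".join(word.ljust(col_width) for word in row))
    return "\n".join(lines) + "\n"

print(to_even_columns([["ada", "1815"], ["turing", "1912"]],
                      headers=["name", "born"]))
# name    born
# ----------------
# ada     1815
# turing  1912
```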
11,370
def _argument_adapter(callback): def wrapper(*args, **kwargs): if kwargs or len(args) > 1: callback(Arguments(args, kwargs)) elif args: callback(args[0]) else: callback(None) return wrapper
Returns a function that when invoked runs ``callback`` with one arg. If the function returned by this function is called with exactly one argument, that argument is passed to ``callback``. Otherwise the args tuple and kwargs dict are wrapped in an `Arguments` object.
11,371
def get_next_step(self): if self.parent.step_kw_purpose.\ selected_purpose() == layer_purpose_hazard: new_step = self.parent.step_kw_hazard_category else: if is_raster_layer(self.parent.layer): new_step = self.parent.step_kw_band_selector else: new_step = self.parent.step_kw_layermode return new_step
Find the proper step when user clicks the Next button. :returns: The step to be switched to :rtype: WizardStep instance or None
11,372
def get_two_parameters(self, regex_exp, parameters): Rx, Ry, other = self.get_parameters(regex_exp, parameters) if other is not None and other.strip(): raise iarm.exceptions.ParsingError("Extra arguments found: {}".format(other)) if Rx and Ry: return Rx.upper(), Ry.upper() elif not Rx: raise iarm.exceptions.ParsingError("Missing first positional argument") else: raise iarm.exceptions.ParsingError("Missing second positional argument")
Get two parameters from a given regex expression Raise an exception if more than two were found :param regex_exp: :param parameters: :return:
11,373
def _unescape(self, msg): if isinstance(msg, (int, float, long)): return msg unescaped = '' i = 0 while i < len(msg): unescaped += msg[i] if msg[i] == '"': i+=1 i+=1 return unescaped
Removes double quotes that were used to escape double quotes. Expects a string without its delimiting quotes, or a number. Returns a new unescaped string.
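The expected behaviour, shown as a standalone Python 3 sketch (the row's code is Python 2, hence the long type in its isinstance check).

```python
def unescape(msg):
    if isinstance(msg, (int, float)):
        return msg
    unescaped = ""
    i = 0
    while i < len(msg):
        unescaped += msg[i]
        if msg[i] == '"':
            i += 1  # skip the second quote of an escaped pair
        i += 1
    return unescaped

print(unescape('say ""hello"" twice'))  # say "hello" twice
```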
11,374
def _ensure_unicode_string(string): if not isinstance(string, six.text_type): string = string.decode() return string
Returns a unicode string for string. :param string: The input string. :type string: `basestring` :returns: A unicode string. :rtype: `unicode`
11,375
def from_secrets_file(client_secrets, storage=None, flags=None, storage_path=None, api_version="v3", readonly=False, http_client=None, ga_hook=None): scope = GOOGLE_API_SCOPE_READONLY if readonly else GOOGLE_API_SCOPE flow = flow_from_clientsecrets(client_secrets, scope=scope) storage = _get_storage(storage, storage_path) credentials = storage.get() if credentials is None or credentials.invalid: credentials = run_flow(flow, storage, flags) return Client(_build(credentials, api_version, http_client), ga_hook)
Create a client for a web or installed application. Create a client with a client secrets file. Args: client_secrets: str, path to the client secrets file (downloadable from Google API Console) storage: oauth2client.client.Storage, a Storage implementation to store credentials. storage_path: str, path to a file storage. readonly: bool, default False, if True only readonly access is requested from GA. http_client: httplib2.Http, Override the default http client used. ga_hook: function, a hook that is called every time a query is made against GA.
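A hedged usage sketch; the file paths below are placeholders, not values from the source:

    client = from_secrets_file(
        "client_secrets.json",           # downloaded from the Google API Console
        storage_path="analytics.dat",    # on-disk token cache
        readonly=True,
    )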
11,376
def shutdown(self): result = _lib.SSL_shutdown(self._ssl) if result < 0: self._raise_ssl_error(self._ssl, result) elif result > 0: return True else: return False
Send the shutdown message to the Connection. :return: True if the shutdown completed successfully (i.e. both sides have sent closure alerts), False otherwise (in which case you call :meth:`recv` or :meth:`send` when the connection becomes readable/writeable).
11,377
def _evalTimeStr(self, datetimeString, sourceTime):
    s = datetimeString.strip()
    sourceTime = self._evalDT(datetimeString, sourceTime)
    if s in self.ptc.re_values['now']:
        self.currentContext.updateAccuracy(pdtContext.ACU_NOW)
    else:
        sTime = self.ptc.getSource(s, sourceTime)
        if sTime:
            sourceTime = sTime
            self.currentContext.updateAccuracy(pdtContext.ACU_HALFDAY)
    return sourceTime
Evaluate text passed by L{_partialParseTimeStr()}
11,378
def diffusionAddCountsFromSource(grph, source, target, nodeType = , extraType = None, diffusionLabel = , extraKeys = None, countsDict = None, extraMapping = None): progArgs = (0, "Starting to add counts to graph") if metaknowledge.VERBOSE_MODE: progKwargs = { : False} else: progKwargs = { : True} with _ProgressBar(*progArgs, **progKwargs) as PBar: PBar.updateVal(0, ) if countsDict is None: countsDict = diffusionCount(source, target, sourceType = nodeType, extraValue = extraType, _ProgBar = PBar, extraMapping = extraMapping) try: if not isinstance(countsDict.keys().__iter__().__next__(), str): PBar.updateVal(.5, "Prepping the counts") newCountsDict = {} while True: try: k, v = countsDict.popitem() except KeyError: break newCountsDict[str(k)] = v countsDict = newCountsDict except StopIteration: pass count = 0 for n in grph.nodes_iter(): PBar.updateVal(.5 + .5 * (count / len(grph)), "Adding count for ".format(n)) if extraType is not None: if extraKeys: for key in extraKeys: grph.node[n][key] = 0 grph.node[n][diffusionLabel] = 0 try: for k, v in countsDict[n].items(): if k == : grph.node[n][diffusionLabel] = v else: if k: grph.node[n][k] = v except KeyError: grph.node[n][diffusionLabel] = 0 else: grph.node[n][diffusionLabel] = countsDict.get(n, 0) count += 1 PBar.finish("Done adding diffusion counts to a graph") return countsDict
Does a diffusion using [diffusionCount()](#metaknowledge.diffusion.diffusionCount) and updates _grph_ with it, using the nodes in the graph as keys in the diffusion, i.e. the source. The name of the attribute the counts are added to is given by _diffusionLabel_. If the graph is not composed of citations from the source and is instead built from another tag, _nodeType_ needs to be given that tag string. # Parameters _grph_ : `networkx Graph` > The graph to be updated _source_ : `RecordCollection` > The `RecordCollection` that created _grph_ _target_ : `RecordCollection` > The `RecordCollection` that will be counted _nodeType_ : `optional [str]` > default `'citations'`, the tag that contains the values used to create _grph_ # Returns `dict[:int]` > The counts dictionary used to add values to _grph_. *Note* _grph_ is modified by the function and the return is done in case you need it.
11,379
def get_library(self, username, status=None):
    r = self._query_('/users/%s/library' % username, 'get',
                     params={'status': status})
    results = [LibraryEntry(item) for item in r.json()]
    return results
Fetches a users library. :param str username: The user to get the library from. :param str status: only return the items with the supplied status. Can be one of `currently-watching`, `plan-to-watch`, `completed`, `on-hold` or `dropped`. :returns: List of Library objects.
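A usage sketch against a hypothetical wrapper instance `api`; the username is made up:

    entries = api.get_library("some_user", status="currently-watching")
    for entry in entries:
        print(entry)    # each item is a LibraryEntry wrapping one JSON record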
11,380
def _let_to_py_ast(ctx: GeneratorContext, node: Let) -> GeneratedPyAST: assert node.op == NodeOp.LET with ctx.new_symbol_table("let"): let_body_ast: List[ast.AST] = [] for binding in node.bindings: init_node = binding.init assert init_node is not None init_ast = gen_py_ast(ctx, init_node) binding_name = genname(munge(binding.name)) let_body_ast.extend(init_ast.dependencies) let_body_ast.append( ast.Assign( targets=[ast.Name(id=binding_name, ctx=ast.Store())], value=init_ast.node, ) ) ctx.symbol_table.new_symbol( sym.symbol(binding.name), binding_name, LocalType.LET ) let_result_name = genname("let_result") body_ast = _synthetic_do_to_py_ast(ctx, node.body) let_body_ast.extend(map(statementize, body_ast.dependencies)) let_body_ast.append( ast.Assign( targets=[ast.Name(id=let_result_name, ctx=ast.Store())], value=body_ast.node, ) ) return GeneratedPyAST( node=ast.Name(id=let_result_name, ctx=ast.Load()), dependencies=let_body_ast )
Return a Python AST Node for a `let*` expression.
11,381
def show(self, index): if self.menu and self.menu.parent: self.text = "Return to %s menu" % self.menu.parent.title else: self.text = "Exit" return super(ExitItem, self).show(index)
This class overrides this method
11,382
def get_meta(self, key=None): if self.is_fake: return {} if key == "tag": return self.tag elif key is None: ret = {} for key in self.journal.info.keys(): ret[key] = self.meta_mappings.map_get(self.journal.info, key)[1] return ret else: key, value = self.meta_mappings.map_get(self.journal.info, key) return value
Get metadata value for collection.
11,383
def parse_template(input_filename, output_filename=''):
    data = load_input()

    # Read the template
    with open(input_filename, 'rb') as file:
        template = file.read().decode("utf-8")

    if 'input' not in data:
        raise ValueError("Could not find 'input' in data")

    # Substitute every @prefix@field@postfix@ token with the corresponding input value
    for field in data['input']:
        subs = ["filename", "value"] if isinstance(data['input'][field], dict) and "filename" in data['input'][field] and "value" in data['input'][field] else [""]
        for sub in subs:
            displayed_field = field + (":" if sub else "") + sub
            regex = re.compile("@([^@]*)@" + displayed_field + '@([^@]*)@')
            for prefix, postfix in set(regex.findall(template)):
                if sub == "value":
                    text = open(data['input'][field][sub], 'rb').read().decode()
                elif sub:
                    text = data['input'][field][sub]
                else:
                    text = data['input'][field]
                rep = "\n".join([prefix + v + postfix for v in text.splitlines()])
                template = template.replace("@{0}@{1}@{2}@".format(prefix, displayed_field, postfix), rep)

    if output_filename == '':
        output_filename = input_filename

    try:
        os.makedirs(os.path.dirname(output_filename))
    except OSError as e:
        pass

    with open(output_filename, 'wb') as file:
        file.write(template.encode("utf-8"))
Parses a template file. Replaces all occurrences of @@problem_id@@ with the value of the 'problem_id' key in the data dictionary. input_filename: file to parse output_filename: if not specified, overwrite input file
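For illustration, two ways of calling it (the file names are hypothetical); the token format follows the docstring above:

    # template.html contains the token @@problem_id@@ somewhere;
    # after parsing it is replaced by data['input']['problem_id']
    parse_template("template.html")                       # overwrite in place
    parse_template("template.html", "student/out.html")   # or write elsewhere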
11,384
def _validate_arguments(self):
    super(SplineTerm, self)._validate_arguments()

    if self.basis not in self._bases:
        raise ValueError("basis must be one of {}, "\
                         "but found: {}".format(self._bases, self.basis))

    # n_splines
    self.n_splines = check_param(self.n_splines, param_name='n_splines',
                                 dtype='int', constraint='>= 0')

    # spline_order
    self.spline_order = check_param(self.spline_order,
                                    param_name='spline_order',
                                    dtype='int', constraint='>= 0')

    if not self.n_splines > self.spline_order:
        raise ValueError('n_splines must be > spline_order. '\
                         'found: n_splines = {} and spline_order = {}'\
                         .format(self.n_splines, self.spline_order))

    # by
    if self.by is not None:
        self.by = check_param(self.by, param_name='by',
                              dtype='int', constraint='>= 0')

    return self
method to sanitize model parameters Parameters --------- None Returns ------- None
11,385
def _getScalesRand(self): if self.P>1: scales = [] for term_i in range(self.n_randEffs): _scales = sp.randn(self.diag[term_i].shape[0]) if self.jitter[term_i]>0: _scales = sp.concatenate((_scales,sp.array([sp.sqrt(self.jitter[term_i])]))) scales.append(_scales) scales = sp.concatenate(scales) else: scales=sp.randn(self.vd.getNumberScales()) return scales
Internal function for parameter initialization Return a vector of random scales
11,386
def get_process_by_id(self, process_id): route_values = {} if process_id is not None: route_values[] = self._serialize.url(, process_id, ) response = self._send(http_method=, location_id=, version=, route_values=route_values) return self._deserialize(, response)
GetProcessById. [Preview API] Get a process by ID. :param str process_id: ID for a process. :rtype: :class:`<Process> <azure.devops.v5_1.core.models.Process>`
11,387
def get_hashhash(self, username): return hashlib.sha256( self.users.get_hash(username) ).hexdigest()
Generate a digest of the htpasswd hash
11,388
def do_stack(self, arg): if arg: raise CmdError("too many arguments") pid, tid = self.get_process_and_thread_ids_from_prefix() process = self.get_process(pid) thread = process.get_thread(tid) try: stack_trace = thread.get_stack_trace_with_labels() if stack_trace: print(CrashDump.dump_stack_trace_with_labels(stack_trace),) else: print("No stack trace available for thread (%d)" % tid) except WindowsError: print("Can't get stack trace for thread (%d)" % tid)
[~thread] k - show the stack trace [~thread] stack - show the stack trace
11,389
def _get_healthmgr_cmd(self): healthmgr_main_class = healthmgr_cmd = [os.path.join(self.heron_java_home, ), , , , , , , , , , , , , , , , , , , , self.health_manager_classpath, healthmgr_main_class, "--cluster", self.cluster, "--role", self.role, "--environment", self.environment, "--topology_name", self.topology_name, "--metricsmgr_port", self.metrics_manager_port] return Command(healthmgr_cmd, self.shell_env)
get the command to start the topology health manager processes
11,390
def to_eaf(self, skipempty=True, pointlength=0.1):
    from pympi.Elan import Eaf
    eaf_out = Eaf()
    if pointlength <= 0:
        raise ValueError('Pointlength should be strictly positive')
    for tier in self.get_tiers():
        eaf_out.add_tier(tier.name)
        for ann in tier.get_intervals(True):
            # point tiers get a small interval of the requested length
            if tier.tier_type == 'TextTier':
                ann = (ann[0], ann[0]+pointlength, ann[1])
            if ann[2].strip() or not skipempty:
                eaf_out.add_annotation(tier.name, int(round(ann[0]*1000)),
                                       int(round(ann[1]*1000)), ann[2])
    return eaf_out
Convert the object to an pympi.Elan.Eaf object :param int pointlength: Length of respective interval from points in seconds :param bool skipempty: Skip the empty annotations :returns: :class:`pympi.Elan.Eaf` object :raises ImportError: If the Eaf module can't be loaded. :raises ValueError: If the pointlength is not strictly positive.
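Usage sketch, assuming `tg` is a parsed TextGrid object exposing this method and that pympi's Eaf provides a to_file() writer:

    eaf = tg.to_eaf(skipempty=False, pointlength=0.05)
    eaf.to_file("out.eaf")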
11,391
def hostname(self):
    from six.moves.urllib.parse import urlparse
    # netloc may include a port; keep only the host part
    return urlparse(self._base_url).netloc.split(':', 1)[0]
Get the hostname that this connection is associated with
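A worked sketch of the string handling only (the URL is hypothetical; six is required just as in the method itself):

    from six.moves.urllib.parse import urlparse
    base_url = "https://api.example.com:8443/v2"
    print(urlparse(base_url).netloc.split(':', 1)[0])   # api.example.com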
11,392
def _expand_formula_(formula_string): formula_string = re.sub(r, , formula_string) hydrate_pos = formula_string.find() if hydrate_pos >= 0: formula_string = _expand_hydrate_(hydrate_pos, formula_string) search_result = re.search( r, formula_string) if search_result is None: return formula_string this_start = search_result.start() this_end = search_result.end() this_string = search_result.group() this_expansion_array = re.findall( r, this_string) for a in this_expansion_array: if a[1] == "": a = (a[0], 1) parenth_expanded = "" multiplier = float(a[1]) element_array = re.findall(, a[0]) for e in element_array: occurance_array = re.findall(, e) if len(occurance_array) == 0: occurance_array.append(1) for o in occurance_array: symbol = re.findall(, e) total_num = float(o) * multiplier if total_num.is_integer(): total_num = int(total_num) total_str = str(total_num) if total_str == "1": total_str = "" new_string = symbol[0] + total_str parenth_expanded += new_string formula_string = formula_string[0:this_start] + \ parenth_expanded + formula_string[this_end:] return _expand_formula_(formula_string)
Accounts for the many ways a user may write a formula string, and returns an expanded chemical formula string. Assumptions: -The chemical formula string it is supplied is well-written, and has no hanging parentheses -The number of repeats occurs after the elemental symbol or ) ] character EXCEPT in the case of a hydrate where it is assumed to be in front of the first element -All hydrates explicitly use the · symbol -Only (, ), [, ], ., · are "important" symbols to interpreting the string. -IONS ARE NOT HANDLED :param formula_string: a messy chemical formula string :return: a non-empirical but expanded formula string
11,393
def _ffn_layer_multi_inputs(inputs_list, hparams, ffn_layer_type="dense", name="ffn", kernel_initializer=None, bias_initializer=None, activation=None, pad_remover=None, preprocess=False, postprocess=False): num_inputs = len(inputs_list) assert num_inputs > 0 if preprocess and num_inputs == 1: inputs_list[0] = common_layers.layer_preprocess(inputs_list[0], hparams) if postprocess: original_inputs = inputs_list[0] main_input = inputs_list[0] original_shape = common_layers.shape_list(main_input) assert hparams.hidden_size == common_layers.shape_list(main_input)[-1] for inputs in inputs_list: main_input.get_shape().assert_is_compatible_with(inputs.get_shape()) def remove_pads(x): original_shape = common_layers.shape_list(x) x = tf.reshape(x, tf.concat([[-1], original_shape[2:]], axis=0)) x = tf.expand_dims(pad_remover.remove(x), axis=0) return x if pad_remover: for i, inputs in enumerate(inputs_list): inputs_list[i] = remove_pads(inputs) ffn_inputs = inputs_list[0] if len(inputs_list) != 1: ffn_inputs = tf.concat(inputs_list, axis=-1) if ffn_layer_type == "dense": output = common_layers.dense( ffn_inputs, hparams.hidden_size, name=name, activation=activation, use_bias=True, kernel_initializer=kernel_initializer, bias_initializer=bias_initializer) elif ffn_layer_type == "dense_dropconnect": output = common_layers.dense_dropconnect( ffn_inputs, hparams.hidden_size, name=name, dropconnect_dropout=hparams.dropconnect_dropout, output_activation=activation) postprocess = False elif ffn_layer_type == "dense_relu_dense": output = common_layers.dense_relu_dense( ffn_inputs, hparams.filter_size, hparams.hidden_size, name=name, dropout=hparams.relu_dropout, output_activation=activation, ) else: raise ValueError("Unknown ffn_layer type: %s" % ffn_layer_type) if pad_remover: output = tf.reshape( pad_remover.restore(tf.squeeze(output, axis=0)), original_shape) if postprocess: if num_inputs == 1: output = common_layers.layer_postprocess(original_inputs, output, hparams) else: hp = copy.copy(hparams) hp.layer_postprocess_sequence = hp.layer_postprocess_sequence.replace( "a", "") output = common_layers.layer_postprocess(original_inputs, output, hp) return output
Implements a Feed-forward layer with multiple inputs, pad-removing, etc. Args: inputs_list: list of input tensors hparams: hyper-parameters ffn_layer_type: dense / dense_dropconnect/ dense_relu_dense name: name kernel_initializer: kernel initializer bias_initializer: bias initializer activation: activation function pad_remover: pad remover preprocess: if preprocess the input postprocess: if postprocess the output Returns: a tensor Raises: ValueError: Unknown ffn_layer type.
11,394
def remove_exit(self): if self.items: if self.items[-1] is self.exit_item: del self.items[-1] return True return False
Remove the exit item if necessary. Used to make sure we only remove the exit item, not something else. Returns: bool: True if item needed to be removed, False otherwise.
11,395
def to_yaml(template, clean_up=False, long_form=False): data = load_json(template) if clean_up: data = clean(data) return dump_yaml(data, clean_up, long_form)
Assume the input is JSON and convert to YAML
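A hedged usage sketch; load_json, clean and dump_yaml are assumed to be defined alongside this function:

    template = '{"Resources": {"Bucket": {"Type": "AWS::S3::Bucket"}}}'
    print(to_yaml(template))                                 # YAML rendering of the same data
    print(to_yaml(template, clean_up=True, long_form=True))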
11,396
def recent_comments(context): latest = context["settings"].COMMENTS_NUM_LATEST comments = ThreadedComment.objects.all().select_related("user") context["comments"] = comments.order_by("-id")[:latest] return context
Dashboard widget for displaying recent comments.
11,397
def write(self, frames): with HDFStore(self._path, , complevel=self._complevel, complib=self._complib) \ as store: panel = pd.Panel.from_dict(dict(frames)) panel.to_hdf(store, ) with tables.open_file(self._path, mode=) as h5file: h5file.set_node_attr(, , 0)
Write the frames to the target HDF5 file, using the format used by ``pd.Panel.to_hdf`` Parameters ---------- frames : iter[(int, DataFrame)] or dict[int -> DataFrame] An iterable or other mapping of sid to the corresponding OHLCV pricing data.
11,398
def _refresh_html_home(self):
    req = self._parent.client.get(HOME_ENDPOINT)
    if req.status_code == 403:
        # session expired: log in again and retry the update
        self._parent.login()
        self.update()
    elif req.status_code == 200:
        self._parent.html['home'] = generate_soup_html(req.text)
    else:
        req.raise_for_status()
Function to refresh the self._parent.html['home'] object, which indicates whether zones are scheduled to start automatically (program_toggle).
11,399
def handle_message_registered(self, msg_data, host): response = None if msg_data["method"] == "EVENT": logger.debug("<%s> <euuid:%s> Event message " "received" % (msg_data["cuuid"], msg_data["euuid"])) response = self.event(msg_data["cuuid"], host, msg_data["euuid"], msg_data["event_data"], msg_data["timestamp"], msg_data["priority"]) elif msg_data["method"] == "OK EVENT": logger.debug("<%s> <euuid:%s> Event confirmation message " "received" % (msg_data["cuuid"], msg_data["euuid"])) try: del self.event_uuids[msg_data["euuid"]] except KeyError: logger.warning("<%s> <euuid:%s> Euuid does not exist in event " "buffer. Key was removed before we could process " "it." % (msg_data["cuuid"], msg_data["euuid"])) elif msg_data["method"] == "OK NOTIFY": logger.debug("<%s> <euuid:%s> Ok notify " "received" % (msg_data["cuuid"], msg_data["euuid"])) try: del self.event_uuids[msg_data["euuid"]] except KeyError: logger.warning("<%s> <euuid:%s> Euuid does not exist in event " "buffer. Key was removed before we could process " "it." % (msg_data["cuuid"], msg_data["euuid"])) return response
Processes messages that have been delivered by a registered client. Args: msg (string): The raw packet data delivered from the listener. This data will be unserialized and then processed based on the packet's method. host (tuple): The (address, host) tuple of the source message. Returns: A response that will be sent back to the client via the listener.