Dataset columns: "Unnamed: 0" (int64, values 0-389k), "code" (string, lengths 26-79.6k), "docstring" (string, lengths 1-46.9k).
def paged_object_to_list(paged_object):
    paged_return = []
    while True:
        try:
            page = next(paged_object)
            paged_return.append(page.as_dict())
        except CloudError:
            raise
        except StopIteration:
            break
    return paged_return
Extract all pages within a paged object as a list of dictionaries
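A minimal usage sketch; the page objects here are hypothetical stand-ins, and in real use `CloudError` would come from msrestazure.azure_exceptions.

class FakePage:
    def __init__(self, n):
        self.n = n
    def as_dict(self):
        return {"value": self.n}

pages = iter([FakePage(1), FakePage(2)])
print(paged_object_to_list(pages))  # [{'value': 1}, {'value': 2}]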
9,701
def replace_name(file_path, new_name):
    if not file_path:
        raise Exception("File path cannot be empty")
    elif not new_name:
        raise Exception("New name cannot be empty")
    dirname = os.path.dirname(file_path)
    ext = os.path.splitext(os.path.basename(file_path))[1]
    return os.path.join(dirname, new_name + ext)
Change the file name in a path but keep the extension
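For example (the path is illustrative):

print(replace_name("/tmp/report.csv", "summary"))  # /tmp/summary.csv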
9,702
def proj4_to_epsg(projection):
    def make_definition(value):
        return {x.strip().lower() for x in value.split() if x}

    match = EPSG_RE.search(projection.srs)
    if match:
        return int(match.group(1))
    # Path components assumed from pyproj's pre-2.0 layout: the bundled
    # EPSG init file lives at <pyproj package dir>/data/epsg.
    pyproj_data_dir = os.path.join(os.path.dirname(pyproj.__file__), 'data')
    pyproj_epsg_file = os.path.join(pyproj_data_dir, 'epsg')
    if os.path.exists(pyproj_epsg_file):
        definition = make_definition(projection.srs)
        with open(pyproj_epsg_file, 'r') as f:
            for line in f.readlines():
                match = PYPROJ_EPSG_FILE_RE.search(line)
                if match:
                    file_definition = make_definition(match.group(2))
                    if definition == file_definition:
                        return int(match.group(1))
    return None
Attempts to convert a PROJ4 projection object to an EPSG code and returns None if conversion fails
9,703
def call_action(self, service_name, action_name, **kwargs):
    action = self.services[service_name].actions[action_name]
    return action.execute(**kwargs)
Executes the given action. Raises a KeyError on unknown actions.
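A minimal sketch of the registry layout this method assumes; all class and key names here are hypothetical, and the function above is reused as a method.

class EchoAction:
    def execute(self, **kwargs):
        return kwargs

class Service:
    def __init__(self):
        self.actions = {"echo": EchoAction()}

class Client:
    def __init__(self):
        self.services = {"svc": Service()}
    call_action = call_action  # reuse the function above as a method

assert Client().call_action("svc", "echo", x=1) == {"x": 1}
# Client().call_action("svc", "missing") would raise KeyError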
9,704
def unpack(self, buff, offset=0):
    super().unpack(buff, offset)
    try:
        self.oxm_field = self._unpack_oxm_field()
    except ValueError as exception:
        raise UnpackException(exception)
    self.oxm_hasmask = (self.oxm_field_and_mask & 1) == 1
    start = offset + 4
    end = start + self.oxm_length
    self.oxm_value = buff[start:end]
Unpack the buffer into an OxmTLV. Args: buff (bytes): The binary data to be unpacked. offset (int): Where to start unpacking in the data.
9,705
def _set_route_target_evpn(self, v, load=False): if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=YANGListType("action target_community",route_target_evpn.route_target_evpn, yang_name="route-target-evpn", rest_name="route-target", parent=self, is_container=, user_ordered=False, path_helper=self._path_helper, yang_keys=, extensions={u: {u: u, u: None, u: None, u: None, u: u, u: None, u: u}}), is_container=, yang_name="route-target-evpn", rest_name="route-target", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u: {u: u, u: None, u: None, u: None, u: u, u: None, u: u}}, namespace=, defining_module=, yang_type=, is_config=True) except (TypeError, ValueError): raise ValueError({ : , : "list", : , }) self.__route_target_evpn = t if hasattr(self, ): self._set()
Setter method for route_target_evpn, mapped from YANG variable /vrf/address_family/ipv6/unicast/route_target_container_ipv6/route_target_evpn (list) If this variable is read-only (config: false) in the source YANG file, then _set_route_target_evpn is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_route_target_evpn() directly.
9,706
def complete_reminder(self, reminder_id, complete_dict):
    return self._create_put_request(
        resource=REMINDERS,
        billomat_id=reminder_id,
        command=COMPLETE,
        send_data=complete_dict
    )
Completes a reminder :param complete_dict: the complete dict with the template id :param reminder_id: the reminder id :return: Response
9,707
def _generate_new_address(self, creator=None) -> str:
    if creator:
        # Contract addresses are derived deterministically from the creator.
        return "0x" + str(mk_contract_address(creator, 0).hex())
    while True:
        # randint(0, 15) yields one uniform hex digit per position.
        address = "0x" + "".join([str(hex(randint(0, 15)))[-1] for _ in range(40)])
        if address not in self.accounts.keys():
            return address
Generates a new address for the global state. :return: A fresh 40-hex-digit address string prefixed with "0x".
9,708
def insert(self, context):
    module_file = open(context.resolve(self.__path), "rb")
    data = {"name": self.__name}
    if self.__context_root is not None:
        data["contextroot"] = self.__context_root
    status_code, msg = self.__endpoint.post(
        "/applications/application",
        data=data,
        files={"id": module_file},
        timeout=60.
    )
    module_file.close()
    self.__available = True
Deploy application. :param resort.engine.execution.Context context: Current execution context.
9,709
def _resolve_api_id(self):
    # Module function name, result keys and error text assumed from
    # Salt's boto_apigateway execution module.
    apis = __salt__['boto_apigateway.describe_apis'](
        name=self.rest_api_name,
        description=_Swagger.AWS_API_DESCRIPTION,
        **self._common_aws_args).get('restapi')
    if apis:
        if len(apis) == 1:
            self.restApiId = apis[0].get('id')
        else:
            raise ValueError(
                'Multiple APIs matching name {0} were found: '
                '{1}'.format(self.rest_api_name, self.info_json))
Returns an API id that matches the given api_name and the hardcoded _Swagger.AWS_API_DESCRIPTION as the api description.
9,710
def share_network(network_id, usernames, read_only, share, **kwargs):
    user_id = kwargs.get('user_id')
    net_i = _get_network(network_id)
    net_i.check_share_permission(user_id)
    if read_only == 'Y':
        write = 'N'
        share = 'N'
    else:
        write = 'Y'
    if net_i.created_by != int(user_id) and share == 'Y':
        raise HydraError("Cannot share the ability as user %s is not"
                         " the owner of network %s" % (user_id, network_id))
    for username in usernames:
        user_i = _get_user(username)
        net_i.set_owner(user_i.id, write=write, share=share)
        for o in net_i.project.owners:
            if o.user_id == user_i.id:
                break
        else:
            # Read-only, non-sharable project access assumed for new owners.
            net_i.project.set_owner(user_i.id, write='N', share='N')
    db.DBSession.flush()
Share a network with a list of users, identified by their usernames. The read_only flag ('Y' or 'N') must be set to 'N' to allow write access. The share flag ('Y' or 'N') must be set to 'Y' to give the recipients the ability to share the network with other users.
9,711
def add_mismatch(self, entity, *traits):
    for trait in traits:
        self.index[trait].add(entity)
Add a mismatching entity to the index by recording the entity under each of the given traits. :param collections.Hashable entity: an object that mismatches the values of `traits_indexed_by` :param list traits: a list of hashable traits to index the entity with
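A runnable sketch, assuming `self.index` maps traits to sets (a defaultdict is used here; the class name is hypothetical):

from collections import defaultdict

class MismatchIndex:
    def __init__(self):
        self.index = defaultdict(set)
    add_mismatch = add_mismatch  # reuse the function above as a method

idx = MismatchIndex()
idx.add_mismatch("entity-1", "red", "large")
assert idx.index["red"] == {"entity-1"}
assert idx.index["large"] == {"entity-1"}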
9,712
def resume(self):
    if self.get_state() != Target.TARGET_HALTED:
        logging.debug('cannot resume: target not halted')  # message text assumed
        return
    self.notify(Notification(event=Target.EVENT_PRE_RUN, source=self,
                             data=Target.RUN_TYPE_RESUME))
    self._run_token += 1
    self.clear_debug_cause_bits()
    self.write_memory(CortexM.DHCSR, CortexM.DBGKEY | CortexM.C_DEBUGEN)
    self.flush()
    self.notify(Notification(event=Target.EVENT_POST_RUN, source=self,
                             data=Target.RUN_TYPE_RESUME))
Resume execution of the target.
9,713
def set_context(self, data):
    for key in data:
        setattr(self.local_context, key, data[key])
Load Context with data
9,714
def order_by(self, field, orientation='ASC'):
    # Default orientation assumed to be ascending.
    if isinstance(field, list):
        self.raw_order_by.append(field)
    else:
        self.raw_order_by.append([field, orientation])
    return self
Specifies the fields and the ordering criterion.
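Because the method returns `self`, calls chain. A sketch with a stand-in class; the orientation strings are illustrative:

class Query:
    def __init__(self):
        self.raw_order_by = []
    order_by = order_by  # reuse the function above as a method

q = Query().order_by("created_at", "DESC").order_by(["name", "ASC"])
assert q.raw_order_by == [["created_at", "DESC"], ["name", "ASC"]]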
9,715
def QueueQueryAndOwn(self, queue, lease_seconds, limit, timestamp):
    try:
        lock = DB.LockRetryWrapper(queue, lease_time=lease_seconds)
        return self._QueueQueryAndOwn(
            lock.subject,
            lease_seconds=lease_seconds,
            limit=limit,
            timestamp=timestamp)
    except DBSubjectLockError:
        # Can't grab the lock on the queue: no tasks to lease.
        return []
    except Error as e:
        logging.warning("Datastore exception: %s", e)
        return []
Returns a list of Tasks leased for a certain time. Args: queue: The queue to query from. lease_seconds: The tasks will be leased for this long. limit: Number of values to fetch. timestamp: Range of times for consideration. Returns: A list of GrrMessage() objects leased.
9,716
def _raise_on_mode(self, mode): valid_modes = [ , , , , , , ] if mode not in valid_modes: raise ValueError( .format(mode, valid_modes) )
Checks that the provided query mode is one of the accepted values. If not, raises a :obj:`ValueError`.
9,717
def getStats(self):
    if self._stats is None:
        assert self._mode == self._FILE_READ_MODE
        inFile = open(self._filename, self._FILE_READ_MODE)
        reader = csv.reader(inFile, dialect="excel")
        names = [n.strip() for n in reader.next()]
        types = [t.strip() for t in reader.next()]
        reader.next()  # skip the third header row
        # Keys recovered from the documented return value.
        self._stats = dict()
        self._stats['min'] = []
        self._stats['max'] = []
        for i in xrange(len(names)):
            self._stats['min'].append(None)
            self._stats['max'].append(None)
        while True:
            try:
                line = reader.next()
                for i, f in enumerate(line):
                    if (len(types) > i and
                            types[i] in [FieldMetaType.integer,
                                         FieldMetaType.float] and
                            f not in self._missingValues):
                        value = self._adapters[i](f)
                        if self._stats['max'][i] is None or \
                                self._stats['max'][i] < value:
                            self._stats['max'][i] = value
                        if self._stats['min'][i] is None or \
                                self._stats['min'][i] > value:
                            self._stats['min'][i] = value
            except StopIteration:
                break
    return self._stats
Parse the file using dedicated reader and collect fields stats. Never called if user of :class:`~.FileRecordStream` does not invoke :meth:`~.FileRecordStream.getStats` method. :returns: a dictionary of stats. In the current implementation, min and max fields are supported. Example of the return dictionary is: .. code-block:: python { 'min' : [f1_min, f2_min, None, None, fn_min], 'max' : [f1_max, f2_max, None, None, fn_max] } (where fx_min/fx_max are set for scalar fields, or None if not)
9,718
def do_classdesc(self, parent=None, ident=0): clazz = JavaClass() log_debug("[classdesc]", ident) class_name = self._readString() clazz.name = class_name log_debug("Class name: %s" % class_name, ident) serialVersionUID, classDescFlags = self._readStruct(">qB") clazz.serialVersionUID = serialVersionUID clazz.flags = classDescFlags self._add_reference(clazz, ident) log_debug( "Serial: 0x{0:X} / {0:d} - classDescFlags: 0x{1:X} {2}".format( serialVersionUID, classDescFlags, OpCodeDebug.flags(classDescFlags) ), ident, ) (length,) = self._readStruct(">H") log_debug("Fields num: 0x{0:X}".format(length), ident) clazz.fields_names = [] clazz.fields_types = [] for fieldId in range(length): (typecode,) = self._readStruct(">B") field_name = self._readString() field_type = self._convert_char_to_type(typecode) log_debug("> Reading field {0}".format(field_name), ident) if field_type == self.TYPE_ARRAY: _, field_type = self._read_and_exec_opcode( ident=ident + 1, expect=(self.TC_STRING, self.TC_REFERENCE) ) if type(field_type) is not JavaString: raise AssertionError( "Field type must be a JavaString, " "not {0}".format(type(field_type)) ) elif field_type == self.TYPE_OBJECT: _, field_type = self._read_and_exec_opcode( ident=ident + 1, expect=(self.TC_STRING, self.TC_REFERENCE) ) if type(field_type) is JavaClass: field_type = JavaString(field_type.name) if type(field_type) is not JavaString: raise AssertionError( "Field type must be a JavaString, " "not {0}".format(type(field_type)) ) log_debug( "< FieldName: 0x{0:X} Name:{1} Type:{2} ID:{3}".format( typecode, field_name, field_type, fieldId ), ident, ) assert field_name is not None assert field_type is not None clazz.fields_names.append(field_name) clazz.fields_types.append(field_type) if parent: parent.__fields = clazz.fields_names parent.__types = clazz.fields_types (opid,) = self._readStruct(">B") log_debug( "OpCode: 0x{0:X} -- {1} (classAnnotation)".format( opid, OpCodeDebug.op_id(opid) ), ident, ) if opid != self.TC_ENDBLOCKDATA: raise NotImplementedError("classAnnotation isn't implemented yet") log_debug("Reading Super Class of {0}".format(clazz.name), ident) _, superclassdesc = self._read_and_exec_opcode( ident=ident + 1, expect=(self.TC_CLASSDESC, self.TC_NULL, self.TC_REFERENCE) ) log_debug( "Super Class for {0}: {1}".format(clazz.name, str(superclassdesc)), ident ) clazz.superclass = superclassdesc return clazz
Handles a TC_CLASSDESC opcode :param parent: :param ident: Log indentation level :return: A JavaClass object
9,719
def update_cursor(self, dc, grid, row, col):
    old_row, old_col = self.old_cursor_row_col
    bgcolor = get_color(config["background_color"])
    self._draw_cursor(dc, grid, old_row, old_col,
                      pen=wx.Pen(bgcolor), brush=wx.Brush(bgcolor))
    self._draw_cursor(dc, grid, row, col)
Whites out the old cursor and draws the new one
9,720
def element_type(self):
    if not self.is_pointer:
        raise ValueError("Type {} is not a pointer".format(self))
    return TypeRef(ffi.lib.LLVMPY_GetElementType(self))
Returns the pointed-to type. When the type is not a pointer, raises exception.
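A sketch using llvmlite's binding layer, where this accessor lives; the initialization calls may vary by llvmlite version:

import llvmlite.binding as llvm

llvm.initialize()
llvm.initialize_native_target()
llvm.initialize_native_asmprinter()

mod = llvm.parse_assembly("@g = global i32 0")
gv = next(mod.global_variables)
print(gv.type)               # i32*
print(gv.type.element_type)  # i32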
9,721
def Clamond(Re, eD, fast=False):
    X1 = eD*Re*0.1239681863354175460160858261654858382699
    X2 = log(Re) - 0.7793974884556819406441139701653776731705
    F = X2 - 0.2
    X1F = X1 + F
    X1F1 = 1. + X1F
    E = (log(X1F) - 0.2)/(X1F1)
    F = F - (X1F1 + 0.5*E)*E*(X1F)/(X1F1 + E*(1. + E*0.3333333333333333))
    if not fast:
        # Second iteration roughly doubles the number of correct decimals.
        X1F = X1 + F
        X1F1 = 1. + X1F
        E = (log(X1F) + F - X2)/(X1F1)
        F = F - (X1F1 + 0.5*E)*E*(X1F)/(X1F1 + E*(1. + E*0.3333333333333333))
    return 1.325474527619599502640416597148504422899/(F*F)
r'''Calculates Darcy friction factor using a solution accurate to almost machine precision. Recommended very strongly. For details of the algorithm, see [1]_. Parameters ---------- Re : float Reynolds number, [-] eD : float Relative roughness, [-] fast : bool, optional If true, performs only one iteration, which gives roughly half the number of decimals of accuracy, [-] Returns ------- fd : float Darcy friction factor [-] Notes ----- This is a highly optimized function, 4 times faster than the solution using the LambertW function, and faster than many other approximations which are much less accurate. The code used here is only slightly modified than that in [1]_, for further performance improvements. For 10 < Re < 1E12, and 0 < eD < 0.01, this equation has been confirmed numerically to provide a solution to the Colebrook equation accurate to an rtol of 1E-9 or better - the same level of accuracy as the analytical solution to the Colebrook equation due to floating point precision. Comparing this to the numerical solution of the Colebrook equation, identical values are given accurate to an rtol of 1E-9 for 10 < Re < 1E100, and 0 < eD < 1 and beyond. However, for values of Re under 10, different answers from the `Colebrook` equation appear and then quickly a ValueError is raised. Examples -------- >>> Clamond(1E5, 1E-4) 0.01851386607747165 References ---------- .. [1] Clamond, Didier. "Efficient Resolution of the Colebrook Equation." Industrial & Engineering Chemistry Research 48, no. 7 (April 1, 2009): 3665-71. doi:10.1021/ie801626g. http://math.unice.fr/%7Edidierc/DidPublis/ICR_2009.pdf
9,722
def _execute(self, api_command, *, timeout=None):
    if api_command.observe:
        self._observe(api_command)
        return
    method = api_command.method
    path = api_command.path
    data = api_command.data
    parse_json = api_command.parse_json
    url = api_command.url(self._host)
    proc_timeout = self._timeout
    if timeout is not None:
        proc_timeout = timeout
    command = self._base_command(method)
    # Subprocess keyword names restored; coap-client flags and log/error
    # message texts assumed.
    kwargs = {
        'stderr': subprocess.DEVNULL,
        'timeout': proc_timeout,
        'universal_newlines': True,
    }
    if data is not None:
        kwargs['input'] = json.dumps(data)
        command.append('-f')
        command.append('-')
        _LOGGER.debug('Executing %s %s %s: %s', self._host, method, path, data)
    else:
        _LOGGER.debug('Executing %s %s %s', self._host, method, path)
    command.append(url)
    try:
        return_value = subprocess.check_output(command, **kwargs)
    except subprocess.TimeoutExpired:
        raise RequestTimeout() from None
    except subprocess.CalledProcessError as err:
        raise RequestError(
            'Error executing request: {}'.format(err)) from None
    api_command.result = _process_output(return_value, parse_json)
    return api_command.result
Execute the command.
9,723
def _init_unique_sets(self):
    ks = dict()
    for t in self._unique_checks:
        key = t[0]
        ks[key] = set()  # empty set for each unique check
    return ks
Initialise sets used for uniqueness checking.
9,724
def update(self, _attributes=None, **attributes):
    if _attributes is not None:
        attributes.update(_attributes)
    if self._related.uses_timestamps():
        attributes[self.get_related_updated_at()] = self._related.fresh_timestamp()
    return self._query.update(attributes)
Perform an update on all the related models. :param attributes: The attributes :type attributes: dict :rtype: int
9,725
def read_history_file(self, filename=None):
    if filename is None:
        filename = self.history_filename
    try:
        for line in open(filename, u'r'):
            self.add_history(lineobj.ReadLineTextBuffer(
                ensure_unicode(line.rstrip())))
    except IOError:
        self.history = []
        self.history_cursor = 0
Load a readline history file.
9,726
def get_documents(self, subtypes=None, refresh=False):
    # Scopus author query string assumed: search by author identifier.
    search = ScopusSearch('AU-ID({})'.format(self.identifier), refresh)
    if subtypes:
        return [p for p in search.results if p.subtype in subtypes]
    else:
        return search.results
Return list of author's publications using ScopusSearch, which fit a specified set of document subtypes.
9,727
def create_styles(title, defaults=None, mappings=None,
                  host=cytoscape_host, port=cytoscape_port):
    if defaults:
        defaults = [d for d in defaults if d]
    if mappings:
        mappings = [m for m in mappings if m]
    try:
        update_style(title, defaults=defaults, mappings=mappings,
                     host=host, port=port)
        print("Existing style was updated.")
        sys.stdout.flush()
    except:
        print("Creating new style.")
        sys.stdout.flush()
        URL = "http://" + str(host) + ":" + str(port) + "/v1/styles"
        PARAMS = {"title": title,
                  "defaults": defaults,
                  "mappings": mappings}
        r = requests.post(url=URL, json=PARAMS)
        CheckResponse(r)
Creates a new visual style :param title: title of the visual style :param defaults: a list of dictionaries for each visualProperty :param mappings: a list of dictionaries for each visualProperty :param host: cytoscape host address, default=cytoscape_host :param port: cytoscape port, default=1234 :returns: Nothing
9,728
def search_variant_sets(self, dataset_id):
    request = protocol.SearchVariantSetsRequest()
    request.dataset_id = dataset_id
    request.page_size = pb.int(self._page_size)
    return self._run_search_request(
        request, "variantsets", protocol.SearchVariantSetsResponse)
Returns an iterator over the VariantSets fulfilling the specified conditions from the specified Dataset. :param str dataset_id: The ID of the :class:`ga4gh.protocol.Dataset` of interest. :return: An iterator over the :class:`ga4gh.protocol.VariantSet` objects defined by the query parameters.
9,729
def verify(self, obj):
    if not isinstance(obj, int):
        raise ValidationError("Object is not an int",
                              reason='object is not an int',  # reason text assumed
                              object=obj, type=type(obj), int_type=int)
    return obj
Verify that the object conforms to this verifier's schema Args: obj (object): A python object to verify Raises: ValidationError: If there is a problem verifying the dictionary, a ValidationError is thrown with at least the reason key set indicating the reason for the lack of validation.
9,730
def _tls_auth_encrypt(self, s):
    write_seq_num = struct.pack("!Q", self.tls_session.wcs.seq_num)
    self.tls_session.wcs.seq_num += 1
    add_data = (write_seq_num +
                pkcs_i2osp(self.type, 1) +
                pkcs_i2osp(self.version, 2) +
                pkcs_i2osp(len(s), 2))
    return self.tls_session.wcs.cipher.auth_encrypt(s, add_data,
                                                    write_seq_num)
Return the TLSCiphertext.fragment for AEAD ciphers, i.e. the whole GenericAEADCipher. Also, the additional data is computed right here.
9,731
def pvlan_host_association(self, **kwargs):
    int_type = kwargs.pop('int_type').lower()
    name = kwargs.pop('name')
    pri_vlan = kwargs.pop('pri_vlan')
    sec_vlan = kwargs.pop('sec_vlan')
    callback = kwargs.pop('callback', self._callback)
    # Interface types assumed from the pynos interface module.
    int_types = ['gigabitethernet', 'tengigabitethernet',
                 'fortygigabitethernet', 'hundredgigabitethernet',
                 'port_channel']
    if int_type not in int_types:
        raise ValueError("Incorrect int_type value.")
    if not pynos.utilities.valid_interface(int_type, name):
        raise ValueError('`name` must be in the format of x/y/z for '
                         'physical interfaces or x for port channel.')
    if not pynos.utilities.valid_vlan_id(pri_vlan):
        raise InvalidVlanId("`pri_vlan` must be between `1` and `4095`.")
    if not pynos.utilities.valid_vlan_id(sec_vlan):
        raise InvalidVlanId("`sec_vlan` must be between `1` and `4095`.")
    pvlan_args = dict(name=name, host_pri_pvlan=pri_vlan)
    # Generated-method name and XML tag names assumed from pynos conventions.
    associate_pvlan = getattr(self._interface,
                              '%s_switchport_private_vlan_host_association_'
                              'host_pri_pvlan' % int_type)
    config = associate_pvlan(**pvlan_args)
    sec_assoc = config.find('.//*host-association')
    sec_assoc = ET.SubElement(sec_assoc, 'host-sec-pvlan')
    sec_assoc.text = sec_vlan
    return callback(config)
Set interface PVLAN association. Args: int_type (str): Type of interface. (gigabitethernet, tengigabitethernet, etc) name (str): Name of interface. (1/0/5, 1/0/10, etc) pri_vlan (str): The primary PVLAN. sec_vlan (str): The secondary PVLAN. callback (function): A function executed upon completion of the method. The only parameter passed to `callback` will be the ``ElementTree`` `config`. Returns: Return value of `callback`. Raises: KeyError: if `int_type`, `name`, `pri_vlan`, or `sec_vlan` is not specified. ValueError: if `int_type`, `name`, `pri_vlan`, or `sec_vlan` is invalid. Examples: >>> import pynos.device >>> switches = ['10.24.39.211', '10.24.39.203'] >>> auth = ('admin', 'password') >>> int_type = 'tengigabitethernet' >>> name = '225/0/38' >>> pri_vlan = '75' >>> sec_vlan = '100' >>> for switch in switches: ... conn = (switch, '22') ... with pynos.device.Device(conn=conn, auth=auth) as dev: ... output = dev.interface.private_vlan_type(name=pri_vlan, ... pvlan_type='primary') ... output = dev.interface.private_vlan_type(name=sec_vlan, ... pvlan_type='isolated') ... output = dev.interface.vlan_pvlan_association_add( ... name=pri_vlan, sec_vlan=sec_vlan) ... output = dev.interface.enable_switchport(int_type, ... name) ... output = dev.interface.private_vlan_mode( ... int_type=int_type, name=name, mode='host') ... output = dev.interface.pvlan_host_association( ... int_type=int_type, name=name, pri_vlan=pri_vlan, ... sec_vlan=sec_vlan) ... dev.interface.pvlan_host_association() ... # doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): KeyError
9,732
def _make_image_description(self, datasets, **kwargs): translate_platform_name = {: , : , : , : , : , : , : , : } first_dataset = datasets if isinstance(datasets, list): LOG.debug("Datasets is a list of dataset") first_dataset = datasets[0] if in first_dataset.attrs: _platform_name = translate_platform_name.get( first_dataset.attrs[], first_dataset.attrs[]) elif in kwargs: _platform_name = translate_platform_name.get( kwargs[], kwargs[]) else: _platform_name = None _image_description = _image_description.encode() _image_description += if _platform_name is not None: _image_description += _platform_name _image_description += _image_description += first = True earliest = 0 for dataset in datasets: if first: earliest = dataset.attrs[] else: if dataset.attrs[] < earliest: earliest = dataset.attrs[] first = False LOG.debug("earliest start_time: %s", earliest) _image_description += earliest.strftime("%H:%M %d/%m-%Y\n") _image_description += _image_description += if isinstance(datasets, list): LOG.debug("len datasets: %s", len(datasets)) _image_description += str(len(datasets)) elif in datasets.sizes: LOG.debug("len datasets: %s", datasets.sizes[]) _image_description += str(datasets.sizes[]) elif len(datasets.sizes) == 2: LOG.debug("len datasets: 1") _image_description += _image_description += channels = self._make_channel_list(datasets, **kwargs) try: cns = self.translate_channel_name.get(kwargs[], {}) except KeyError: pass _image_description += self._channel_names(channels, cns, **kwargs) _image_description += self._add_sizes(datasets, first_dataset) _image_description += _image_description += self._add_proj4_string(datasets, first_dataset) _image_description += _image_description += _image_description += _image_description += % (0) _image_description += % (0) + _image_description += self._add_pixel_sizes(datasets, first_dataset) _image_description += self._add_corners(datasets, first_dataset) if isinstance(datasets, list): LOG.debug("Area extent: %s", first_dataset.attrs[].area_extent) else: LOG.debug("Area extent: %s", datasets.attrs[].area_extent) _image_description += self._add_calibration(channels, cns, datasets, **kwargs) return _image_description
generate image description for mitiff. Satellite: NOAA 18 Date and Time: 06:58 31/05-2016 SatDir: 0 Channels: 6 In this file: 1-VIS0.63 2-VIS0.86 3(3B)-IR3.7 4-IR10.8 5-IR11.5 6(3A)-VIS1.6 Xsize: 4720 Ysize: 5544 Map projection: Stereographic Proj string: +proj=stere +lon_0=0 +lat_0=90 +lat_ts=60 +ellps=WGS84 +towgs84=0,0,0 +units=km +x_0=2526000.000000 +y_0=5806000.000000 TrueLat: 60 N GridRot: 0 Xunit:1000 m Yunit: 1000 m NPX: 0.000000 NPY: 0.000000 Ax: 1.000000 Ay: 1.000000 Bx: -2526.000000 By: -262.000000 Satellite: <satellite name> Date and Time: <HH:MM dd/mm-yyyy> SatDir: 0 Channels: <number of chanels> In this file: <channels names in order> Xsize: <number of pixels x> Ysize: <number of pixels y> Map projection: Stereographic Proj string: <proj4 string with +x_0 and +y_0 which is the positive distance from proj origo to the lower left corner of the image data> TrueLat: 60 N GridRot: 0 Xunit:1000 m Yunit: 1000 m NPX: 0.000000 NPY: 0.000000 Ax: <pixels size x in km> Ay: <pixel size y in km> Bx: <left corner of upper right pixel in km> By: <upper corner of upper right pixel in km> if palette image write special palette if normal channel write table calibration: Table_calibration: <channel name>, <calibration type>, [<unit>], <no of bits of data>, [<calibration values space separated>]\n\n
9,733
def main(args): grr_config.CONFIG.AddContext(contexts.CLIENT_BUILD_CONTEXT) if args.subparser_name == "generate_client_config": templates.append(template) repack_configs = [] for repack_config in args.repack_configs: if "*" in repack_config: repack_configs.extend(glob.glob(repack_config)) else: repack_configs.append(repack_config) MultiTemplateRepacker().RepackTemplates( repack_configs, templates, args.output_dir, config=FLAGS.config, sign=args.sign, signed_template=args.signed_template) elif args.subparser_name == "sign_template": repacking.TemplateRepacker().SignTemplate( args.template, args.output_file, context=context) if not os.path.exists(args.output_file): raise RuntimeError("Signing failed: output not written")
Launch the appropriate builder.
9,734
def _setup_axes(cls, axes, info_axis=None, stat_axis=None, aliases=None,
                slicers=None, axes_are_reversed=False, build_axes=True,
                ns=None, docs=None):
    cls._AXIS_ORDERS = axes
    cls._AXIS_NUMBERS = {a: i for i, a in enumerate(axes)}
    cls._AXIS_LEN = len(axes)
    cls._AXIS_ALIASES = aliases or dict()
    cls._AXIS_IALIASES = {v: k for k, v in cls._AXIS_ALIASES.items()}
    cls._AXIS_NAMES = dict(enumerate(axes))
    cls._AXIS_SLICEMAP = slicers or None
    cls._AXIS_REVERSED = axes_are_reversed
    # Attribute name recovered from pandas internals.
    setattr(cls, '_typ', cls.__name__.lower())
    cls._ix = None
    if info_axis is not None:
        cls._info_axis_number = info_axis
        cls._info_axis_name = axes[info_axis]
    if stat_axis is not None:
        cls._stat_axis_number = stat_axis
        cls._stat_axis_name = axes[stat_axis]
    if build_axes:
        def set_axis(a, i):
            setattr(cls, a, properties.AxisProperty(i, docs.get(a, a)))
            cls._internal_names_set.add(a)
        if axes_are_reversed:
            m = cls._AXIS_LEN - 1
            for i, a in cls._AXIS_NAMES.items():
                set_axis(a, m - i)
        else:
            for i, a in cls._AXIS_NAMES.items():
                set_axis(a, i)
    assert not isinstance(ns, dict)
Provide axes setup for the major PandasObjects. Parameters ---------- axes : the names of the axes in order (lowest to highest) info_axis_num : the axis of the selector dimension (int) stat_axis_num : the number of axis for the default stats (int) aliases : other names for a single axis (dict) slicers : how axes slice to others (dict) axes_are_reversed : boolean whether to treat passed axes as reversed (DataFrame) build_axes : setup the axis properties (default True)
9,735
def parent(self):
    def parent_element():
        return WebElementWrapper(self.driver_wrapper, self.locator,
                                 self.element.parent)
    # Action description string assumed.
    return self.execute_and_handle_webelement_exceptions(parent_element,
                                                         'get parent')
Get the parent of the element @rtype: WebElementWrapper @return: Parent of webelementwrapper on which this was invoked
9,736
def record(self):
    while True:
        frames = []
        self.stream.start_stream()
        for i in range(self.num_frames):
            data = self.stream.read(self.config.FRAMES_PER_BUFFER)
            frames.append(data)
        self.output.seek(0)
        w = wave.open(self.output, 'wb')
        w.setnchannels(self.config.CHANNELS)
        w.setsampwidth(self.audio.get_sample_size(self.config.FORMAT))
        w.setframerate(self.config.RATE)
        w.writeframes(b''.join(frames))
        w.close()
        yield
Record PyAudio stream into StringIO output This coroutine keeps stream open; the stream is closed in stop()
9,737
def argmin(self, axis=None, skipna=True, *args, **kwargs):
    nv.validate_argmin(args, kwargs)
    nv.validate_minmax_axis(axis)
    i8 = self.asi8
    if self.hasnans:
        mask = self._isnan
        if mask.all() or not skipna:
            return -1
        i8 = i8.copy()
        # Mask NaT slots with the maximum so they never win the argmin.
        i8[mask] = np.iinfo('int64').max
    return i8.argmin()
Returns the indices of the minimum values along an axis. See `numpy.ndarray.argmin` for more information on the `axis` parameter. See Also -------- numpy.ndarray.argmin
9,738
def next_event(self): if self._departures[0]._time < self._arrivals[0]._time: new_depart = heappop(self._departures) self._current_t = new_depart._time self._num_total -= 1 self.num_system -= 1 self.num_departures += 1 if self.collect_data and new_depart.agent_id in self.data: self.data[new_depart.agent_id][-1][2] = self._current_t if len(self.queue) > 0: agent = self.queue.popleft() if self.collect_data and agent.agent_id in self.data: self.data[agent.agent_id][-1][1] = self._current_t agent._time = self.service_f(self._current_t) agent.queue_action(self, 1) heappush(self._departures, agent) new_depart.queue_action(self, 2) self._update_time() return new_depart elif self._arrivals[0]._time < infty: arrival = heappop(self._arrivals) self._current_t = arrival._time if self._active: self._add_arrival() self.num_system += 1 self._num_arrivals += 1 if self.collect_data: b = 0 if self.num_system <= self.num_servers else 1 if arrival.agent_id not in self.data: self.data[arrival.agent_id] = \ [[arrival._time, 0, 0, len(self.queue) + b, self.num_system]] else: self.data[arrival.agent_id]\ .append([arrival._time, 0, 0, len(self.queue) + b, self.num_system]) arrival.queue_action(self, 0) if self.num_system <= self.num_servers: if self.collect_data: self.data[arrival.agent_id][-1][1] = arrival._time arrival._time = self.service_f(arrival._time) arrival.queue_action(self, 1) heappush(self._departures, arrival) else: self.queue.append(arrival) self._update_time()
Simulates the queue forward one event. Use :meth:`.simulate` instead. Returns ------- out : :class:`.Agent` (sometimes) If the next event is a departure then the departing agent is returned, otherwise nothing is returned. See Also -------- :meth:`.simulate` : Simulates the queue forward.
9,739
def generate_bigip_uri(base_uri, partition, name, sub_path, suffix, **kwargs):
    # Separator-transformation fills and the error text are assumed.
    _validate_uri_parts(base_uri, partition, name, sub_path, suffix, **kwargs)
    if kwargs.get('transform_name', False):
        if name != '':
            name = name.replace('/', '~')
    if kwargs.get('transform_subpath', False):
        if sub_path != '':
            sub_path = sub_path.replace('/', '~')
    if partition != '':
        partition = '~' + partition
    else:
        if sub_path:
            msg = 'When giving the uri a sub_path, a partition must ' \
                  'also be given.'
            raise InvalidURIComponentPart(msg)
    if sub_path != '' and partition != '':
        sub_path = '~' + sub_path
    if name != '' and partition != '':
        name = '~' + name
    tilded_partition_and_instance = partition + sub_path + name
    if suffix and not tilded_partition_and_instance:
        suffix = suffix.lstrip('/')
    REST_uri = base_uri + tilded_partition_and_instance + suffix
    return REST_uri
(str, str, str) --> str This function checks the supplied elements to see if each conforms to the specification for the appropriate part of the URI. These validations are conducted by the helper function _validate_uri_parts. After validation the parts are assembled into a valid BigIP REST URI string which is then submitted with appropriate metadata. >>> generate_bigip_uri('https://0.0.0.0/mgmt/tm/ltm/nat/', \ 'CUSTOMER1', 'nat52', params={'a':1}) 'https://0.0.0.0/mgmt/tm/ltm/nat/~CUSTOMER1~nat52' >>> generate_bigip_uri('https://0.0.0.0/mgmt/tm/ltm/nat/', \ 'CUSTOMER1', 'nat52', params={'a':1}, suffix='/wacky') 'https://0.0.0.0/mgmt/tm/ltm/nat/~CUSTOMER1~nat52/wacky' >>> generate_bigip_uri('https://0.0.0.0/mgmt/tm/ltm/nat/', '', '', \ params={'a':1}, suffix='/thwocky') 'https://0.0.0.0/mgmt/tm/ltm/nat/thwocky' ::Warning: There are cases where '/' and '~' characters are valid in the object name or subPath. This is indicated by passing the 'transform_name' or 'transform_subpath' boolean respectively as True. By default this is set to False.
9,740
def makeUserLoginMethod(username, password, locale=None):
    def _doLogin(soapStub):
        si = vim.ServiceInstance("ServiceInstance", soapStub)
        sm = si.content.sessionManager
        if not sm.currentSession:
            si.content.sessionManager.Login(username, password, locale)
    return _doLogin
Return a function that will call the vim.SessionManager.Login() method with the given parameters. The result of this function can be passed as the "loginMethod" to a SessionOrientedStub constructor.
9,741
def router_function(fn):
    @wraps(fn)
    def wrapper(*args, **kwargs):
        if platform_is_windows():
            raise RuntimeError(
                "Router interface is not available on Win32 systems.\n"
                "Configure AMS routes using the TwinCAT router service."
            )
        return fn(*args, **kwargs)
    return wrapper
Raise a runtime error if on Win32 systems. Decorator. Decorator for functions that interact with the router for the Linux implementation of the ADS library. Unlike the Windows implementation which uses a separate router daemon, the Linux library manages AMS routing in-process. As such, routing must be configured programmatically via the provided API. These endpoints are invalid on Win32 systems, so an exception will be raised.
9,742
def _get_content_type(self, file):
    if file.mimetype:
        return file.mimetype
    _, extension = os.path.splitext(file.name)
    extension = extension.strip()
    # Fallback content type assumed.
    return media_types[extension] if extension in media_types \
        else 'application/octet-stream'
Return content type of file. If file does not have a content type, make a guess.
9,743
def _encode_text(name, value, dummy0, dummy1):
    value = _utf_8_encode(value)[0]
    return b"\x02" + name + _PACK_INT(len(value) + 1) + value + b"\x00"
Encode a python unicode (python 2.x) / str (python 3.x).
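The BSON layout this produces, illustrated. The `name` argument arrives as an already NUL-terminated C-string, and the little-endian int32 counts the value bytes plus the trailing NUL; the module's `_utf_8_encode`/`_PACK_INT` helpers (as in PyMongo's bson) are assumed available.

encoded = _encode_text(b"key\x00", u"hi", None, None)
assert encoded == b"\x02" + b"key\x00" + b"\x03\x00\x00\x00" + b"hi" + b"\x00"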
9,744
def get_passenger_queue_stats(self): queue_stats = { "top_level_queue_size": 0.0, "passenger_queue_size": 0.0, } command = [self.config["passenger_status_bin"]] if str_to_bool(self.config["use_sudo"]): command.insert(0, self.config["sudo_cmd"]) try: proc1 = subprocess.Popen(command, stdout=subprocess.PIPE) (std_out, std_err) = proc1.communicate() except OSError: return {} if std_out is None: return {} re_colour = re.compile("\x1B\[([0-9]{1,3}((;[0-9]{1,3})*)?)?[m|K]") re_requests = re.compile(r"Requests") re_topqueue = re.compile(r"^top-level") gen_info_flag = 0 app_groups_flag = 0 for raw_line in std_out.splitlines(): line = re_colour.sub("", raw_line) if "General information" in line: gen_info_flag = 1 if "Application groups" in line: app_groups_flag = 1 elif re_requests.match(line) and re_topqueue.search(line): line_splitted = line.split() if gen_info_flag == 1 and line_splitted: queue_stats["top_level_queue_size"] = float( line_splitted[5]) elif re_requests.search(line) and not re_topqueue.search(line): line_splitted = line.split() if app_groups_flag == 1 and line_splitted: queue_stats["passenger_queue_size"] = float( line_splitted[3]) return queue_stats
Execute passenger-status, parse its output, and return the sizes of the top-level and application request queues.
9,745
def SetProtocol(self, protocol):
    protocol = protocol.lower().strip()
    # Valid values recovered from the documented behaviour.
    if protocol not in ['http', 'https']:
        raise ValueError('Unsupported protocol: {0!s}'.format(protocol))
    self._analyzer.SetProtocol(protocol)
Sets the protocol that will be used to query Viper. Args: protocol (str): protocol to use to query Viper. Either 'http' or 'https'. Raises: ValueError: If an invalid protocol is selected.
9,746
def reset(self, hard=False):
    if hard:
        self.sendcommand(Vendapin.RESET, 1, 0x01)
        time.sleep(2)
    else:
        self.sendcommand(Vendapin.RESET)
        time.sleep(2)
    response = self.receivepacket()
    print('Vendapin.RESET response: ' + str(response))  # message text assumed
Reset the card dispenser, either soft or hard, based on the boolean second argument.
9,747
def _configure_key_pair(config): if "ssh_private_key" in config["auth"]: return config ssh_user = config["auth"]["ssh_user"] project = compute.projects().get( project=config["provider"]["project_id"]).execute() ssh_keys_str = next( (item for item in project["commonInstanceMetadata"].get("items", []) if item["key"] == "ssh-keys"), {}).get("value", "") ssh_keys = ssh_keys_str.split("\n") if ssh_keys_str else [] key_found = False for i in range(10): key_name = key_pair_name(i, config["provider"]["region"], config["provider"]["project_id"], ssh_user) public_key_path, private_key_path = key_pair_paths(key_name) for ssh_key in ssh_keys: key_parts = ssh_key.split(" ") if len(key_parts) != 3: continue if key_parts[2] == ssh_user and os.path.exists(private_key_path): key_found = True break if not key_found and not os.path.exists(private_key_path): logger.info("_configure_key_pair: " "Creating new key pair {}".format(key_name)) public_key, private_key = generate_rsa_key_pair() _create_project_ssh_key_pair(project, public_key, ssh_user) with open(private_key_path, "w") as f: f.write(private_key) os.chmod(private_key_path, 0o600) with open(public_key_path, "w") as f: f.write(public_key) key_found = True break if key_found: break assert key_found, "SSH keypair for user {} not found for {}".format( ssh_user, private_key_path) assert os.path.exists(private_key_path), ( "Private key file {} not found for user {}" "".format(private_key_path, ssh_user)) logger.info("_configure_key_pair: " "Private key not specified in config, using" "{}".format(private_key_path)) config["auth"]["ssh_private_key"] = private_key_path return config
Configure SSH access, using an existing key pair if possible. Creates a project-wide ssh key that can be used to access all the instances unless explicitly prohibited by instance config. The ssh-keys created by ray are of format: [USERNAME]:ssh-rsa [KEY_VALUE] [USERNAME] where: [USERNAME] is the user for the SSH key, specified in the config. [KEY_VALUE] is the public SSH key value.
9,748
def _set_mpls_interface(self, v, load=False): if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=YANGListType("interface_type interface_name",mpls_interface.mpls_interface, yang_name="mpls-interface", rest_name="mpls-interface", parent=self, is_container=, user_ordered=False, path_helper=self._path_helper, yang_keys=, extensions={u: {u: u, u: None, u: None, u: u, u: u}}), is_container=, yang_name="mpls-interface", rest_name="mpls-interface", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u: {u: u, u: None, u: None, u: u, u: u}}, namespace=, defining_module=, yang_type=, is_config=True) except (TypeError, ValueError): raise ValueError({ : , : "list", : , }) self.__mpls_interface = t if hasattr(self, ): self._set()
Setter method for mpls_interface, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/mpls_interface (list) If this variable is read-only (config: false) in the source YANG file, then _set_mpls_interface is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_mpls_interface() directly.
9,749
def render_document(template_name, data_name, output_name):
    # Loader arguments assumed: templates ship in the aide_document package.
    env = Environment(loader=PackageLoader('aide_document', 'templates'))
    with open(output_name, 'w') as output_file:
        output = env.get_template(template_name).render(
            yaml.load(open(data_name)))
        output_file.write(output)
Combines a MarkDown template file from the aide_document package with a local associated YAML data file, then outputs the rendered combination to a local MarkDown output file. Parameters ========== template_name : String Exact name of the MarkDown template file from the aide_document/templates folder. Do not use the file path. data_name : String Relative file path from where this method is called to the location of the YAML data file to be used. output_name : String Relative file path from where this method is called to the location to which the output file is written. Examples ======== Suppose we have template.md in aide_document and a directory as follows: data/ params.yaml To render the document: >>> from aide_document import combine >>> combine.render_document('template.md', 'data/params.yaml', 'data/output.md') This will then combine the data and template files and write to a new output file within data/.
9,750
def create_asset(json):
    # Key names assumed from Contentful's asset JSON layout.
    result = Asset(json['sys'])
    file_dict = json['fields']['file']
    result.fields = json['fields']
    result.url = file_dict['url']
    result.mimeType = file_dict['contentType']
    return result
Create :class:`.resources.Asset` from JSON. :param json: JSON dict. :return: Asset instance.
9,751
def _field_sort_name(cls, name):
    if isinstance(cls.__dict__[name], DateItemField):
        # Substitutions recovered from the documented behaviour.
        name = re.sub('year', 'date0', name)
        name = re.sub('month', 'date1', name)
        name = re.sub('day', 'date2', name)
    return name
Get a sort key for a field name that determines the order fields should be written in. Fields names are kept unchanged, unless they are instances of :class:`DateItemField`, in which case `year`, `month`, and `day` are replaced by `date0`, `date1`, and `date2`, respectively, to make them appear in that order.
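The effect of the substitutions on ordering, shown with a standalone helper that mirrors the regex logic (the real method also checks the field's class):

import re

def _date_sort_key(name):
    name = re.sub('year', 'date0', name)
    name = re.sub('month', 'date1', name)
    return re.sub('day', 'date2', name)

assert sorted(["day", "year", "month"], key=_date_sort_key) == ["year", "month", "day"]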
9,752
def parse_atoms(self):
    atom_site_header_tag = self.main_tag.getElementsByTagName("PDBx:atom_siteCategory")
    assert(len(atom_site_header_tag) == 1)
    atom_site_header_tag = atom_site_header_tag[0]
    atom_site_tags = atom_site_header_tag.getElementsByTagName("PDBx:atom_site")
    residue_map = {}
    residues_read = {}
    int_type = types.IntType
    for t in atom_site_tags:
        r, seqres, ResidueAA, Residue3AA = PDBML_slow.parse_atom_site(t, self.modified_residues)
        if r:
            # Residue name recovered from the ACE special case named above.
            if not(self.pdb_id in cases_with_ACE_residues_we_can_ignore and Residue3AA == 'ACE'):
                full_residue_id = str(r)
                if residues_read.get(full_residue_id):
                    assert(residues_read[full_residue_id] == (r.ResidueAA, seqres))
                else:
                    residues_read[full_residue_id] = (r.ResidueAA, seqres)
                    residue_map[r.Chain] = residue_map.get(r.Chain, {})
                    assert(type(seqres) == int_type)
                    residue_map[r.Chain][str(r)] = seqres
    atom_to_seqres_sequence_maps = {}
    for chain_id, atom_seqres_mapping in residue_map.iteritems():
        atom_to_seqres_sequence_maps[chain_id] = SequenceMap.from_dict(atom_seqres_mapping)
    self.atom_to_seqres_sequence_maps = atom_to_seqres_sequence_maps
All ATOM lines are parsed even though only one per residue needs to be parsed. The reason for parsing all the lines is just to sanity-check that the ATOMs within one residue are consistent with each other.
9,753
def t_whitespace(self, s):
    # Token name assumed.
    self.add_token('WHITESPACE', s)
    self.pos += len(s)
r'\s+'
9,754
def init_publisher(app):
    @app.context_processor
    def inject_links():
        # Context keys assumed to mirror the attribute names.
        return {
            'websub_self_url': stack.top.websub_self_url,
            'websub_hub_url': stack.top.websub_hub_url,
            'websub_self_link': stack.top.websub_self_link,
            'websub_hub_link': stack.top.websub_hub_link,
        }
Calling this with your flask app as argument is required for the publisher decorator to work.
9,755
def __tomo_linear_inv(freqs, ops, weights=None, trace=None):
    if weights is not None:
        W = np.array(weights)
        if W.ndim == 1:
            W = np.diag(W)
    # Stack the vectorized projectors into the sensing matrix S.
    S = np.array([vectorize(m).conj()
                  for m in ops]).reshape(len(ops), ops[0].size)
    if weights is not None:
        S = np.dot(W, S)
    v = np.array(freqs)
    if weights is not None:
        v = np.dot(W, freqs)
    Sdg = S.T.conj()
    inv = np.linalg.pinv(np.dot(Sdg, S))
    ret = devectorize(np.dot(inv, np.dot(Sdg, v)))
    if trace is not None:
        ret = trace * ret / np.trace(ret)
    return ret
Reconstruct a matrix through linear inversion. Args: freqs (list[float]): list of observed frequencies. ops (list[np.array]): list of corresponding projectors. weights (list[float] or array_like): weights to be used for weighted fitting. trace (float or None): trace of returned operator. Returns: numpy.array: A numpy array of the reconstructed operator.
9,756
def find_branches(self, labels=False, unique=False):
    # Control flow reconstructed to match the documented examples.
    if labels is True:
        identifier = [self.label]
    else:
        identifier = [self]
    if self._children == []:
        # A leaf terminates every branch passing through it.
        return [identifier] if unique is True else identifier
    branches = []
    for child in self._children:
        if unique is True:
            for branch in child.find_branches(labels=labels, unique=True):
                branches.append(identifier + branch)
        else:
            branches.append(child.find_branches(labels=labels))
    if unique is True:
        return branches
    return identifier + branches
Recursively constructs a list of pointers of the tree's structure Args: labels (bool): If True, returned lists consist of node labels. If False (default), lists consist of node pointers. This option is mostly intended for debugging purposes. unique (bool): If True, return lists of all unique, linear branches of the tree. More accurately, it returns a list of lists where each list contains a single, unique, linear path from the calling node to the tree's leaf nodes. If False (default), a highly-nested list is returned where each nested list represents a branch point in the tree. See Examples for more. Examples: >>> from arandomness.trees import OmniTree >>> a = OmniTree(label='a') >>> b = OmniTree(label='b', parents=[a]) >>> c = OmniTree(label='c', parents=[b]) >>> d = OmniTree(label='d', parents=[b]) >>> e = OmniTree(label='e', parents=[c, d]) >>> a.find_branches(labels=True) ['a', ['b', ['c', ['e']], ['d', ['e']]]] >>> a.find_branches(labels=True, unique=True) [['a', 'b', 'c', 'e'], ['a', 'b', 'd', 'e']]
9,757
def __update_html(self, html): if platform.system() in ("Windows", "Microsoft"): html = re.sub(r"((?:[a-zA-Z]\:|\\\\[\w\.]+\\[\w.$]+)\\(?:[\w]+\\)*\w([\w.])+)", lambda x: foundations.strings.to_forward_slashes(x.group(1)), html) html = foundations.strings.replace(html, OrderedDict([(, ), ("\n", "")])) self.__evaluate_javascript("$(\"
Updates the View with given html content. :param html: Html content. :type html: unicode
9,758
def _future_command_unlocked(self, cmd):
    future = self._loop.create_future()
    asyncio_loop = self._loop.get_loop()

    def _done_callback(result):
        # Result dict keys assumed.
        retval = result['return_value']
        if not result['result']:
            future.set_exception(HardwareError(
                "Error executing synchronous command",
                command=cmd, return_value=retval))
        else:
            future.set_result(retval)

    callback = functools.partial(asyncio_loop.call_soon_threadsafe,
                                 _done_callback)
    self._commands.put((cmd, callback, True, None))
    return future
Run command as a coroutine and return a future. Args: loop (BackgroundEventLoop): The loop that we should attach the future too. cmd (list): The command and arguments that we wish to call. Returns: asyncio.Future: An awaitable future with the result of the operation.
9,759
def register(self, target):
    assert isinstance(target, VirtualTarget)
    if target.path():
        signature = target.path() + "-" + target.name()
    else:
        signature = "-" + target.name()
    result = None
    if signature not in self.cache_:
        self.cache_[signature] = []
    for t in self.cache_[signature]:
        a1 = t.action()
        a2 = target.action()
        if not result:
            if not a1 and not a2:
                result = t
            else:
                if a1 and a2 and a1.action_name() == a2.action_name() \
                        and a1.sources() == a2.sources():
                    ps1 = a1.properties()
                    ps2 = a2.properties()
                    p1 = ps1.base() + ps1.free() + \
                        b2.util.set.difference(ps1.dependency(),
                                               ps1.incidental())
                    p2 = ps2.base() + ps2.free() + \
                        b2.util.set.difference(ps2.dependency(),
                                               ps2.incidental())
                    if p1 == p2:
                        result = t
    if not result:
        self.cache_[signature].append(target)
        result = target
    self.recent_targets_.append(result)
    self.all_targets_.append(result)
    return result
Registers a new virtual target. Checks if there's already a registered target with the same name, type, project and subvariant properties, and also with the same sources and equal action. If such a target is found it is returned and 'target' is not registered. Otherwise, 'target' is registered and returned.
9,760
def _list_store_resources(self, request, head_id, filter_ids, resource_fetcher, block_xform): resources = [] if filter_ids and not request.head_id: for resource_id in filter_ids: try: resources.append(resource_fetcher(resource_id)) except (KeyError, ValueError, TypeError): pass else: current_id = head_id while current_id in self._block_store: block = self._block_store[current_id].block resources += block_xform(block) header = BlockHeader() header.ParseFromString(block.header) current_id = header.previous_block_id if request.head_id and filter_ids: matches = { r.header_signature: r for r in resources if r.header_signature in filter_ids } resources = [matches[i] for i in filter_ids if i in matches] return resources
Builds a list of blocks or resources derived from blocks, handling multiple possible filter requests: - filtered by a set of ids - filtered by head block - filtered by both id and head block - not filtered (all current resources) Note: This method will fail if `_block_store` has not been set Args: request (object): The parsed protobuf request object head_id (str): Either request.head_id, or the current chain head filter_ids (list of str): the resource ids (if any) to filter by resource_fetcher (function): Fetches a resource by its id Expected args: resource_id: The id of the resource to be fetched Expected return: object: The resource to be appended to the results block_xform (function): Transforms a block into a list of resources Expected args: block: A block object from the block store Expected return: list: To be concatenated to the end of the results Returns: list: List of blocks or data from blocks. If filtered by ids, they will be listed in the same order as the id filters, otherwise they will be ordered from newest to oldest
9,761
def post(self, action, data=None, headers=None):
    return self.request(make_url(self.endpoint, action),
                        method='POST', data=data, headers=headers)
Makes a POST request
9,762
def setColor(self, key, value):
    key = nativestring(key).capitalize()
    self._colorSet.setColor(key, value)
    if key == 'Base':  # keep the palette's base color in sync
        palette = self.palette()
        palette.setColor(palette.Base, value)
        self.setPalette(palette)
Sets the color value for the inputed color. :param key | <unicode> value | <QtGui.QColor>
9,763
def matches(self, client, event_data):
    for f in self.filters:
        if not f(client, event_data):
            return False
    return True
True if all filters are matching.
9,764
def invalidate(self):
    # Yggdrasil endpoint and payload keys assumed from the public API.
    endpoint = '/invalidate'
    payload = {
        'accessToken': self.access_token,
        'clientToken': self.client_token,
    }
    self._ygg_req(endpoint, payload)
    self.client_token = ''
    self.access_token = ''
    self.available_profiles = []
    self.selected_profile = {}
    return True
Invalidate access tokens with a client/access token pair Returns: dict: Empty or error dict
9,765
def _update_camera_pos(self):
    ch_em = self.events.transform_change
    with ch_em.blocker(self._update_transform):
        tr = self.transform
        tr.reset()
        up, forward, right = self._get_dim_vectors()
        pp1 = np.array([(0, 0, 0), (0, 0, -1), (1, 0, 0), (0, 1, 0)])
        pp2 = np.array([(0, 0, 0), forward, right, up])
        tr.set_mapping(pp1, pp2)
        tr.translate(-self._actual_distance * forward)
        self._rotate_tr()
        tr.scale([1.0/a for a in self._flip_factors])
        tr.translate(np.array(self.center))
Set the camera position and orientation
9,766
def resolve_and_call(self, func, extra_env=None):
    kwargs = self.resolve_parameters(func, extra_env=extra_env)
    return func(**kwargs)
Resolve function arguments and call them, possibly filling from the environment
9,767
def _expand_numparse(disable_numparse, column_count):
    if isinstance(disable_numparse, Iterable):
        numparses = [True] * column_count
        for index in disable_numparse:
            numparses[index] = False
        return numparses
    else:
        return [not disable_numparse] * column_count
Return a list of bools of length `column_count` which indicates whether number parsing should be used on each column. If `disable_numparse` is a list of indices, each of those indices are False, and everything else is True. If `disable_numparse` is a bool, then the returned list is all the same.
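Both input forms, illustrated:

assert _expand_numparse(False, 3) == [True, True, True]
assert _expand_numparse(True, 3) == [False, False, False]
assert _expand_numparse([0, 2], 4) == [False, True, False, True]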
9,768
def byaxis(self):
    space = self

    class NpyTensorSpacebyaxis(object):

        def __getitem__(self, indices):
            try:
                iter(indices)
            except TypeError:
                newshape = space.shape[indices]
            else:
                newshape = tuple(space.shape[i] for i in indices)

            if isinstance(space.weighting, ArrayWeighting):
                new_array = np.asarray(space.weighting.array[indices])
                weighting = NumpyTensorSpaceArrayWeighting(
                    new_array, space.weighting.exponent)
            else:
                weighting = space.weighting

            return type(space)(newshape, space.dtype, weighting=weighting)

        def __repr__(self):
            return repr(space) + '.byaxis'

    return NpyTensorSpacebyaxis()
Return the subspace defined along one or several dimensions. Examples -------- Indexing with integers or slices: >>> space = odl.rn((2, 3, 4)) >>> space.byaxis[0] rn(2) >>> space.byaxis[1:] rn((3, 4)) Lists can be used to stack spaces arbitrarily: >>> space.byaxis[[2, 1, 2]] rn((4, 3, 4))
9,769
def lock_status(self, resource_id, parent_id=None, account_id=None):
    account_id = self.get_account_id(account_id)
    # Query-parameter name assumed.
    params = parent_id and {'parent_id': parent_id} or None
    return self.http.get(
        "%s/%s/locks/%s" % (self.endpoint, account_id, resource_id),
        params=params, auth=self.get_api_auth())
Get the lock status for a given resource. For security groups, the parent id is their VPC.
9,770
def find_children(self):
    for i in range(len(self.vertices)):
        self.vertices[i].children = []
    for i in range(len(self.vertices)):
        for parent in self.vertices[i].parents:
            if i not in self.vertices[parent].children:
                self.vertices[parent].children.append(i)
Take a tree and set the children according to the parents. Takes a tree structure which lists the parents of each vertex, computes the children of each vertex, and stores them on the vertex objects.
9,771
def remove(self, *members):
    if self.serialized:
        members = list(map(self._dumps, members))
    return self._client.srem(self.key_prefix, *members)
Removes @members from the set -> #int the number of members that were removed from the set
9,772
def eth_getBlockByNumber(self, block=BLOCK_TAG_LATEST, tx_objects=True):
    block = validate_block(block)
    return self._call("eth_getBlockByNumber", [block, tx_objects])
TODO: documentation https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getblockbynumber TESTED
9,773
def list(self, pattern='*'):
    # Default pattern assumed: '*' matches every descriptor.
    if self._descriptors is None:
        self._descriptors = self._client.list_metric_descriptors(
            filter_string=self._filter_string,
            type_prefix=self._type_prefix)
    return [metric for metric in self._descriptors
            if fnmatch.fnmatch(metric.type, pattern)]
Returns a list of metric descriptors that match the filters. Args: pattern: An optional pattern to further filter the descriptors. This can include Unix shell-style wildcards. E.g. ``"compute*"``, ``"*cpu/load_??m"``. Returns: A list of MetricDescriptor objects that match the filters.
9,774
def _raw(cls, vertices, edges, out_edges, in_edges, head, tail):
    self = object.__new__(cls)
    self._out_edges = out_edges
    self._in_edges = in_edges
    self._head = head
    self._tail = tail
    self._vertices = vertices
    self._edges = edges
    return self
Private constructor for direct construction of an ObjectGraph from its attributes. vertices is the collection of vertices out_edges and in_edges map vertices to lists of edges head and tail map edges to objects.
9,775
def i18n(msg, event=None, lang='en', domain='core'):
    # Default language and domain values assumed.
    if event is not None:
        language = event.client.language
    else:
        language = lang
    domain = Domain(domain)
    return domain.get(language, msg)
Gettext function wrapper to return a message in a specified language by domain To use internationalization (i18n) on your messages, import it as '_' and use as usual. Do not forget to supply the client's language setting.
9,776
def ring_coding(array):
    n = len(array)
    codes = np.ones(n, dtype=Path.code_type) * Path.LINETO
    codes[0] = Path.MOVETO
    codes[-1] = Path.CLOSEPOLY
    return codes
Produces matplotlib Path codes for exterior and interior rings of a polygon geometry.
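For a closed square ring, for example:

import numpy as np
from matplotlib.path import Path

square = np.array([(0, 0), (1, 0), (1, 1), (0, 1), (0, 0)])
codes = ring_coding(square)
# codes == [MOVETO, LINETO, LINETO, LINETO, CLOSEPOLY]
path = Path(square, codes)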
9,777
def write_utf(self, s):
    utfstr = s.encode('utf-8')
    length = len(utfstr)
    if length > 64:
        raise NamePartTooLongException
    self.write_byte(length)
    self.write_string(utfstr, length)
Writes a UTF-8 string of a given length to the packet
9,778
def preprocess_async(train_dataset, output_dir, eval_dataset=None,
                     checkpoint=None, cloud=None):
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        if cloud is None:
            return _local.Local.preprocess(train_dataset, output_dir,
                                           eval_dataset, checkpoint)
        if not isinstance(cloud, dict):
            cloud = {}
        return _cloud.Cloud.preprocess(train_dataset, output_dir,
                                       eval_dataset, checkpoint, cloud)
Preprocess data. Produce output that can be used by training efficiently. Args: train_dataset: training data source to preprocess. Can be CsvDataset or BigQueryDataSet. If eval_dataset is None, the pipeline will randomly split train_dataset into train/eval set with 7:3 ratio. output_dir: The output directory to use. Preprocessing will create a sub directory under it for each run, and also update "latest" file which points to the latest preprocessed directory. Users are responsible for cleanup. Can be local or GCS path. eval_dataset: evaluation data source to preprocess. Can be CsvDataset or BigQueryDataSet. If specified, it will be used for evaluation during training, and train_dataset will be completely used for training. checkpoint: the Inception checkpoint to use. If None, a default checkpoint is used. cloud: a DataFlow pipeline option dictionary such as {'num_workers': 3}. If anything but not None, it will run in cloud. Otherwise, it runs locally. Returns: A google.datalab.utils.Job object that can be used to query state from or wait.
9,779
def save_figures(block, block_vars, gallery_conf):
    # Config keys from Sphinx-Gallery; error message texts assumed.
    image_path_iterator = block_vars['image_path_iterator']
    all_rst = u''
    prev_count = len(image_path_iterator)
    for scraper in gallery_conf['image_scrapers']:
        rst = scraper(block, block_vars, gallery_conf)
        if not isinstance(rst, basestring):
            raise TypeError('rst from scraper %r was not a string, '
                            'got type %s:\n%r' % (scraper, type(rst), rst))
        n_new = len(image_path_iterator) - prev_count
        for ii in range(n_new):
            current_path, _ = _find_image_ext(
                image_path_iterator.paths[prev_count + ii])
            if not os.path.isfile(current_path):
                raise RuntimeError('Scraper %s did not produce expected '
                                   'image:\n%s' % (scraper, current_path))
        all_rst += rst
    return all_rst
Save all open figures of the example code-block. Parameters ---------- block : tuple A tuple containing the (label, content, line_number) of the block. block_vars : dict Dict of block variables. gallery_conf : dict Contains the configuration of Sphinx-Gallery Returns ------- images_rst : str rst code to embed the images in the document.
9,780
def get_points_and_weights(w_func=lambda x : np.ones(x.shape), left=-1.0, right=1.0, num_points=5, n=4096): dx = (float(right)-left)/n z = np.hstack(np.linspace(left+0.5*dx, right-0.5*dx, n)) w = dx*w_func(z) (a, b) = discrete_gautschi(z, w, num_points) alpha = a beta = np.sqrt(b) J = np.diag(alpha) J += np.diag(beta, k=-1) J += np.diag(beta, k=1) (points,v) = np.linalg.eigh(J) ind = points.argsort() points = points[ind] weights = v[0,:]**2 * w.sum() weights = weights[ind] return (points, weights)
Quadrature points and weights for a weighting function.

Points and weights for approximating the integral
  I = \int_left^right f(x) w(x) dx
given the weighting function w(x) using the approximation
  I ~ sum_i w_i f(x_i)

Args:
  w_func: The weighting function w(x). Must be a function that takes one argument and is valid over the open interval (left, right).
  left: The left boundary of the interval
  right: The right boundary of the interval
  num_points: number of integration points to return
  n: the number of points to evaluate w_func at.

Returns:
  A tuple (points, weights) where points is a sorted array of the points x_i and weights gives the corresponding weights w_i.
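A hedged usage sketch, assuming the discrete_gautschi helper this snippet relies on is available; with the default unit weight on (-1, 1) the rule reduces to Gauss-Legendre quadrature:

import numpy as np

# Integrate f(x) = x**2 over [-1, 1]; the exact value is 2/3.
points, weights = get_points_and_weights(num_points=5)
approx = np.sum(weights * points**2)
print(approx)  # ~0.6667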
9,781
def submit_form(self, form, submit=None, **kwargs): method = form.method.upper() url = self._build_url(form.action) or self.url payload = form.serialize(submit=submit) serialized = payload.to_requests(method) send_args = self._build_send_args(**kwargs) send_args.update(serialized) response = self.session.request(method, url, **send_args) self._update_state(response)
Submit a form. :param Form form: Filled-out form object :param Submit submit: Optional `Submit` to click, if form includes multiple submits :param kwargs: Keyword arguments to `Session::send`
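A hedged usage sketch in the RoboBrowser style this method follows; the URL and field names are hypothetical:

from robobrowser import RoboBrowser

browser = RoboBrowser(history=True)
browser.open('https://example.com/login')
form = browser.get_form(id='login-form')
form['username'].value = 'alice'
form['password'].value = 'secret'
browser.submit_form(form)  # POSTs the serialized form and updates state
print(browser.url)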
9,782
def predict_proba(self, X): y_probas = [] for yp in self.forward_iter(X, training=False): yp = yp[0] if isinstance(yp, tuple) else yp y_probas.append(to_numpy(yp)) y_proba = np.concatenate(y_probas, 0) return y_proba
Return the output of the module's forward method as a numpy array. If the module's forward method returns multiple outputs as a tuple, it is assumed that the first output contains the relevant information and the other values are ignored. If all values are relevant, consider using :func:`~skorch.NeuralNet.forward` instead. Parameters ---------- X : input data, compatible with skorch.dataset.Dataset By default, you should be able to pass: * numpy arrays * torch tensors * pandas DataFrame or Series * scipy sparse CSR matrices * a dictionary of the former three * a list/tuple of the former three * a Dataset If this doesn't work with your data, you have to pass a ``Dataset`` that can deal with the data. Returns ------- y_proba : numpy ndarray
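A hedged usage sketch, assuming `net` is an already-fitted skorch NeuralNetClassifier and the features are float32:

import numpy as np

X = np.random.rand(8, 20).astype(np.float32)
y_proba = net.predict_proba(X)   # numpy array, one row per sample
y_pred = y_proba.argmax(axis=1)  # hard labels from the probabilities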
9,783
def get_method_serializers(self, http_method):
    # The stripped string literals are recovered from the docstring: HEAD
    # requests fall back to GET serializers when none are defined for HEAD.
    if http_method == 'HEAD' and 'HEAD' not in self.method_serializers:
        http_method = 'GET'
    return (
        self.method_serializers.get(http_method, self.serializers),
        self.default_method_media_type.get(
            http_method, self.default_media_type)
    )
Get request method serializers + default media type. Grab serializers from ``method_serializers`` if defined, otherwise returns the default serializers. Uses GET serializers for HEAD requests if no HEAD serializers were specified. The method also determines the default media type. :param http_method: HTTP method as a string. :returns: Tuple of serializers and default media type.
9,784
def get_protocol_version(protocol=None, target=None):
    # The stripped dict keys and warning messages are reconstructed; the key
    # names follow the py_internals convention and are assumptions.
    target = get_py_internals(target)
    if protocol is None:
        protocol = target['pickle_default_protocol']
    if protocol > cPickle.HIGHEST_PROTOCOL:
        warnings.warn('Downgrading pickle protocol, running python supports '
                      'up to %d.' % cPickle.HIGHEST_PROTOCOL)
        protocol = cPickle.HIGHEST_PROTOCOL
    target_highest_protocol = target['pickle_highest_protocol']
    if protocol > target_highest_protocol:
        warnings.warn('Downgrading pickle protocol, target python supports '
                      'up to %d.' % target_highest_protocol)
        protocol = target_highest_protocol
    return protocol
Return a suitable pickle protocol version for a given target. Arguments: target: The internals description of the targeted python version. If this is ``None`` the specification of the currently running python version will be used. protocol(None or int): The requested protocol version (or None for the default of the target python version). Returns: int: A suitable pickle protocol version.
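A short sketch of the clamping behaviour: requesting a protocol above what either side supports emits a warning and lowers the value.

proto = get_protocol_version(protocol=2)  # honoured if both sides support it
default = get_protocol_version()          # target's default protocol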
9,785
def get_free_region(self, width, height): best_height = best_width = np.inf best_index = -1 for i in range(len(self._atlas_nodes)): y = self._fit(i, width, height) if y >= 0: node = self._atlas_nodes[i] if (y+height < best_height or (y+height == best_height and node[2] < best_width)): best_height = y+height best_index = i best_width = node[2] region = node[0], y, width, height if best_index == -1: return None node = region[0], region[1] + height, width self._atlas_nodes.insert(best_index, node) i = best_index+1 while i < len(self._atlas_nodes): node = self._atlas_nodes[i] prev_node = self._atlas_nodes[i-1] if node[0] < prev_node[0]+prev_node[2]: shrink = prev_node[0]+prev_node[2] - node[0] x, y, w = self._atlas_nodes[i] self._atlas_nodes[i] = x+shrink, y, w-shrink if self._atlas_nodes[i][2] <= 0: del self._atlas_nodes[i] i -= 1 else: break else: break i += 1 i = 0 while i < len(self._atlas_nodes)-1: node = self._atlas_nodes[i] next_node = self._atlas_nodes[i+1] if node[1] == next_node[1]: self._atlas_nodes[i] = node[0], node[1], node[2]+next_node[2] del self._atlas_nodes[i+1] else: i += 1 return region
Get a free region of given size and allocate it Parameters ---------- width : int Width of region to allocate height : int Height of region to allocate Returns ------- bounds : tuple | None A newly allocated region as (x, y, w, h) or None (if failed).
9,786
def parse_version(version: str) -> tuple:
    if not version:
        return None
    # The separator was stripped from this snippet; '.' is implied by the
    # doctests in the docstring.
    parts = version.split('.')
    missing = 3 - len(parts)
    return tuple(int(i) for i in parts + ([0] * missing))
Parse a string formatted X[.Y.Z] version number into a tuple >>> parse_version('10.2.3') (10, 2, 3) >>> parse_version('12') (12, 0, 0)
9,787
def prepare(self, cache): if cache is not None: np.copyto(self.qubits, cache) else: self.qubits.fill(0.0) self.qubits[0] = 1.0 self.cregs = [0] * self.n_qubits
Prepare to run next shot.
9,788
def conditional_accept(self):
    # The stripped comparison string is assumed to be the empty combo-box
    # entry, i.e. no calibration file selected.
    if self.ui.calfileRadio.isChecked() and \
            str(self.ui.calChoiceCmbbx.currentText()) == '':
        self.ui.noneRadio.setChecked(True)
    if self.ui.calfileRadio.isChecked():
        try:
            x, freqs = self.datafile.get_calibration(
                str(self.ui.calChoiceCmbbx.currentText()), self.calf)
        except IOError:
            QtGui.QMessageBox.warning(self, "File Read Error",
                                      "Unable to read calibration file")
            return
        except KeyError:
            QtGui.QMessageBox.warning(self, "File Data Error",
                                      "Unable to find data in file")
            return
        if self.ui.frangeLowSpnbx.value() < freqs[0] or \
                self.ui.frangeHighSpnbx.value() > freqs[-1]:
            QtGui.QMessageBox.warning(
                self, "Invalid Frequency Range",
                "Provided frequencies outside of calibration file range "
                "of {} - {} Hz".format(freqs[0], freqs[-1]))
            return
    self.accept()
Accepts the inputs if all values are valid and congruent. i.e. Valid datafile and frequency range within the given calibration dataset.
9,789
def _set_fetcher_options(self, base):
    # The stripped string literals are reconstructed from setuptools'
    # easy_install command and should be treated as assumptions.
    ei_opts = self.distribution.get_option_dict('easy_install').copy()
    fetch_directives = (
        'find_links', 'site_dirs', 'index_url', 'optimize', 'allow_hosts',
    )
    fetch_options = {}
    for key, val in ei_opts.items():  # iteritems() on Python 2
        if key not in fetch_directives:
            continue
        fetch_options[key.replace('_', '-')] = val[1]
    # create a settings dictionary suitable for `edit_config`
    settings = dict(easy_install=fetch_options)
    cfg_filename = os.path.join(base, 'setup.cfg')
    setopt.edit_config(cfg_filename, settings)
When easy_install is about to run bdist_egg on a source dist, that source dist might have 'setup_requires' directives, requiring additional fetching. Ensure the fetcher options given to easy_install are available to that command as well.
9,790
def validate_key(request, group=None, perm=None, keytype=None):
    def update_last_access():
        if KEY_LAST_USED_UPDATE:
            request.key.save()
    if request.user.is_authenticated() and is_valid_consumer(request):
        if not group and not perm and not keytype:
            return update_last_access()
        elif keytype:
            if request.key.is_type(keytype):
                return update_last_access()
        elif group:
            if request.key.belongs_to_group(group):
                return update_last_access()
        elif perm:
            if request.key.has_perm(perm):
                return update_last_access()
        raise AccessForbidden
    raise AccessUnauthorized
Validate the given key
9,791
def receive(self, msg): x = self.routing while not isinstance(x, ActionList): if not x or not msg: return None, msg if not isinstance(x, dict): raise ValueError( % type(x)) _, value = msg.popitem(last=False) x = x.get(str(value)) return x, msg
Returns a (receiver, msg) pair, where receiver is `None` if no route for the message was found, or otherwise an object with a `receive` method that can accept that `msg`.
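A hedged sketch of the routing scheme: nested dicts are keyed on the stringified values popped from the front of the message, terminating in an ActionList. The routing table, field names, and action objects below are hypothetical:

from collections import OrderedDict

# red_actions / blue_actions stand in for ActionList instances from the
# surrounding module.
router.routing = {
    'command': {
        'red': red_actions,
        'blue': blue_actions,
    },
}
msg = OrderedDict([('type', 'command'), ('color', 'red'), ('value', 7)])
receiver, rest = router.receive(msg)
# receiver is red_actions; rest == OrderedDict([('value', 7)])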
9,792
def _ecc_encode_compressed_point(private_key): byte_length = (private_key.curve.key_size + 7) // 8 public_numbers = private_key.public_key().public_numbers() y_map = [b"\x02", b"\x03"] if private_key.curve.name.startswith("secp"): y_order = public_numbers.y % 2 y = y_map[y_order] else: raise NotSupportedError("Non-prime curves are not supported at this time") return y + int_to_bytes(public_numbers.x, byte_length)
Encodes a compressed elliptic curve point as described in SEC-1 v2 section 2.3.3 http://www.secg.org/sec1-v2.pdf :param private_key: Private key from which to extract point data :type private_key: cryptography.hazmat.primitives.asymmetric.ec.EllipticCurvePrivateKey :returns: Encoded compressed elliptic curve point :rtype: bytes :raises NotSupportedError: for non-prime curves
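A hedged usage sketch on a prime curve; the snippet itself assumes int_to_bytes (e.g. cryptography.utils.int_to_bytes) and NotSupportedError are in scope:

from cryptography.hazmat.primitives.asymmetric import ec

# The compressed form is one parity byte (0x02 for even y, 0x03 for odd y)
# followed by the big-endian x coordinate.
private_key = ec.generate_private_key(ec.SECP256R1())
compressed = _ecc_encode_compressed_point(private_key)
assert len(compressed) == 33  # 1 parity byte + 32-byte x for P-256
assert compressed[0] in (2, 3)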
9,793
def default_multivariate_normal_fn(dtype, shape, name, trainable, add_variable_fn): del name, trainable, add_variable_fn dist = tfd.Normal(loc=tf.zeros(shape, dtype), scale=dtype.as_numpy_dtype(1)) batch_ndims = tf.size(input=dist.batch_shape_tensor()) return tfd.Independent(dist, reinterpreted_batch_ndims=batch_ndims)
Creates multivariate standard `Normal` distribution. Args: dtype: Type of parameter's event. shape: Python `list`-like representing the parameter's event shape. name: Python `str` name prepended to any created (or existing) `tf.Variable`s. trainable: Python `bool` indicating all created `tf.Variable`s should be added to the graph collection `GraphKeys.TRAINABLE_VARIABLES`. add_variable_fn: `tf.get_variable`-like `callable` used to create (or access existing) `tf.Variable`s. Returns: Multivariate standard `Normal` distribution.
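A hedged sketch: the signature matches the prior/posterior hooks used by tfp.layers variational layers, so the function can be plugged in as a kernel prior.

import tensorflow_probability as tfp

layer = tfp.layers.DenseFlipout(
    units=10,
    kernel_prior_fn=default_multivariate_normal_fn,
)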
9,794
def check_rights(self, resources, request=None):
    if not self.auth:
        return True
    try:
        if not self.auth.test_rights(resources, request=request):
            raise AssertionError()
    except AssertionError as e:  # 'except AssertionError, e' is Python 2 only
        raise HttpError("Access forbidden. {0}".format(e),
                        status=status.HTTP_403_FORBIDDEN)
Check rights for resources. :return bool: True if the check passes; raises HttpError with HTTP_403_FORBIDDEN otherwise.
9,795
def get_task_summary(self, task_name):
    # The stripped dict keys and JSON field names are reconstructed from
    # PyODPS conventions and should be treated as assumptions.
    params = {'instancesummary': '', 'taskname': task_name}
    resp = self._client.get(self.resource(), params=params)
    map_reduce = resp.json().get('Instance')
    if map_reduce:
        json_summary = map_reduce.get('JsonSummary')
        if json_summary:
            summary = Instance.TaskSummary(json.loads(json_summary))
            summary.summary_text = map_reduce.get('Summary')
            summary.json_summary = json_summary
            return summary
Get a task's summary, mostly used for MapReduce. :param task_name: task name :return: summary as a dict parsed from JSON :rtype: dict
9,796
def ekrced(handle, segno, recno, column, nelts=_SPICE_EK_EKRCEX_ROOM_DEFAULT): handle = ctypes.c_int(handle) segno = ctypes.c_int(segno) recno = ctypes.c_int(recno) column = stypes.stringToCharP(column) nvals = ctypes.c_int(0) dvals = stypes.emptyDoubleVector(nelts) isnull = ctypes.c_int() libspice.ekrced_c(handle, segno, recno, column, ctypes.byref(nvals), dvals, ctypes.byref(isnull)) assert failed() or (nvals.value <= nelts) return nvals.value, stypes.cVectorToPython(dvals)[:nvals.value], bool(isnull.value)
Read data from a double precision column in a specified EK record. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekrced_c.html :param handle: Handle attached to EK file. :type handle: int :param segno: Index of segment containing record. :type segno: int :param recno: Record from which data is to be read. :type recno: int :param column: Column name. :type column: str :return: Number of values in column entry, Float values in column entry, Flag indicating whether column entry is null. :rtype: tuple
9,797
def users_create_many(self, data, **kwargs):
    "https://developer.zendesk.com/rest_api/docs/core/users#create-many-users"
    api_path = "/api/v2/users/create_many.json"
    return self.call(api_path, method="POST", data=data, **kwargs)
https://developer.zendesk.com/rest_api/docs/core/users#create-many-users
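A hedged usage sketch; the payload shape follows the Zendesk endpoint the docstring links to, and `zd` is a hypothetical authenticated client:

data = {
    'users': [
        {'name': 'Roger Wilco', 'email': 'roger@example.org'},
        {'name': 'Rilco Woger', 'email': 'rilco@example.org'},
    ]
}
job_status = zd.users_create_many(data)  # bulk creation runs as an async job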
9,798
def set_zerg_client_params(self, server_sockets, use_fallback_socket=None):
    # The stripped uWSGI option names are assumed to be 'zerg' and
    # 'zerg-fallback', matching the parameters documented below.
    self._set('zerg', server_sockets, multi=True)
    if use_fallback_socket is not None:
        self._set('zerg-fallback', use_fallback_socket, cast=bool)
        for socket in listify(server_sockets):
            self._section.networking.register_socket(
                self._section.networking.sockets.default(socket))
    return self._section
Zerg mode: zerg client params. :param str|unicode|list[str|unicode] server_sockets: Attaches zerg to a zerg server. :param bool use_fallback_socket: Fallback to normal sockets if the zerg server is not available.
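A hedged usage sketch with uwsgiconf; the socket address and the exact location of the method on the section object are assumptions:

from uwsgiconf.config import Section

# Attach this uWSGI instance as a zerg client to a zerg server socket.
section = Section()
section.workers.set_zerg_client_params(
    server_sockets='/tmp/zerg_pool.sock',
    use_fallback_socket=True,
)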
9,799
def listen(self, **kwargs: Any) -> Server:
    # Generator-based coroutine (pre async/await style); 'requset_charset'
    # mirrors the attribute name used elsewhere in this class.
    loop = cast(asyncio.AbstractEventLoop, self._loop)
    return (yield from loop.create_server(
        lambda: self._protocol(
            loop=loop,
            handle=self._handle,
            requset_charset=self.requset_charset,
            response_charset=self.response_charset,
        ),
        **kwargs,
    ))
Bind to a host/port pair or an existing socket (passed through **kwargs to loop.create_server) and return the created Server.