Dataset columns:

  text          string   lengths 89 to 104k
  code_tokens   list
  avg_line_len  float64  7.91 to 980
  score         float64  0 to 630
def _dict_to_map_str_str(self, d):
    """
    Thrift requires the params and headers dict values to only contain
    str values.
    """
    return dict(map(
        lambda (k, v): (k, str(v).lower() if isinstance(v, bool) else str(v)),
        d.iteritems()
    ))
[ "def", "_dict_to_map_str_str", "(", "self", ",", "d", ")", ":", "return", "dict", "(", "map", "(", "lambda", "(", "k", ",", "v", ")", ":", "(", "k", ",", "str", "(", "v", ")", ".", "lower", "(", ")", "if", "isinstance", "(", "v", ",", "bool", ")", "else", "str", "(", "v", ")", ")", ",", "d", ".", "iteritems", "(", ")", ")", ")" ]
35.375
19.375
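The helper above relies on Python 2-only constructs (a tuple-unpacking lambda and dict.iteritems()). A minimal Python 3 sketch of the same coercion, assuming the same contract (booleans become lowercase strings, everything else passes through str):

def dict_to_map_str_str(d):
    """Coerce all dict values to str; booleans become 'true'/'false'."""
    return {k: (str(v).lower() if isinstance(v, bool) else str(v))
            for k, v in d.items()}

assert dict_to_map_str_str({"retries": 3, "gzip": True}) == \
    {"retries": "3", "gzip": "true"}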
def down_host(trg_queue, host, user=None, group=None, mode=None):
    '''
    Down a host queue by creating a down file in the host queue directory
    '''
    down(trg_queue, user=user, group=group, mode=mode, host=host)
[ "def", "down_host", "(", "trg_queue", ",", "host", ",", "user", "=", "None", ",", "group", "=", "None", ",", "mode", "=", "None", ")", ":", "down", "(", "trg_queue", ",", "user", "=", "user", ",", "group", "=", "group", ",", "mode", "=", "mode", ",", "host", "=", "host", ")" ]
54.5
24
def Stichlmair_wet(Vg, Vl, rhog, rhol, mug, voidage, specific_area, C1, C2,
                   C3, H=1):
    r'''Calculates irrigated pressure drop across a packed column, using the
    Stichlmair [1]_ correlation. Uses three regressed constants for each
    type of packing, and voidage and specific area. This model is for
    irrigated columns only.

    Pressure drop is given by:

    .. math::
        \frac{\Delta P_{irr}}{H} = \frac{\Delta P_{dry}}{H}\left(\frac
        {1-\epsilon + h_T}{1-\epsilon}\right)^{(2+c)/3}
        \left(\frac{\epsilon}{\epsilon-h_T}\right)^{4.65}

    .. math::
        h_T = h_0\left[1 + 20\left(\frac{\Delta P_{irr}}{H\rho_L g}\right)^2\right]

    .. math::
        Fr_L = \frac{V_L^2 a}{g \epsilon^{4.65}}

    .. math::
        h_0 = 0.555 Fr_L^{1/3}

    .. math::
        c = \frac{-C_1/Re_g - C_2/(2Re_g^{0.5})}{f_0}

    .. math::
        \Delta P_{dry} = \frac{3}{4} f_0 \frac{1-\epsilon}{\epsilon^{4.65}}
        \rho_G \frac{H}{d_p}V_g^2

    .. math::
        f_0 = \frac{C_1}{Re_g} + \frac{C_2}{Re_g^{0.5}} + C_3

    .. math::
        d_p = \frac{6(1-\epsilon)}{a}

    Parameters
    ----------
    Vg : float
        Superficial velocity of gas, Q/A [m/s]
    Vl : float
        Superficial velocity of liquid, Q/A [m/s]
    rhog : float
        Density of gas [kg/m^3]
    rhol : float
        Density of liquid [kg/m^3]
    mug : float
        Viscosity of gas [Pa*s]
    voidage : float
        Voidage of bed of packing material []
    specific_area : float
        Specific area of the packing material [m^2/m^3]
    C1 : float
        Packing-specific constant []
    C2 : float
        Packing-specific constant []
    C3 : float
        Packing-specific constant []
    H : float, optional
        Height of packing [m]

    Returns
    -------
    dP : float
        Pressure drop across irrigated packing [Pa]

    Notes
    -----
    This model is used by most process simulation tools. If H is not
    provided, it defaults to 1.

    A numerical solver is used and needed by this model. Its initial
    guess is the dry pressure drop. Convergence problems may occur.
    The model as described in [1]_ appears to have a typo, and could not
    match the example. As described in [2]_, however, the model works.

    Examples
    --------
    Example is from [1]_, matches.

    >>> Stichlmair_wet(Vg=0.4, Vl = 5E-3, rhog=5., rhol=1200., mug=5E-5,
    ... voidage=0.68, specific_area=260., C1=32., C2=7., C3=1.)
    539.8768237253518

    References
    ----------
    .. [1] Stichlmair, J., J. L. Bravo, and J. R. Fair. "General Model for
       Prediction of Pressure Drop and Capacity of Countercurrent Gas/liquid
       Packed Columns." Gas Separation & Purification 3, no. 1 (March 1989):
       19-28. doi:10.1016/0950-4214(89)80016-7.
    .. [2] Piche, Simon R., Faical Larachi, and Bernard P. A. Grandjean.
       "Improving the Prediction of Irrigated Pressure Drop in Packed
       Absorption Towers." The Canadian Journal of Chemical Engineering 79,
       no. 4 (August 1, 2001): 584-94. doi:10.1002/cjce.5450790417.
    '''
    dp = 6.0*(1.0 - voidage)/specific_area
    Re = Vg*rhog*dp/mug
    f0 = C1/Re + C2/Re**0.5 + C3
    dP_dry = 3/4.*f0*(1 - voidage)/voidage**4.65*rhog*H/dp*Vg*Vg
    c = (-C1/Re - C2/(2*Re**0.5))/f0
    Frl = Vl**2*specific_area/(g*voidage**4.65)
    h0 = 0.555*Frl**(1/3.)

    def to_zero(dP_irr):
        hT = h0*(1.0 + 20.0*(dP_irr/H/rhol/g)**2)
        err = (dP_dry/H*((1 - voidage + hT)/(1.0 - voidage))**((2.0 + c)/3.)
               *(voidage/(voidage - hT))**4.65 - dP_irr/H)
        return err
    return float(newton(to_zero, dP_dry))
[ "def", "Stichlmair_wet", "(", "Vg", ",", "Vl", ",", "rhog", ",", "rhol", ",", "mug", ",", "voidage", ",", "specific_area", ",", "C1", ",", "C2", ",", "C3", ",", "H", "=", "1", ")", ":", "dp", "=", "6.0", "*", "(", "1.0", "-", "voidage", ")", "/", "specific_area", "Re", "=", "Vg", "*", "rhog", "*", "dp", "/", "mug", "f0", "=", "C1", "/", "Re", "+", "C2", "/", "Re", "**", "0.5", "+", "C3", "dP_dry", "=", "3", "/", "4.", "*", "f0", "*", "(", "1", "-", "voidage", ")", "/", "voidage", "**", "4.65", "*", "rhog", "*", "H", "/", "dp", "*", "Vg", "*", "Vg", "c", "=", "(", "-", "C1", "/", "Re", "-", "C2", "/", "(", "2", "*", "Re", "**", "0.5", ")", ")", "/", "f0", "Frl", "=", "Vl", "**", "2", "*", "specific_area", "/", "(", "g", "*", "voidage", "**", "4.65", ")", "h0", "=", "0.555", "*", "Frl", "**", "(", "1", "/", "3.", ")", "def", "to_zero", "(", "dP_irr", ")", ":", "hT", "=", "h0", "*", "(", "1.0", "+", "20.0", "*", "(", "dP_irr", "/", "H", "/", "rhol", "/", "g", ")", "**", "2", ")", "err", "=", "dP_dry", "/", "H", "*", "(", "(", "1", "-", "voidage", "+", "hT", ")", "/", "(", "1.0", "-", "voidage", ")", ")", "**", "(", "(", "2.0", "+", "c", ")", "/", "3.", ")", "*", "(", "voidage", "/", "(", "voidage", "-", "hT", ")", ")", "**", "4.65", "-", "dP_irr", "/", "H", "return", "err", "return", "float", "(", "newton", "(", "to_zero", ",", "dP_dry", ")", ")" ]
33.638095
24.438095
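The irrigated pressure drop appears on both sides of the correlation (through h_T), which is why the row above solves a residual with newton, seeded by the explicit dry pressure drop. A self-contained sketch of that solver pattern, with a toy residual standing in for the full correlation:

from scipy.optimize import newton

def solve_irrigated_dP(dP_dry, residual):
    # residual(dP_irr) returns (predicted dP_irr) - dP_irr; its root is the answer
    return float(newton(residual, x0=dP_dry))

# Toy residual with the same implicit shape: dP = dP_dry * (1 + 0.1*(dP/1000)**2)
dP_dry = 500.0
print(solve_irrigated_dP(dP_dry,
                         lambda dP: dP_dry*(1 + 0.1*(dP/1000.)**2) - dP))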
def get(self, sid):
    """
    Constructs a ParticipantContext

    :param sid: The sid

    :returns: twilio.rest.video.v1.room.room_participant.ParticipantContext
    :rtype: twilio.rest.video.v1.room.room_participant.ParticipantContext
    """
    return ParticipantContext(self._version, room_sid=self._solution['room_sid'], sid=sid, )
[ "def", "get", "(", "self", ",", "sid", ")", ":", "return", "ParticipantContext", "(", "self", ".", "_version", ",", "room_sid", "=", "self", ".", "_solution", "[", "'room_sid'", "]", ",", "sid", "=", "sid", ",", ")" ]
35.9
24.7
def create_query_index(
        self,
        design_document_id=None,
        index_name=None,
        index_type='json',
        partitioned=False,
        **kwargs
):
    """
    Creates either a JSON or a text query index in the remote database.

    :param str index_type: The type of the index to create. Can be either
        'text' or 'json'. Defaults to 'json'.
    :param str design_document_id: Optional identifier of the design
        document in which the index will be created. If omitted the default
        is that each index will be created in its own design document.
        Indexes can be grouped into design documents for efficiency.
        However, a change to one index in a design document will invalidate
        all other indexes in the same document.
    :param str index_name: Optional name of the index. If omitted, a name
        will be generated automatically.
    :param bool partitioned: Optional. Create the index as partitioned.
        Defaults to ``False``.
    :param list fields: A list of fields that should be indexed. For JSON
        indexes, the fields parameter is mandatory and should follow the
        'sort syntax'. For example ``fields=['name', {'age': 'desc'}]``
        will create an index on the 'name' field in ascending order and
        the 'age' field in descending order. For text indexes, the fields
        parameter is optional. If it is included then each field element
        in the fields list must be a single element dictionary where the
        key is the field name and the value is the field type. For example
        ``fields=[{'name': 'string'}, {'age': 'number'}]``. Valid field
        types are ``'string'``, ``'number'``, and ``'boolean'``.
    :param dict default_field: Optional parameter that specifies how the
        ``$text`` operator can be used with the index. Only valid when
        creating a text index.
    :param dict selector: Optional parameter that can be used to limit the
        index to a specific set of documents that match a query. It uses
        the same syntax used for selectors in queries. Only valid when
        creating a text index.

    :returns: An Index object representing the index created in the
        remote database
    """
    if index_type == JSON_INDEX_TYPE:
        index = Index(self, design_document_id, index_name,
                      partitioned=partitioned, **kwargs)
    elif index_type == TEXT_INDEX_TYPE:
        index = TextIndex(self, design_document_id, index_name,
                          partitioned=partitioned, **kwargs)
    else:
        raise CloudantArgumentError(103, index_type)
    index.create()
    return index
[ "def", "create_query_index", "(", "self", ",", "design_document_id", "=", "None", ",", "index_name", "=", "None", ",", "index_type", "=", "'json'", ",", "partitioned", "=", "False", ",", "*", "*", "kwargs", ")", ":", "if", "index_type", "==", "JSON_INDEX_TYPE", ":", "index", "=", "Index", "(", "self", ",", "design_document_id", ",", "index_name", ",", "partitioned", "=", "partitioned", ",", "*", "*", "kwargs", ")", "elif", "index_type", "==", "TEXT_INDEX_TYPE", ":", "index", "=", "TextIndex", "(", "self", ",", "design_document_id", ",", "index_name", ",", "partitioned", "=", "partitioned", ",", "*", "*", "kwargs", ")", "else", ":", "raise", "CloudantArgumentError", "(", "103", ",", "index_type", ")", "index", ".", "create", "(", ")", "return", "index" ]
52.25
24.903846
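Based on the 'sort syntax' described in the docstring above, usage might look like the following sketch (hypothetical; it assumes `db` is an already-connected Cloudant database object):

# JSON index: ascending on 'name', descending on 'age'.
index = db.create_query_index(
    index_name='name-age-idx',
    fields=['name', {'age': 'desc'}],
)

# Text index instead, typing each field explicitly.
text_index = db.create_query_index(
    index_type='text',
    fields=[{'name': 'string'}, {'age': 'number'}],
)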
def execute(self, X):
    """Execute the program according to X.

    Parameters
    ----------
    X : {array-like}, shape = [n_samples, n_features]
        Training vectors, where n_samples is the number of samples and
        n_features is the number of features.

    Returns
    -------
    y_hats : array-like, shape = [n_samples]
        The result of executing the program on X.
    """
    # Check for single-node programs
    node = self.program[0]
    if isinstance(node, float):
        return np.repeat(node, X.shape[0])
    if isinstance(node, int):
        return X[:, node]

    apply_stack = []
    for node in self.program:
        if isinstance(node, _Function):
            apply_stack.append([node])
        else:
            # Lazily evaluate later
            apply_stack[-1].append(node)

        while len(apply_stack[-1]) == apply_stack[-1][0].arity + 1:
            # Apply functions that have sufficient arguments
            function = apply_stack[-1][0]
            terminals = [np.repeat(t, X.shape[0]) if isinstance(t, float)
                         else X[:, t] if isinstance(t, int)
                         else t for t in apply_stack[-1][1:]]
            intermediate_result = function(*terminals)
            if len(apply_stack) != 1:
                apply_stack.pop()
                apply_stack[-1].append(intermediate_result)
            else:
                return intermediate_result
    # We should never get here
    return None
[ "def", "execute", "(", "self", ",", "X", ")", ":", "# Check for single-node programs", "node", "=", "self", ".", "program", "[", "0", "]", "if", "isinstance", "(", "node", ",", "float", ")", ":", "return", "np", ".", "repeat", "(", "node", ",", "X", ".", "shape", "[", "0", "]", ")", "if", "isinstance", "(", "node", ",", "int", ")", ":", "return", "X", "[", ":", ",", "node", "]", "apply_stack", "=", "[", "]", "for", "node", "in", "self", ".", "program", ":", "if", "isinstance", "(", "node", ",", "_Function", ")", ":", "apply_stack", ".", "append", "(", "[", "node", "]", ")", "else", ":", "# Lazily evaluate later", "apply_stack", "[", "-", "1", "]", ".", "append", "(", "node", ")", "while", "len", "(", "apply_stack", "[", "-", "1", "]", ")", "==", "apply_stack", "[", "-", "1", "]", "[", "0", "]", ".", "arity", "+", "1", ":", "# Apply functions that have sufficient arguments", "function", "=", "apply_stack", "[", "-", "1", "]", "[", "0", "]", "terminals", "=", "[", "np", ".", "repeat", "(", "t", ",", "X", ".", "shape", "[", "0", "]", ")", "if", "isinstance", "(", "t", ",", "float", ")", "else", "X", "[", ":", ",", "t", "]", "if", "isinstance", "(", "t", ",", "int", ")", "else", "t", "for", "t", "in", "apply_stack", "[", "-", "1", "]", "[", "1", ":", "]", "]", "intermediate_result", "=", "function", "(", "*", "terminals", ")", "if", "len", "(", "apply_stack", ")", "!=", "1", ":", "apply_stack", ".", "pop", "(", ")", "apply_stack", "[", "-", "1", "]", ".", "append", "(", "intermediate_result", ")", "else", ":", "return", "intermediate_result", "# We should never get here", "return", "None" ]
33.702128
18.106383
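The row above evaluates a prefix-encoded program with an explicit stack instead of recursion: ints index feature columns, floats broadcast as constants. A small self-contained sketch of the same idea, with a stand-in function node class (hypothetical, not gplearn's _Function):

import numpy as np

class Func:
    """Stand-in for gplearn's _Function: a callable with an arity."""
    def __init__(self, fn, arity):
        self.fn, self.arity = fn, arity

add = Func(np.add, 2)
mul = Func(np.multiply, 2)

# Program in prefix order: add(mul(X[:, 0], X[:, 1]), 2.0)
program = [add, mul, 0, 1, 2.0]
X = np.array([[1., 2.], [3., 4.]])

stack = []
result = None
for node in program:
    if isinstance(node, Func):
        stack.append([node])
    else:
        stack[-1].append(node)
    while len(stack[-1]) == stack[-1][0].arity + 1:
        f, *args = stack[-1]
        args = [np.repeat(a, len(X)) if isinstance(a, float)
                else X[:, a] if isinstance(a, int) else a
                for a in args]
        result = f.fn(*args)
        if len(stack) == 1:
            break
        stack.pop()
        stack[-1].append(result)

print(result)  # [ 4. 14.]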
def mapred(self, inputs, query, timeout=None):
    """
    Run a MapReduce query.
    """
    # Construct the job, optionally set the timeout...
    content = self._construct_mapred_json(inputs, query, timeout)

    # Do the request...
    url = self.mapred_path()
    headers = {'Content-Type': 'application/json'}
    status, headers, body = self._request('POST', url, headers, content)

    # Make sure the expected status code came back...
    if status != 200:
        raise RiakError(
            'Error running MapReduce operation. Headers: %s Body: %s' %
            (repr(headers), repr(body)))

    result = json.loads(bytes_to_str(body))
    return result
[ "def", "mapred", "(", "self", ",", "inputs", ",", "query", ",", "timeout", "=", "None", ")", ":", "# Construct the job, optionally set the timeout...", "content", "=", "self", ".", "_construct_mapred_json", "(", "inputs", ",", "query", ",", "timeout", ")", "# Do the request...", "url", "=", "self", ".", "mapred_path", "(", ")", "headers", "=", "{", "'Content-Type'", ":", "'application/json'", "}", "status", ",", "headers", ",", "body", "=", "self", ".", "_request", "(", "'POST'", ",", "url", ",", "headers", ",", "content", ")", "# Make sure the expected status code came back...", "if", "status", "!=", "200", ":", "raise", "RiakError", "(", "'Error running MapReduce operation. Headers: %s Body: %s'", "%", "(", "repr", "(", "headers", ")", ",", "repr", "(", "body", ")", ")", ")", "result", "=", "json", ".", "loads", "(", "bytes_to_str", "(", "body", ")", ")", "return", "result" ]
35.55
18.15
def parseUnits(self, inp):
    """Carries out a conversion (represented as a string) and returns the
    result as a human-readable string.

    Args:
        inp (str): Text representing a unit conversion, which should
            include a magnitude, a description of the initial units, and
            a description of the target units to which the quantity
            should be converted.

    Returns:
        A string describing the converted quantity and its new units.
    """
    quantity = self.convert(inp)
    units = ' '.join(str(quantity.units).split(' ')[1:])
    return NumberService.parseMagnitude(quantity.item()) + " " + units
[ "def", "parseUnits", "(", "self", ",", "inp", ")", ":", "quantity", "=", "self", ".", "convert", "(", "inp", ")", "units", "=", "' '", ".", "join", "(", "str", "(", "quantity", ".", "units", ")", ".", "split", "(", "' '", ")", "[", "1", ":", "]", ")", "return", "NumberService", ".", "parseMagnitude", "(", "quantity", ".", "item", "(", ")", ")", "+", "\" \"", "+", "units" ]
41.588235
21.705882
def _get_intercepts(self):
    """
    Concatenate all intercepts of the classifier.
    """
    temp_arr = self.temp('arr')
    for layer in self.intercepts:
        inter = ', '.join([self.repr(b) for b in layer])
        yield temp_arr.format(inter)
[ "def", "_get_intercepts", "(", "self", ")", ":", "temp_arr", "=", "self", ".", "temp", "(", "'arr'", ")", "for", "layer", "in", "self", ".", "intercepts", ":", "inter", "=", "', '", ".", "join", "(", "[", "self", ".", "repr", "(", "b", ")", "for", "b", "in", "layer", "]", ")", "yield", "temp_arr", ".", "format", "(", "inter", ")" ]
34.125
6.875
def configure_logging(level=logging.DEBUG):
    '''Configures the root logger for command line applications.

    A stream handler will be added to the logger that directs messages
    to the standard error stream. By default, *no* messages will be
    filtered out: set a higher level on derived/child loggers to achieve
    filtering.

    Warning
    -------
    Logging should only be configured once at the main entry point of
    the application!
    '''
    fmt = '%(asctime)s | %(levelname)-8s | %(name)-40s | %(message)s'
    datefmt = '%Y-%m-%d %H:%M:%S'
    formatter = logging.Formatter(fmt=fmt, datefmt=datefmt)

    logger = logging.getLogger()  # returns the root logger
    stderr_handler = logging.StreamHandler(stream=sys.stderr)
    stderr_handler.name = 'err'
    stderr_handler.setLevel(level)
    stderr_handler.setFormatter(formatter)
    logger.addHandler(stderr_handler)
[ "def", "configure_logging", "(", "level", "=", "logging", ".", "DEBUG", ")", ":", "fmt", "=", "'%(asctime)s | %(levelname)-8s | %(name)-40s | %(message)s'", "datefmt", "=", "'%Y-%m-%d %H:%M:%S'", "formatter", "=", "logging", ".", "Formatter", "(", "fmt", "=", "fmt", ",", "datefmt", "=", "datefmt", ")", "logger", "=", "logging", ".", "getLogger", "(", ")", "# returns the root logger", "stderr_handler", "=", "logging", ".", "StreamHandler", "(", "stream", "=", "sys", ".", "stderr", ")", "stderr_handler", ".", "name", "=", "'err'", "stderr_handler", ".", "setLevel", "(", "level", ")", "stderr_handler", ".", "setFormatter", "(", "formatter", ")", "logger", ".", "addHandler", "(", "stderr_handler", ")" ]
34.92
22.12
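A minimal usage sketch, assuming the function above is importable. Per its docstring, filtering is meant to happen on child loggers, which propagate records to the root handler configured here:

import logging

configure_logging(level=logging.DEBUG)   # call once, at the entry point
log = logging.getLogger("myapp.worker")
log.setLevel(logging.INFO)               # child loggers do their own filtering
log.info("pipeline started")             # reaches the stderr handler
log.debug("verbose detail")              # filtered out at the logger level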
def insert_bucket_acl(self, bucket_name, entity, role, user_project=None):
    """
    Creates a new ACL entry on the specified bucket_name.
    See: https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/insert

    :param bucket_name: Name of the bucket.
    :type bucket_name: str
    :param entity: The entity holding the permission, in one of the following
        forms: user-userId, user-email, group-groupId, group-email,
        domain-domain, project-team-projectId, allUsers,
        allAuthenticatedUsers.
        See: https://cloud.google.com/storage/docs/access-control/lists#scopes
    :type entity: str
    :param role: The access permission for the entity.
        Acceptable values are: "OWNER", "READER", "WRITER".
    :type role: str
    :param user_project: (Optional) The project to be billed for this request.
        Required for Requester Pays buckets.
    :type user_project: str
    """
    self.log.info('Creating a new ACL entry in bucket: %s', bucket_name)
    client = self.get_conn()
    bucket = client.bucket(bucket_name=bucket_name)
    bucket.acl.reload()
    bucket.acl.entity_from_dict(entity_dict={"entity": entity, "role": role})
    if user_project:
        bucket.acl.user_project = user_project
    bucket.acl.save()
    self.log.info('A new ACL entry created in bucket: %s', bucket_name)
[ "def", "insert_bucket_acl", "(", "self", ",", "bucket_name", ",", "entity", ",", "role", ",", "user_project", "=", "None", ")", ":", "self", ".", "log", ".", "info", "(", "'Creating a new ACL entry in bucket: %s'", ",", "bucket_name", ")", "client", "=", "self", ".", "get_conn", "(", ")", "bucket", "=", "client", ".", "bucket", "(", "bucket_name", "=", "bucket_name", ")", "bucket", ".", "acl", ".", "reload", "(", ")", "bucket", ".", "acl", ".", "entity_from_dict", "(", "entity_dict", "=", "{", "\"entity\"", ":", "entity", ",", "\"role\"", ":", "role", "}", ")", "if", "user_project", ":", "bucket", ".", "acl", ".", "user_project", "=", "user_project", "bucket", ".", "acl", ".", "save", "(", ")", "self", ".", "log", ".", "info", "(", "'A new ACL entry created in bucket: %s'", ",", "bucket_name", ")" ]
48.931034
23.551724
def _set_gre_source(self, v, load=False):
    """
    Setter method for gre_source, mapped from YANG variable
    /interface/tunnel/gre_source (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_gre_source is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_gre_source() directly.
    """
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v, base=gre_source.gre_source, is_container='container', presence=False, yang_name="gre-source", rest_name="source", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Source of tunnel', u'cli-full-no': None, u'alt-name': u'source'}}, namespace='urn:brocade.com:mgmt:brocade-gre-vxlan', defining_module='brocade-gre-vxlan', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """gre_source must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=gre_source.gre_source, is_container='container', presence=False, yang_name="gre-source", rest_name="source", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Source of tunnel', u'cli-full-no': None, u'alt-name': u'source'}}, namespace='urn:brocade.com:mgmt:brocade-gre-vxlan', defining_module='brocade-gre-vxlan', yang_type='container', is_config=True)""",
        })

    self.__gre_source = t
    if hasattr(self, '_set'):
        self._set()
[ "def", "_set_gre_source", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "base", "=", "gre_source", ".", "gre_source", ",", "is_container", "=", "'container'", ",", "presence", "=", "False", ",", "yang_name", "=", "\"gre-source\"", ",", "rest_name", "=", "\"source\"", ",", "parent", "=", "self", ",", "path_helper", "=", "self", ".", "_path_helper", ",", "extmethods", "=", "self", ".", "_extmethods", ",", "register_paths", "=", "True", ",", "extensions", "=", "{", "u'tailf-common'", ":", "{", "u'info'", ":", "u'Source of tunnel'", ",", "u'cli-full-no'", ":", "None", ",", "u'alt-name'", ":", "u'source'", "}", "}", ",", "namespace", "=", "'urn:brocade.com:mgmt:brocade-gre-vxlan'", ",", "defining_module", "=", "'brocade-gre-vxlan'", ",", "yang_type", "=", "'container'", ",", "is_config", "=", "True", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "ValueError", "(", "{", "'error-string'", ":", "\"\"\"gre_source must be of a type compatible with container\"\"\"", ",", "'defined-type'", ":", "\"container\"", ",", "'generated-type'", ":", "\"\"\"YANGDynClass(base=gre_source.gre_source, is_container='container', presence=False, yang_name=\"gre-source\", rest_name=\"source\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Source of tunnel', u'cli-full-no': None, u'alt-name': u'source'}}, namespace='urn:brocade.com:mgmt:brocade-gre-vxlan', defining_module='brocade-gre-vxlan', yang_type='container', is_config=True)\"\"\"", ",", "}", ")", "self", ".", "__gre_source", "=", "t", "if", "hasattr", "(", "self", ",", "'_set'", ")", ":", "self", ".", "_set", "(", ")" ]
76.318182
35.318182
def create_process(cmd, root_helper=None, addl_env=None, log_output=True):
    """Create a process object for the given command.

    The return value will be a tuple of the process object and the
    list of command arguments used to create it.
    """
    if root_helper:
        cmd = shlex.split(root_helper) + cmd
    cmd = map(str, cmd)

    log_output and LOG.info("Running command: %s", cmd)
    env = os.environ.copy()
    if addl_env:
        env.update(addl_env)

    obj = subprocess_popen(cmd, shell=False, stdin=subprocess.PIPE,
                           stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                           env=env)
    return obj, cmd
[ "def", "create_process", "(", "cmd", ",", "root_helper", "=", "None", ",", "addl_env", "=", "None", ",", "log_output", "=", "True", ")", ":", "if", "root_helper", ":", "cmd", "=", "shlex", ".", "split", "(", "root_helper", ")", "+", "cmd", "cmd", "=", "map", "(", "str", ",", "cmd", ")", "log_output", "and", "LOG", ".", "info", "(", "\"Running command: %s\"", ",", "cmd", ")", "env", "=", "os", ".", "environ", ".", "copy", "(", ")", "if", "addl_env", ":", "env", ".", "update", "(", "addl_env", ")", "obj", "=", "subprocess_popen", "(", "cmd", ",", "shell", "=", "False", ",", "stdin", "=", "subprocess", ".", "PIPE", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", "env", "=", "env", ")", "return", "obj", ",", "cmd" ]
34.473684
20.052632
def _prepare(self, kwargs=None):
    """
    Updates the function arguments and creates a :class:`asyncio.Task`
    from the Action.

    *kwargs* is an optional dictionary of additional arguments to pass to
    the Action function.

    .. warning:: *kwargs* will overwrite existing keys in *self.args*.

    .. note:: If the Action func is blocking (not a coroutine function),
       it will be executed in an `Executor`_.

    .. _Executor:
       https://docs.python.org/3/library/asyncio-eventloop.html#executor
    """
    if kwargs is not None:
        # Note: This will overwrite existing keys in self.args.
        # This is the wanted behavior.
        self.args.update(kwargs)
    if asyncio.iscoroutinefunction(self.func):
        task = asyncio.ensure_future(self.func(**self.args))
    else:
        # FIXME: is that clean enough ?
        task = asyncio.get_event_loop() \
            .run_in_executor(None, functools.partial(self.func, **self.args))
    return task
[ "def", "_prepare", "(", "self", ",", "kwargs", "=", "None", ")", ":", "if", "kwargs", "is", "not", "None", ":", "# Note: This will overwrite existing keys in self.args.", "# This is the wanted behavior.", "self", ".", "args", ".", "update", "(", "kwargs", ")", "if", "asyncio", ".", "iscoroutinefunction", "(", "self", ".", "func", ")", ":", "task", "=", "asyncio", ".", "ensure_future", "(", "self", ".", "func", "(", "*", "*", "self", ".", "args", ")", ")", "else", ":", "# FIXME: is that clean enough ?", "task", "=", "asyncio", ".", "get_event_loop", "(", ")", ".", "run_in_executor", "(", "None", ",", "functools", ".", "partial", "(", "self", ".", "func", ",", "*", "*", "self", ".", "args", ")", ")", "return", "task" ]
36.0625
21.6875
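The coroutine-vs-blocking dispatch above is a common asyncio pattern; a self-contained sketch of the same logic, outside the Action class:

import asyncio
import functools
import time

def run_any(func, **kwargs):
    """Schedule func(**kwargs) as awaitable work, pushing blocking
    callables to the default executor (a thread pool)."""
    if asyncio.iscoroutinefunction(func):
        return asyncio.ensure_future(func(**kwargs))
    loop = asyncio.get_event_loop()
    return loop.run_in_executor(None, functools.partial(func, **kwargs))

async def coro_job(x):
    await asyncio.sleep(0.01)
    return x + 1

def blocking_job(x):
    time.sleep(0.01)   # stands in for blocking I/O
    return x * 2

async def main():
    print(await run_any(coro_job, x=1))      # 2
    print(await run_any(blocking_job, x=3))  # 6

asyncio.run(main())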
def output_domain(gandi, domain, output_keys, justify=12):
    """ Helper to output a domain information."""
    if 'nameservers' in domain:
        domain['nameservers'] = format_list(domain['nameservers'])

    if 'services' in domain:
        domain['services'] = format_list(domain['services'])

    if 'tags' in domain:
        domain['tags'] = format_list(domain['tags'])

    output_generic(gandi, domain, output_keys, justify)

    if 'created' in output_keys:
        output_line(gandi, 'created', domain['date_created'], justify)

    if 'expires' in output_keys:
        date_end = domain.get('date_registry_end')
        if date_end:
            days_left = (date_end - datetime.now()).days
            output_line(gandi, 'expires',
                        '%s (in %d days)' % (date_end, days_left),
                        justify)

    if 'updated' in output_keys:
        output_line(gandi, 'updated', domain['date_updated'], justify)
[ "def", "output_domain", "(", "gandi", ",", "domain", ",", "output_keys", ",", "justify", "=", "12", ")", ":", "if", "'nameservers'", "in", "domain", ":", "domain", "[", "'nameservers'", "]", "=", "format_list", "(", "domain", "[", "'nameservers'", "]", ")", "if", "'services'", "in", "domain", ":", "domain", "[", "'services'", "]", "=", "format_list", "(", "domain", "[", "'services'", "]", ")", "if", "'tags'", "in", "domain", ":", "domain", "[", "'tags'", "]", "=", "format_list", "(", "domain", "[", "'tags'", "]", ")", "output_generic", "(", "gandi", ",", "domain", ",", "output_keys", ",", "justify", ")", "if", "'created'", "in", "output_keys", ":", "output_line", "(", "gandi", ",", "'created'", ",", "domain", "[", "'date_created'", "]", ",", "justify", ")", "if", "'expires'", "in", "output_keys", ":", "date_end", "=", "domain", ".", "get", "(", "'date_registry_end'", ")", "if", "date_end", ":", "days_left", "=", "(", "date_end", "-", "datetime", ".", "now", "(", ")", ")", ".", "days", "output_line", "(", "gandi", ",", "'expires'", ",", "'%s (in %d days)'", "%", "(", "date_end", ",", "days_left", ")", ",", "justify", ")", "if", "'updated'", "in", "output_keys", ":", "output_line", "(", "gandi", ",", "'updated'", ",", "domain", "[", "'date_updated'", "]", ",", "justify", ")" ]
35.076923
20.576923
def vesting(ctx, account):
    """ List accounts vesting balances """
    account = Account(account, full=True)
    t = [["vesting_id", "claimable"]]
    for vest in account["vesting_balances"]:
        vesting = Vesting(vest)
        t.append([vesting["id"], str(vesting.claimable)])
    print_table(t)
[ "def", "vesting", "(", "ctx", ",", "account", ")", ":", "account", "=", "Account", "(", "account", ",", "full", "=", "True", ")", "t", "=", "[", "[", "\"vesting_id\"", ",", "\"claimable\"", "]", "]", "for", "vest", "in", "account", "[", "\"vesting_balances\"", "]", ":", "vesting", "=", "Vesting", "(", "vest", ")", "t", ".", "append", "(", "[", "vesting", "[", "\"id\"", "]", ",", "str", "(", "vesting", ".", "claimable", ")", "]", ")", "print_table", "(", "t", ")" ]
33.222222
7.777778
def post(self, request, provider=None):
    """
    method called on POST request

    :param django.http.HttpRequest request: The current request object
    :param unicode provider: Optional parameter. The user provider suffix.
    """
    # if settings.CAS_FEDERATE is not True redirect to the login page
    if not settings.CAS_FEDERATE:
        logger.warning("CAS_FEDERATE is False, set it to True to use federation")
        return redirect("cas_server:login")
    # POST with a provider suffix, this is probably an SLO request.
    # csrf is disabled for allowing SLO requests reception
    try:
        provider = FederatedIendityProvider.objects.get(suffix=provider)
        auth = self.get_cas_client(request, provider)
        try:
            auth.clean_sessions(request.POST['logoutRequest'])
        except (KeyError, AttributeError):
            pass
        return HttpResponse("ok")
    # else, a User is trying to log in using an identity provider
    except FederatedIendityProvider.DoesNotExist:
        # Manually checking for csrf to protect the code below
        reason = CsrfViewMiddleware().process_view(request, None, (), {})
        if reason is not None:  # pragma: no cover (csrf checks are disabled during tests)
            return reason  # Failed the test, stop here.
        form = forms.FederateSelect(request.POST)
        if form.is_valid():
            params = utils.copy_params(
                request.POST,
                ignore={"provider", "csrfmiddlewaretoken", "ticket", "lt"}
            )
            if params.get("renew") == "False":
                del params["renew"]
            url = utils.reverse_params(
                "cas_server:federateAuth",
                kwargs=dict(provider=form.cleaned_data["provider"].suffix),
                params=params
            )
            return HttpResponseRedirect(url)
        else:
            return redirect("cas_server:login")
[ "def", "post", "(", "self", ",", "request", ",", "provider", "=", "None", ")", ":", "# if settings.CAS_FEDERATE is not True redirect to the login page", "if", "not", "settings", ".", "CAS_FEDERATE", ":", "logger", ".", "warning", "(", "\"CAS_FEDERATE is False, set it to True to use federation\"", ")", "return", "redirect", "(", "\"cas_server:login\"", ")", "# POST with a provider suffix, this is probably an SLO request. csrf is disabled for", "# allowing SLO requests reception", "try", ":", "provider", "=", "FederatedIendityProvider", ".", "objects", ".", "get", "(", "suffix", "=", "provider", ")", "auth", "=", "self", ".", "get_cas_client", "(", "request", ",", "provider", ")", "try", ":", "auth", ".", "clean_sessions", "(", "request", ".", "POST", "[", "'logoutRequest'", "]", ")", "except", "(", "KeyError", ",", "AttributeError", ")", ":", "pass", "return", "HttpResponse", "(", "\"ok\"", ")", "# else, a User is trying to log in using an identity provider", "except", "FederatedIendityProvider", ".", "DoesNotExist", ":", "# Manually checking for csrf to protect the code below", "reason", "=", "CsrfViewMiddleware", "(", ")", ".", "process_view", "(", "request", ",", "None", ",", "(", ")", ",", "{", "}", ")", "if", "reason", "is", "not", "None", ":", "# pragma: no cover (csrf checks are disabled during tests)", "return", "reason", "# Failed the test, stop here.", "form", "=", "forms", ".", "FederateSelect", "(", "request", ".", "POST", ")", "if", "form", ".", "is_valid", "(", ")", ":", "params", "=", "utils", ".", "copy_params", "(", "request", ".", "POST", ",", "ignore", "=", "{", "\"provider\"", ",", "\"csrfmiddlewaretoken\"", ",", "\"ticket\"", ",", "\"lt\"", "}", ")", "if", "params", ".", "get", "(", "\"renew\"", ")", "==", "\"False\"", ":", "del", "params", "[", "\"renew\"", "]", "url", "=", "utils", ".", "reverse_params", "(", "\"cas_server:federateAuth\"", ",", "kwargs", "=", "dict", "(", "provider", "=", "form", ".", "cleaned_data", "[", "\"provider\"", "]", ".", "suffix", ")", ",", "params", "=", "params", ")", "return", "HttpResponseRedirect", "(", "url", ")", "else", ":", "return", "redirect", "(", "\"cas_server:login\"", ")" ]
48
19.209302
def sleep(self, seconds):
    """
    Sleep in simulated time.
    """
    start = self.time()
    while (self.time() - start < seconds
           and not self.need_to_stop.is_set()):
        self.need_to_stop.wait(self.sim_time)
[ "def", "sleep", "(", "self", ",", "seconds", ")", ":", "start", "=", "self", ".", "time", "(", ")", "while", "(", "self", ".", "time", "(", ")", "-", "start", "<", "seconds", "and", "not", "self", ".", "need_to_stop", ".", "is_set", "(", ")", ")", ":", "self", ".", "need_to_stop", ".", "wait", "(", "self", ".", "sim_time", ")" ]
31.25
7.5
def from_params(cls, params: Iterable[Tuple[str, Params]] = ()) -> Optional['RegularizerApplicator']:
    """
    Converts a List of pairs (regex, params) into a RegularizerApplicator.
    This list should look like

    [["regex1", {"type": "l2", "alpha": 0.01}], ["regex2", "l1"]]

    where each parameter receives the penalty corresponding to the first
    regex that matches its name (which may be no regex and hence no penalty).
    The values can either be strings, in which case they correspond to the
    names of regularizers, or dictionaries, in which case they must contain
    the "type" key, corresponding to the name of a regularizer. In addition,
    they may contain auxiliary named parameters which will be fed to the
    regularizer itself. To determine valid auxiliary parameters, please refer
    to the torch.nn.init documentation.

    Parameters
    ----------
    params : ``Params``, required.
        A Params object containing a "regularizers" key.

    Returns
    -------
    A RegularizerApplicator containing the specified Regularizers,
    or ``None`` if no Regularizers are specified.
    """
    if not params:
        return None

    instantiated_regularizers = []
    for parameter_regex, regularizer_params in params:
        if isinstance(regularizer_params, str):
            regularizer = Regularizer.by_name(regularizer_params)()
        else:
            regularizer_type = Regularizer.by_name(regularizer_params.pop("type"))
            regularizer = regularizer_type(**regularizer_params)  # type: ignore
        instantiated_regularizers.append((parameter_regex, regularizer))
    return RegularizerApplicator(instantiated_regularizers)
[ "def", "from_params", "(", "cls", ",", "params", ":", "Iterable", "[", "Tuple", "[", "str", ",", "Params", "]", "]", "=", "(", ")", ")", "->", "Optional", "[", "'RegularizerApplicator'", "]", ":", "if", "not", "params", ":", "return", "None", "instantiated_regularizers", "=", "[", "]", "for", "parameter_regex", ",", "regularizer_params", "in", "params", ":", "if", "isinstance", "(", "regularizer_params", ",", "str", ")", ":", "regularizer", "=", "Regularizer", ".", "by_name", "(", "regularizer_params", ")", "(", ")", "else", ":", "regularizer_type", "=", "Regularizer", ".", "by_name", "(", "regularizer_params", ".", "pop", "(", "\"type\"", ")", ")", "regularizer", "=", "regularizer_type", "(", "*", "*", "regularizer_params", ")", "# type: ignore", "instantiated_regularizers", ".", "append", "(", "(", "parameter_regex", ",", "regularizer", ")", ")", "return", "RegularizerApplicator", "(", "instantiated_regularizers", ")" ]
47.783784
28.918919
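The string-or-dict dispatch convention above ("a bare name means default construction; a dict must carry a 'type' key plus kwargs") is easy to reproduce; a generic sketch with a hypothetical registry, not AllenNLP's:

# Hypothetical registry mirroring the string-or-dict convention above.
REGULARIZERS = {
    "l1": lambda alpha=0.01: ("l1", alpha),
    "l2": lambda alpha=0.01: ("l2", alpha),
}

def build(spec):
    if isinstance(spec, str):                  # "l1" -> default construction
        return REGULARIZERS[spec]()
    params = dict(spec)                        # {"type": "l2", "alpha": 0.1}
    return REGULARIZERS[params.pop("type")](**params)

print(build("l1"))                             # ('l1', 0.01)
print(build({"type": "l2", "alpha": 0.1}))     # ('l2', 0.1)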
def destination(self, bearing, distance):
    """Calculate destination locations for given distance and bearings.

    Args:
        bearing (float): Bearing to move on in degrees
        distance (float): Distance in kilometres

    Returns:
        list of list of Point: Groups of points shifted by ``distance``
            and ``bearing``
    """
    return (segment.destination(bearing, distance) for segment in self)
[ "def", "destination", "(", "self", ",", "bearing", ",", "distance", ")", ":", "return", "(", "segment", ".", "destination", "(", "bearing", ",", "distance", ")", "for", "segment", "in", "self", ")" ]
37.25
20.083333
def get_grade_entry_form_for_update(self, grade_entry_id):
    """Gets the grade entry form for updating an existing entry.

    A new grade entry form should be requested for each update
    transaction.

    arg:    grade_entry_id (osid.id.Id): the ``Id`` of the
            ``GradeEntry``
    return: (osid.grading.GradeEntryForm) - the grade entry form
    raise:  NotFound - ``grade_entry_id`` is not found
    raise:  NullArgument - ``grade_entry_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    collection = JSONClientValidated('grading',
                                     collection='GradeEntry',
                                     runtime=self._runtime)
    if not isinstance(grade_entry_id, ABCId):
        raise errors.InvalidArgument('the argument is not a valid OSID Id')
    if (grade_entry_id.get_identifier_namespace() != 'grading.GradeEntry' or
            grade_entry_id.get_authority() != self._authority):
        raise errors.InvalidArgument()
    result = collection.find_one({'_id': ObjectId(grade_entry_id.get_identifier())})

    obj_form = objects.GradeEntryForm(
        osid_object_map=result,
        effective_agent_id=str(self.get_effective_agent_id()),
        runtime=self._runtime,
        proxy=self._proxy)
    self._forms[obj_form.get_id().get_identifier()] = not UPDATED

    return obj_form
[ "def", "get_grade_entry_form_for_update", "(", "self", ",", "grade_entry_id", ")", ":", "collection", "=", "JSONClientValidated", "(", "'grading'", ",", "collection", "=", "'GradeEntry'", ",", "runtime", "=", "self", ".", "_runtime", ")", "if", "not", "isinstance", "(", "grade_entry_id", ",", "ABCId", ")", ":", "raise", "errors", ".", "InvalidArgument", "(", "'the argument is not a valid OSID Id'", ")", "if", "(", "grade_entry_id", ".", "get_identifier_namespace", "(", ")", "!=", "'grading.GradeEntry'", "or", "grade_entry_id", ".", "get_authority", "(", ")", "!=", "self", ".", "_authority", ")", ":", "raise", "errors", ".", "InvalidArgument", "(", ")", "result", "=", "collection", ".", "find_one", "(", "{", "'_id'", ":", "ObjectId", "(", "grade_entry_id", ".", "get_identifier", "(", ")", ")", "}", ")", "obj_form", "=", "objects", ".", "GradeEntryForm", "(", "osid_object_map", "=", "result", ",", "effective_agent_id", "=", "str", "(", "self", ".", "get_effective_agent_id", "(", ")", ")", ",", "runtime", "=", "self", ".", "_runtime", ",", "proxy", "=", "self", ".", "_proxy", ")", "self", ".", "_forms", "[", "obj_form", ".", "get_id", "(", ")", ".", "get_identifier", "(", ")", "]", "=", "not", "UPDATED", "return", "obj_form" ]
46.117647
21.911765
def upload_aws(target_filepath, metadata, access_token,
               base_url=OH_BASE_URL, remote_file_info=None,
               project_member_id=None, max_bytes=MAX_FILE_DEFAULT):
    """
    Upload a file from a local filepath using the "direct upload" API.
    Equivalent to upload_file. To learn more about this API endpoint see:

    * https://www.openhumans.org/direct-sharing/on-site-data-upload/
    * https://www.openhumans.org/direct-sharing/oauth2-data-upload/

    :param target_filepath: This field is the filepath of the file to be
        uploaded.
    :param metadata: This field is the metadata associated with the file.
        Description and tags are compulsory fields of metadata.
    :param access_token: This is user specific access token/master token.
    :param base_url: It is this URL `https://www.openhumans.org`.
    :param remote_file_info: This field is for checking if a file with
        matching name and file size already exists. Its default value is
        None.
    :param project_member_id: This field is the list of project member id
        of all members of a project. Its default value is None.
    :param max_bytes: This field is the maximum file size a user can
        upload. Its default value is 128m.
    """
    return upload_file(target_filepath, metadata, access_token, base_url,
                       remote_file_info, project_member_id, max_bytes)
[ "def", "upload_aws", "(", "target_filepath", ",", "metadata", ",", "access_token", ",", "base_url", "=", "OH_BASE_URL", ",", "remote_file_info", "=", "None", ",", "project_member_id", "=", "None", ",", "max_bytes", "=", "MAX_FILE_DEFAULT", ")", ":", "return", "upload_file", "(", "target_filepath", ",", "metadata", ",", "access_token", ",", "base_url", ",", "remote_file_info", ",", "project_member_id", ",", "max_bytes", ")" ]
57.375
25.875
def stream(self, flags=0, devpath=None):
    "Control streaming reports from the daemon."
    if flags & WATCH_DISABLE:
        arg = '?WATCH={"enable":false'
        if flags & WATCH_JSON:
            arg += ',"json":false'
        if flags & WATCH_NMEA:
            arg += ',"nmea":false'
        if flags & WATCH_RARE:
            arg += ',"raw":1'
        if flags & WATCH_RAW:
            arg += ',"raw":2'
        if flags & WATCH_SCALED:
            arg += ',"scaled":false'
        if flags & WATCH_TIMING:
            arg += ',"timing":false'
    else:  # flags & WATCH_ENABLE:
        arg = '?WATCH={"enable":true'
        if flags & WATCH_JSON:
            arg += ',"json":true'
        if flags & WATCH_NMEA:
            arg += ',"nmea":true'
        if flags & WATCH_RAW:
            arg += ',"raw":1'
        if flags & WATCH_RARE:
            arg += ',"raw":0'
        if flags & WATCH_SCALED:
            arg += ',"scaled":true'
        if flags & WATCH_TIMING:
            arg += ',"timing":true'
        if flags & WATCH_DEVICE:
            arg += ',"device":"%s"' % devpath
    return self.send(arg + "}")
[ "def", "stream", "(", "self", ",", "flags", "=", "0", ",", "devpath", "=", "None", ")", ":", "if", "flags", "&", "WATCH_DISABLE", ":", "arg", "=", "'?WATCH={\"enable\":false'", "if", "flags", "&", "WATCH_JSON", ":", "arg", "+=", "',\"json\":false'", "if", "flags", "&", "WATCH_NMEA", ":", "arg", "+=", "',\"nmea\":false'", "if", "flags", "&", "WATCH_RARE", ":", "arg", "+=", "',\"raw\":1'", "if", "flags", "&", "WATCH_RAW", ":", "arg", "+=", "',\"raw\":2'", "if", "flags", "&", "WATCH_SCALED", ":", "arg", "+=", "',\"scaled\":false'", "if", "flags", "&", "WATCH_TIMING", ":", "arg", "+=", "',\"timing\":false'", "else", ":", "# flags & WATCH_ENABLE:", "arg", "=", "'?WATCH={\"enable\":true'", "if", "flags", "&", "WATCH_JSON", ":", "arg", "+=", "',\"json\":true'", "if", "flags", "&", "WATCH_NMEA", ":", "arg", "+=", "',\"nmea\":true'", "if", "flags", "&", "WATCH_RAW", ":", "arg", "+=", "',\"raw\":1'", "if", "flags", "&", "WATCH_RARE", ":", "arg", "+=", "',\"raw\":0'", "if", "flags", "&", "WATCH_SCALED", ":", "arg", "+=", "',\"scaled\":true'", "if", "flags", "&", "WATCH_TIMING", ":", "arg", "+=", "',\"timing\":true'", "if", "flags", "&", "WATCH_DEVICE", ":", "arg", "+=", "',\"device\":\"%s\"'", "%", "devpath", "return", "self", ".", "send", "(", "arg", "+", "\"}\"", ")" ]
36.939394
4.515152
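For a given flag combination, the method above just concatenates JSON fragments into one ?WATCH command; an illustrative sketch (the real WATCH_* bit-flag values live in the gps client module, the values below are placeholders):

WATCH_ENABLE, WATCH_JSON = 0x01, 0x10  # placeholder bit-flag values

flags = WATCH_ENABLE | WATCH_JSON
# stream(flags) would build and send: ?WATCH={"enable":true,"json":true}
print(bool(flags & WATCH_JSON))  # True -> the ',"json":true' fragment is added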
def time_range(self, start, end):
    """Add a request for a time range to the query.

    This modifies the query in-place, but returns `self` so that multiple
    queries can be chained together on one line.

    This replaces any existing temporal queries that have been set.

    Parameters
    ----------
    start : datetime.datetime
        The start of the requested time range
    end : datetime.datetime
        The end of the requested time range

    Returns
    -------
    self : DataQuery
        Returns self for chaining calls
    """
    self._set_query(self.time_query, time_start=self._format_time(start),
                    time_end=self._format_time(end))
    return self
[ "def", "time_range", "(", "self", ",", "start", ",", "end", ")", ":", "self", ".", "_set_query", "(", "self", ".", "time_query", ",", "time_start", "=", "self", ".", "_format_time", "(", "start", ")", ",", "time_end", "=", "self", ".", "_format_time", "(", "end", ")", ")", "return", "self" ]
31
21.083333
def percent_point(self, U):
    """Given a cdf value, returns a value in original space.

    Args:
        U: `int` or `float` cdf value in [0,1]

    Returns:
        float: value in original space
    """
    self.check_fit()

    if not 0 < U < 1:
        raise ValueError('cdf value must be in [0,1]')

    # args must be a tuple; a bare (U) is just U and would break brentq
    return scipy.optimize.brentq(self.cumulative_distribution,
                                 -1000.0, 1000.0, args=(U,))
[ "def", "percent_point", "(", "self", ",", "U", ")", ":", "self", ".", "check_fit", "(", ")", "if", "not", "0", "<", "U", "<", "1", ":", "raise", "ValueError", "(", "'cdf value must be in [0,1]'", ")", "return", "scipy", ".", "optimize", ".", "brentq", "(", "self", ".", "cumulative_distribution", ",", "-", "1000.0", ",", "1000.0", ",", "args", "=", "(", "U", ")", ")" ]
28.2
22.533333
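Bracketing root-finders like brentq are a standard way to invert a CDF that has no closed-form quantile function; a self-contained sketch against a known distribution:

import scipy.optimize
import scipy.stats

cdf = scipy.stats.norm(loc=0.0, scale=1.0).cdf

def percent_point(U, lo=-1000.0, hi=1000.0):
    """Invert `cdf` numerically: find x such that cdf(x) == U."""
    if not 0 < U < 1:
        raise ValueError('cdf value must be in (0, 1)')
    return scipy.optimize.brentq(lambda x: cdf(x) - U, lo, hi)

print(round(percent_point(0.975), 3))  # 1.96, the familiar normal quantile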
def exception(self, timeout=None):
    """Wait for the async function to complete and return its exception.

    If the function did not raise an exception this returns ``None``.
    """
    if not self._done.wait(timeout):
        raise Timeout('timeout waiting for future')
    if self._state == self.S_EXCEPTION:
        return self._result
[ "def", "exception", "(", "self", ",", "timeout", "=", "None", ")", ":", "if", "not", "self", ".", "_done", ".", "wait", "(", "timeout", ")", ":", "raise", "Timeout", "(", "'timeout waiting for future'", ")", "if", "self", ".", "_state", "==", "self", ".", "S_EXCEPTION", ":", "return", "self", ".", "_result" ]
40.333333
11.777778
def image_name(txt):
    """Return the alt part of the first <img alt=""> tag in txt."""
    mo = imgtag_re.search(txt)
    if mo:
        name = strformat.unquote(mo.group('name').strip())
        return _unquote(name)
    return u''
[ "def", "image_name", "(", "txt", ")", ":", "mo", "=", "imgtag_re", ".", "search", "(", "txt", ")", "if", "mo", ":", "name", "=", "strformat", ".", "unquote", "(", "mo", ".", "group", "(", "'name'", ")", ".", "strip", "(", ")", ")", "return", "_unquote", "(", "name", ")", "return", "u''" ]
32.857143
16.142857
def contextMenuEvent(self, event):
    """
    Add menu action:
    * 'Show line numbers'
    * 'Save to file'
    """
    menu = QtWidgets.QPlainTextEdit.createStandardContextMenu(self)
    mg = self.getGlobalsMenu()
    a0 = menu.actions()[0]
    menu.insertMenu(a0, mg)
    menu.insertSeparator(a0)
    menu.addSeparator()
    a = QtWidgets.QAction('Show line numbers', menu)
    l = self.codeEditor.lineNumbers
    a.setCheckable(True)
    a.setChecked(l.isVisible())
    a.triggered.connect(lambda checked: l.show() if checked else l.hide())
    menu.addAction(a)
    menu.addSeparator()
    a = QtWidgets.QAction('Save to file', menu)
    a.triggered.connect(self.saveToFile)
    menu.addAction(a)
    menu.exec_(event.globalPos())
[ "def", "contextMenuEvent", "(", "self", ",", "event", ")", ":", "menu", "=", "QtWidgets", ".", "QPlainTextEdit", ".", "createStandardContextMenu", "(", "self", ")", "mg", "=", "self", ".", "getGlobalsMenu", "(", ")", "a0", "=", "menu", ".", "actions", "(", ")", "[", "0", "]", "menu", ".", "insertMenu", "(", "a0", ",", "mg", ")", "menu", ".", "insertSeparator", "(", "a0", ")", "menu", ".", "addSeparator", "(", ")", "a", "=", "QtWidgets", ".", "QAction", "(", "'Show line numbers'", ",", "menu", ")", "l", "=", "self", ".", "codeEditor", ".", "lineNumbers", "a", ".", "setCheckable", "(", "True", ")", "a", ".", "setChecked", "(", "l", ".", "isVisible", "(", ")", ")", "a", ".", "triggered", ".", "connect", "(", "lambda", "checked", ":", "l", ".", "show", "(", ")", "if", "checked", "else", "l", ".", "hide", "(", ")", ")", "menu", ".", "addAction", "(", "a", ")", "menu", ".", "addSeparator", "(", ")", "a", "=", "QtWidgets", ".", "QAction", "(", "'Save to file'", ",", "menu", ")", "a", ".", "triggered", ".", "connect", "(", "self", ".", "saveToFile", ")", "menu", ".", "addAction", "(", "a", ")", "menu", ".", "exec_", "(", "event", ".", "globalPos", "(", ")", ")" ]
29.740741
15.518519
def asarray(self, file=None, out=None, **kwargs):
    """Read image data from files and return as numpy array.

    The kwargs parameters are passed to the imread function.

    Raise IndexError or ValueError if image shapes do not match.
    """
    if file is not None:
        if isinstance(file, int):
            return self.imread(self.files[file], **kwargs)
        return self.imread(file, **kwargs)

    im = self.imread(self.files[0], **kwargs)
    shape = self.shape + im.shape
    result = create_output(out, shape, dtype=im.dtype)
    result = result.reshape(-1, *im.shape)
    for index, fname in zip(self._indices, self.files):
        index = [i-j for i, j in zip(index, self._startindex)]
        index = numpy.ravel_multi_index(index, self.shape)
        im = self.imread(fname, **kwargs)
        result[index] = im
    result.shape = shape
    return result
[ "def", "asarray", "(", "self", ",", "file", "=", "None", ",", "out", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "file", "is", "not", "None", ":", "if", "isinstance", "(", "file", ",", "int", ")", ":", "return", "self", ".", "imread", "(", "self", ".", "files", "[", "file", "]", ",", "*", "*", "kwargs", ")", "return", "self", ".", "imread", "(", "file", ",", "*", "*", "kwargs", ")", "im", "=", "self", ".", "imread", "(", "self", ".", "files", "[", "0", "]", ",", "*", "*", "kwargs", ")", "shape", "=", "self", ".", "shape", "+", "im", ".", "shape", "result", "=", "create_output", "(", "out", ",", "shape", ",", "dtype", "=", "im", ".", "dtype", ")", "result", "=", "result", ".", "reshape", "(", "-", "1", ",", "*", "im", ".", "shape", ")", "for", "index", ",", "fname", "in", "zip", "(", "self", ".", "_indices", ",", "self", ".", "files", ")", ":", "index", "=", "[", "i", "-", "j", "for", "i", ",", "j", "in", "zip", "(", "index", ",", "self", ".", "_startindex", ")", "]", "index", "=", "numpy", ".", "ravel_multi_index", "(", "index", ",", "self", ".", "shape", ")", "im", "=", "self", ".", "imread", "(", "fname", ",", "*", "*", "kwargs", ")", "result", "[", "index", "]", "=", "im", "result", ".", "shape", "=", "shape", "return", "result" ]
38.75
17.208333
def CreateVertices(self, points):
    """
    Returns a graph object whose nodes are tuples representing a point.
    """
    gr = digraph()

    for z, x, Q in points:
        node = (z, x, Q)
        gr.add_nodes([node])

    return gr
[ "def", "CreateVertices", "(", "self", ",", "points", ")", ":", "gr", "=", "digraph", "(", ")", "for", "z", ",", "x", ",", "Q", "in", "points", ":", "node", "=", "(", "z", ",", "x", ",", "Q", ")", "gr", ".", "add_nodes", "(", "[", "node", "]", ")", "return", "gr" ]
22.833333
16
def get_other_answers(pool, seeded_answers, get_student_item_dict, algo, options):
    """
    Select other student's answers from answer pool or seeded answers based
    on the selection algorithm

    Args:
        pool (dict): answer pool, format:
            {
                option1_index: {
                    student_id: { can store algorithm specific info here }
                },
                option2_index: {
                    student_id: { ... }
                }
            }
        seeded_answers (list): seeded answers from instructor
            [
                {'answer': 0, 'rationale': 'rationale A'},
                {'answer': 1, 'rationale': 'rationale B'},
            ]
        get_student_item_dict (callable): get student item dict function to
            return student item dict
        algo (str): selection algorithm
        options (dict): answer options for the question

    Returns:
        dict: answers based on the selection algorithm
    """
    # "#" means the number of responses returned should be the same as the
    # number of options.
    num_responses = len(options) \
        if 'num_responses' not in algo or algo['num_responses'] == "#" \
        else int(algo['num_responses'])

    if algo['name'] == 'simple':
        return get_other_answers_simple(pool, seeded_answers, get_student_item_dict, num_responses)
    elif algo['name'] == 'random':
        return get_other_answers_random(pool, seeded_answers, get_student_item_dict, num_responses)
    else:
        raise UnknownChooseAnswerAlgorithm()
[ "def", "get_other_answers", "(", "pool", ",", "seeded_answers", ",", "get_student_item_dict", ",", "algo", ",", "options", ")", ":", "# \"#\" means the number of responses returned should be the same as the number of options.", "num_responses", "=", "len", "(", "options", ")", "if", "'num_responses'", "not", "in", "algo", "or", "algo", "[", "'num_responses'", "]", "==", "\"#\"", "else", "int", "(", "algo", "[", "'num_responses'", "]", ")", "if", "algo", "[", "'name'", "]", "==", "'simple'", ":", "return", "get_other_answers_simple", "(", "pool", ",", "seeded_answers", ",", "get_student_item_dict", ",", "num_responses", ")", "elif", "algo", "[", "'name'", "]", "==", "'random'", ":", "return", "get_other_answers_random", "(", "pool", ",", "seeded_answers", ",", "get_student_item_dict", ",", "num_responses", ")", "else", ":", "raise", "UnknownChooseAnswerAlgorithm", "(", ")" ]
40.648649
24.162162
def Hooper2K(Di, Re, name=None, K1=None, Kinfty=None):
    r'''Returns loss coefficient for various fittings, depending on the name
    input. Alternatively, the Hooper constants K1, Kinfty may be provided
    and used instead. Source of data is [1]_. Reviews of this model are
    less favorable than those of the Darby method, but it is superior to
    the constant-K method.

    .. math::
        K = \frac{K_1}{Re} + K_\infty\left(1 + \frac{1\text{ inch}}{D_{in}}\right)

    Note this model uses actual inside pipe diameter in inches.

    Parameters
    ----------
    Di : float
        Actual inside diameter of the pipe, [in]
    Re : float
        Reynolds number, [-]
    name : str, optional
        String from Hooper dict representing a fitting
    K1 : float, optional
        K1 parameter of Hooper model, optional [-]
    Kinfty : float, optional
        Kinfty parameter of Hooper model, optional [-]

    Returns
    -------
    K : float
        Loss coefficient [-]

    Notes
    -----
    Also described in Ludwig's Applied Process Design.
    Relatively uncommon to see it used.
    No actual example found.

    Examples
    --------
    >>> Hooper2K(Di=2., Re=10000., name='Valve, Globe, Standard')
    6.15
    >>> Hooper2K(Di=2., Re=10000., K1=900, Kinfty=4)
    6.09

    References
    ----------
    .. [1] Hooper, W. B., "The 2-K Method Predicts Head Losses in Pipe
       Fittings," Chem. Eng., p. 97, Aug. 24 (1981).
    .. [2] Hooper, William B. "Calculate Head Loss Caused by Change in Pipe
       Size." Chemical Engineering 95, no. 16 (November 7, 1988): 89.
    .. [3] Kayode Coker. Ludwig's Applied Process Design for Chemical and
       Petrochemical Plants. 4E. Amsterdam ; Boston: Gulf Professional
       Publishing, 2007.
    '''
    if name:
        if name in Hooper:
            d = Hooper[name]
            K1, Kinfty = d['K1'], d['Kinfty']
        else:
            raise Exception('Name of fitting not in list')
    elif K1 and Kinfty:
        pass
    else:
        raise Exception('Name of fitting or constants are required')
    return K1/Re + Kinfty*(1. + 1./Di)
[ "def", "Hooper2K", "(", "Di", ",", "Re", ",", "name", "=", "None", ",", "K1", "=", "None", ",", "Kinfty", "=", "None", ")", ":", "if", "name", ":", "if", "name", "in", "Hooper", ":", "d", "=", "Hooper", "[", "name", "]", "K1", ",", "Kinfty", "=", "d", "[", "'K1'", "]", ",", "d", "[", "'Kinfty'", "]", "else", ":", "raise", "Exception", "(", "'Name of fitting not in list'", ")", "elif", "K1", "and", "Kinfty", ":", "pass", "else", ":", "raise", "Exception", "(", "'Name of fitting or constants are required'", ")", "return", "K1", "/", "Re", "+", "Kinfty", "*", "(", "1.", "+", "1.", "/", "Di", ")" ]
32.078125
23.921875
def remove(self, uid, filename=None):
    """Removes an address from the Abook addressbook

    uid -- UID of the entry to remove
    """
    book = ConfigParser(default_section='format')
    with self._lock:
        book.read(self._filename)
        del book[uid.split('@')[0]]
        with open(self._filename, 'w') as fp:
            book.write(fp, False)
[ "def", "remove", "(", "self", ",", "uid", ",", "filename", "=", "None", ")", ":", "book", "=", "ConfigParser", "(", "default_section", "=", "'format'", ")", "with", "self", ".", "_lock", ":", "book", ".", "read", "(", "self", ".", "_filename", ")", "del", "book", "[", "uid", ".", "split", "(", "'@'", ")", "[", "0", "]", "]", "with", "open", "(", "self", ".", "_filename", ",", "'w'", ")", "as", "fp", ":", "book", ".", "write", "(", "fp", ",", "False", ")" ]
38.2
4.9
def normalize_curves_eb(curves):
    """
    A more sophisticated version of normalize_curves, used in the event
    based calculator.

    :param curves: a list of pairs (losses, poes)
    :returns: first losses, all_poes
    """
    # we assume non-decreasing losses, so losses[-1] is the maximum loss
    non_zero_curves = [(losses, poes)
                       for losses, poes in curves if losses[-1] > 0]
    if not non_zero_curves:  # no damage. all zero curves
        return curves[0][0], numpy.array([poes for _losses, poes in curves])
    else:  # standard case
        max_losses = [losses[-1] for losses, _poes in non_zero_curves]
        reference_curve = non_zero_curves[numpy.argmax(max_losses)]
        loss_ratios = reference_curve[0]
        curves_poes = [interpolate.interp1d(
            losses, poes, bounds_error=False, fill_value=0)(loss_ratios)
            for losses, poes in curves]
        # fix degenerated case with flat curve
        for cp in curves_poes:
            if numpy.isnan(cp[0]):
                cp[0] = 0
    return loss_ratios, numpy.array(curves_poes)
[ "def", "normalize_curves_eb", "(", "curves", ")", ":", "# we assume non-decreasing losses, so losses[-1] is the maximum loss", "non_zero_curves", "=", "[", "(", "losses", ",", "poes", ")", "for", "losses", ",", "poes", "in", "curves", "if", "losses", "[", "-", "1", "]", ">", "0", "]", "if", "not", "non_zero_curves", ":", "# no damage. all zero curves", "return", "curves", "[", "0", "]", "[", "0", "]", ",", "numpy", ".", "array", "(", "[", "poes", "for", "_losses", ",", "poes", "in", "curves", "]", ")", "else", ":", "# standard case", "max_losses", "=", "[", "losses", "[", "-", "1", "]", "for", "losses", ",", "_poes", "in", "non_zero_curves", "]", "reference_curve", "=", "non_zero_curves", "[", "numpy", ".", "argmax", "(", "max_losses", ")", "]", "loss_ratios", "=", "reference_curve", "[", "0", "]", "curves_poes", "=", "[", "interpolate", ".", "interp1d", "(", "losses", ",", "poes", ",", "bounds_error", "=", "False", ",", "fill_value", "=", "0", ")", "(", "loss_ratios", ")", "for", "losses", ",", "poes", "in", "curves", "]", "# fix degenerated case with flat curve", "for", "cp", "in", "curves_poes", ":", "if", "numpy", ".", "isnan", "(", "cp", "[", "0", "]", ")", ":", "cp", "[", "0", "]", "=", "0", "return", "loss_ratios", ",", "numpy", ".", "array", "(", "curves_poes", ")" ]
42.96
15.2
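The core step above, resampling every curve's poes onto one shared loss array with out-of-range values filled with 0, can be seen in isolation; a small self-contained sketch:

import numpy
from scipy import interpolate

curves = [(numpy.array([0., 10., 20.]), numpy.array([.9, .5, .1])),
          (numpy.array([0., 5., 40.]), numpy.array([.8, .4, .05]))]

# pick the curve reaching the largest loss as the common x-axis
loss_ratios = max(curves, key=lambda c: c[0][-1])[0]
curves_poes = [interpolate.interp1d(losses, poes, bounds_error=False,
                                    fill_value=0)(loss_ratios)
               for losses, poes in curves]
print(loss_ratios)               # [ 0.  5. 40.]
print(numpy.array(curves_poes))  # every curve sampled at the same losses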
def decode_key(cls, pubkey_content):
    """Decode base64 coded part of the key."""
    try:
        decoded_key = base64.b64decode(pubkey_content.encode("ascii"))
    except (TypeError, binascii.Error):
        raise MalformedDataError("Unable to decode the key")
    return decoded_key
[ "def", "decode_key", "(", "cls", ",", "pubkey_content", ")", ":", "try", ":", "decoded_key", "=", "base64", ".", "b64decode", "(", "pubkey_content", ".", "encode", "(", "\"ascii\"", ")", ")", "except", "(", "TypeError", ",", "binascii", ".", "Error", ")", ":", "raise", "MalformedDataError", "(", "\"Unable to decode the key\"", ")", "return", "decoded_key" ]
43.571429
15.285714
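Context for the except clause above: base64.b64decode raises binascii.Error (a ValueError subclass) on malformed input, which a quick standalone check confirms:

import base64
import binascii

try:
    base64.b64decode("not base64!!".encode("ascii"))
except binascii.Error as exc:
    print("rejected:", exc)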
def _build_tree(value_and_gradients_fn, current_state,
                current_target_log_prob, current_grads_target_log_prob,
                current_momentum, direction, depth, step_size,
                log_slice_sample, max_simulation_error=1000., seed=None):
    """Builds a tree at a given tree depth and at a given state.

    The `current` state is immediately adjacent to, but outside of, the
    subtrajectory spanned by the returned `forward` and `reverse` states.

    Args:
        value_and_gradients_fn: Python callable which takes an argument like
            `*current_state` and returns a tuple of its (possibly
            unnormalized) log-density under the target distribution and its
            gradient with respect to each state.
        current_state: List of `Tensor`s representing the current states of
            the NUTS trajectory.
        current_target_log_prob: Scalar `Tensor` representing the value of
            `target_log_prob_fn` at the `current_state`.
        current_grads_target_log_prob: List of `Tensor`s representing
            gradient of `current_target_log_prob` with respect to
            `current_state`. Must have same shape as `current_state`.
        current_momentum: List of `Tensor`s representing the momentums of
            `current_state`. Must have same shape as `current_state`.
        direction: int that is either -1 or 1. It determines whether to
            perform leapfrog integration backwards (reverse) or forward in
            time respectively.
        depth: non-negative int that indicates how deep of a tree to build.
            Each call to `_build_tree` takes `2**depth` leapfrog steps.
        step_size: List of `Tensor`s representing the step sizes for the
            leapfrog integrator. Must have same shape as `current_state`.
        log_slice_sample: The log of an auxiliary slice variable. It is used
            together with `max_simulation_error` to avoid simulating
            trajectories with too much numerical error.
        max_simulation_error: Maximum simulation error to tolerate before
            terminating the trajectory. Simulation error is the
            `log_slice_sample` minus the log-joint probability at the
            simulated state.
        seed: Integer to seed the random number generator.

    Returns:
        reverse_state: List of `Tensor`s representing the "reverse" states of
            the NUTS trajectory. Has same shape as `current_state`.
        reverse_target_log_prob: Scalar `Tensor` representing the value of
            `target_log_prob_fn` at the `reverse_state`.
        reverse_grads_target_log_prob: List of `Tensor`s representing
            gradient of `reverse_target_log_prob` with respect to
            `reverse_state`. Has same shape as `reverse_state`.
        reverse_momentum: List of `Tensor`s representing the momentums of
            `reverse_state`. Has same shape as `reverse_state`.
        forward_state: List of `Tensor`s representing the "forward" states of
            the NUTS trajectory. Has same shape as `current_state`.
        forward_target_log_prob: Scalar `Tensor` representing the value of
            `target_log_prob_fn` at the `forward_state`.
        forward_grads_target_log_prob: List of `Tensor`s representing
            gradient of `forward_target_log_prob` with respect to
            `forward_state`. Has same shape as `forward_state`.
        forward_momentum: List of `Tensor`s representing the momentums of
            `forward_state`. Has same shape as `forward_state`.
        next_state: List of `Tensor`s representing the next states of the
            NUTS trajectory. Has same shape as `current_state`.
        next_target_log_prob: Scalar `Tensor` representing the value of
            `target_log_prob_fn` at `next_state`.
        next_grads_target_log_prob: List of `Tensor`s representing the
            gradient of `next_target_log_prob` with respect to `next_state`.
        num_states: Number of acceptable candidate states in the subtree. A
            state is acceptable if it is "in the slice", that is, if its
            log-joint probability with its momentum is greater than
            `log_slice_sample`.
        continue_trajectory: bool determining whether to continue the
            simulation trajectory. The trajectory is continued if no U-turns
            are encountered within the built subtree, and if the
            log-probability accumulation due to integration error does not
            exceed `max_simulation_error`.
    """
    if depth == 0:  # base case
        # Take a leapfrog step. Terminate the tree-building if the simulation
        # error from the leapfrog integrator is too large. States discovered
        # by continuing the simulation are likely to have very low
        # probability.
        [next_state,
         next_target_log_prob,
         next_grads_target_log_prob,
         next_momentum] = _leapfrog(
             value_and_gradients_fn=value_and_gradients_fn,
             current_state=current_state,
             current_grads_target_log_prob=current_grads_target_log_prob,
             current_momentum=current_momentum,
             step_size=direction * step_size)
        next_log_joint = _log_joint(next_target_log_prob, next_momentum)
        num_states = tf.cast(next_log_joint > log_slice_sample,
                             dtype=tf.int32)
        continue_trajectory = (next_log_joint >
                               log_slice_sample - max_simulation_error)
        return [next_state,
                next_target_log_prob,
                next_grads_target_log_prob,
                next_momentum,
                next_state,
                next_target_log_prob,
                next_grads_target_log_prob,
                next_momentum,
                next_state,
                next_target_log_prob,
                next_grads_target_log_prob,
                num_states,
                continue_trajectory]

    # Build a tree at the current state.
    seed_stream = tfd.SeedStream(seed, "build_tree")
    [reverse_state,
     reverse_target_log_prob,
     reverse_grads_target_log_prob,
     reverse_momentum,
     forward_state,
     forward_target_log_prob,
     forward_grads_target_log_prob,
     forward_momentum,
     next_state,
     next_target_log_prob,
     next_grads_target_log_prob,
     num_states,
     continue_trajectory] = _build_tree(
         value_and_gradients_fn=value_and_gradients_fn,
         current_state=current_state,
         current_target_log_prob=current_target_log_prob,
         current_grads_target_log_prob=current_grads_target_log_prob,
         current_momentum=current_momentum,
         direction=direction,
         depth=depth - 1,
         step_size=step_size,
         log_slice_sample=log_slice_sample,
         seed=seed_stream())

    if continue_trajectory:
        # If the just-built subtree did not terminate, build a second subtree
        # at the forward or reverse state, as appropriate.
        if direction < 0:
            [reverse_state,
             reverse_target_log_prob,
             reverse_grads_target_log_prob,
             reverse_momentum,
             _, _, _, _,
             far_state,
             far_target_log_prob,
             far_grads_target_log_prob,
             far_num_states,
             far_continue_trajectory] = _build_tree(
                 value_and_gradients_fn=value_and_gradients_fn,
                 current_state=reverse_state,
                 current_target_log_prob=reverse_target_log_prob,
                 current_grads_target_log_prob=reverse_grads_target_log_prob,
                 current_momentum=reverse_momentum,
                 direction=direction,
                 depth=depth - 1,
                 step_size=step_size,
                 log_slice_sample=log_slice_sample,
                 seed=seed_stream())
        else:
            [_, _, _, _,
             forward_state,
             forward_target_log_prob,
             forward_grads_target_log_prob,
             forward_momentum,
             far_state,
             far_target_log_prob,
             far_grads_target_log_prob,
             far_num_states,
             far_continue_trajectory] = _build_tree(
                 value_and_gradients_fn=value_and_gradients_fn,
                 current_state=forward_state,
                 current_target_log_prob=forward_target_log_prob,
                 current_grads_target_log_prob=forward_grads_target_log_prob,
                 current_momentum=forward_momentum,
                 direction=direction,
                 depth=depth - 1,
                 step_size=step_size,
                 log_slice_sample=log_slice_sample,
                 seed=seed_stream())

        # Propose either `next_state` (which came from the first subtree and
        # so is nearby) or the new forward/reverse state (which came from the
        # second subtree and so is far away).
        num_states += far_num_states
        accept_far_state = _random_bernoulli(
            [], probs=far_num_states / num_states, dtype=tf.bool,
            seed=seed_stream())
        if accept_far_state:
            next_state = far_state
            next_target_log_prob = far_target_log_prob
            next_grads_target_log_prob = far_grads_target_log_prob

        # Continue the NUTS trajectory if the far subtree did not terminate
        # either, and if the reverse-most and forward-most states do not
        # exhibit a U-turn.
        has_no_u_turn = tf.logical_and(
            _has_no_u_turn(forward_state, reverse_state, forward_momentum),
            _has_no_u_turn(forward_state, reverse_state, reverse_momentum))
        continue_trajectory = far_continue_trajectory and has_no_u_turn

    return [reverse_state,
            reverse_target_log_prob,
            reverse_grads_target_log_prob,
            reverse_momentum,
            forward_state,
            forward_target_log_prob,
            forward_grads_target_log_prob,
            forward_momentum,
            next_state,
            next_target_log_prob,
            next_grads_target_log_prob,
            num_states,
            continue_trajectory]
[ "def", "_build_tree", "(", "value_and_gradients_fn", ",", "current_state", ",", "current_target_log_prob", ",", "current_grads_target_log_prob", ",", "current_momentum", ",", "direction", ",", "depth", ",", "step_size", ",", "log_slice_sample", ",", "max_simulation_error", "=", "1000.", ",", "seed", "=", "None", ")", ":", "if", "depth", "==", "0", ":", "# base case", "# Take a leapfrog step. Terminate the tree-building if the simulation", "# error from the leapfrog integrator is too large. States discovered by", "# continuing the simulation are likely to have very low probability.", "[", "next_state", ",", "next_target_log_prob", ",", "next_grads_target_log_prob", ",", "next_momentum", ",", "]", "=", "_leapfrog", "(", "value_and_gradients_fn", "=", "value_and_gradients_fn", ",", "current_state", "=", "current_state", ",", "current_grads_target_log_prob", "=", "current_grads_target_log_prob", ",", "current_momentum", "=", "current_momentum", ",", "step_size", "=", "direction", "*", "step_size", ")", "next_log_joint", "=", "_log_joint", "(", "next_target_log_prob", ",", "next_momentum", ")", "num_states", "=", "tf", ".", "cast", "(", "next_log_joint", ">", "log_slice_sample", ",", "dtype", "=", "tf", ".", "int32", ")", "continue_trajectory", "=", "(", "next_log_joint", ">", "log_slice_sample", "-", "max_simulation_error", ")", "return", "[", "next_state", ",", "next_target_log_prob", ",", "next_grads_target_log_prob", ",", "next_momentum", ",", "next_state", ",", "next_target_log_prob", ",", "next_grads_target_log_prob", ",", "next_momentum", ",", "next_state", ",", "next_target_log_prob", ",", "next_grads_target_log_prob", ",", "num_states", ",", "continue_trajectory", ",", "]", "# Build a tree at the current state.", "seed_stream", "=", "tfd", ".", "SeedStream", "(", "seed", ",", "\"build_tree\"", ")", "[", "reverse_state", ",", "reverse_target_log_prob", ",", "reverse_grads_target_log_prob", ",", "reverse_momentum", ",", "forward_state", ",", "forward_target_log_prob", ",", "forward_grads_target_log_prob", ",", "forward_momentum", ",", "next_state", ",", "next_target_log_prob", ",", "next_grads_target_log_prob", ",", "num_states", ",", "continue_trajectory", ",", "]", "=", "_build_tree", "(", "value_and_gradients_fn", "=", "value_and_gradients_fn", ",", "current_state", "=", "current_state", ",", "current_target_log_prob", "=", "current_target_log_prob", ",", "current_grads_target_log_prob", "=", "current_grads_target_log_prob", ",", "current_momentum", "=", "current_momentum", ",", "direction", "=", "direction", ",", "depth", "=", "depth", "-", "1", ",", "step_size", "=", "step_size", ",", "log_slice_sample", "=", "log_slice_sample", ",", "seed", "=", "seed_stream", "(", ")", ")", "if", "continue_trajectory", ":", "# If the just-built subtree did not terminate, build a second subtree at", "# the forward or reverse state, as appropriate.", "if", "direction", "<", "0", ":", "[", "reverse_state", ",", "reverse_target_log_prob", ",", "reverse_grads_target_log_prob", ",", "reverse_momentum", ",", "_", ",", "_", ",", "_", ",", "_", ",", "far_state", ",", "far_target_log_prob", ",", "far_grads_target_log_prob", ",", "far_num_states", ",", "far_continue_trajectory", ",", "]", "=", "_build_tree", "(", "value_and_gradients_fn", "=", "value_and_gradients_fn", ",", "current_state", "=", "reverse_state", ",", "current_target_log_prob", "=", "reverse_target_log_prob", ",", "current_grads_target_log_prob", "=", "reverse_grads_target_log_prob", ",", "current_momentum", "=", "reverse_momentum", ",", 
"direction", "=", "direction", ",", "depth", "=", "depth", "-", "1", ",", "step_size", "=", "step_size", ",", "log_slice_sample", "=", "log_slice_sample", ",", "seed", "=", "seed_stream", "(", ")", ")", "else", ":", "[", "_", ",", "_", ",", "_", ",", "_", ",", "forward_state", ",", "forward_target_log_prob", ",", "forward_grads_target_log_prob", ",", "forward_momentum", ",", "far_state", ",", "far_target_log_prob", ",", "far_grads_target_log_prob", ",", "far_num_states", ",", "far_continue_trajectory", ",", "]", "=", "_build_tree", "(", "value_and_gradients_fn", "=", "value_and_gradients_fn", ",", "current_state", "=", "forward_state", ",", "current_target_log_prob", "=", "forward_target_log_prob", ",", "current_grads_target_log_prob", "=", "forward_grads_target_log_prob", ",", "current_momentum", "=", "forward_momentum", ",", "direction", "=", "direction", ",", "depth", "=", "depth", "-", "1", ",", "step_size", "=", "step_size", ",", "log_slice_sample", "=", "log_slice_sample", ",", "seed", "=", "seed_stream", "(", ")", ")", "# Propose either `next_state` (which came from the first subtree and so is", "# nearby) or the new forward/reverse state (which came from the second", "# subtree and so is far away).", "num_states", "+=", "far_num_states", "accept_far_state", "=", "_random_bernoulli", "(", "[", "]", ",", "probs", "=", "far_num_states", "/", "num_states", ",", "dtype", "=", "tf", ".", "bool", ",", "seed", "=", "seed_stream", "(", ")", ")", "if", "accept_far_state", ":", "next_state", "=", "far_state", "next_target_log_prob", "=", "far_target_log_prob", "next_grads_target_log_prob", "=", "far_grads_target_log_prob", "# Continue the NUTS trajectory if the far subtree did not terminate either,", "# and if the reverse-most and forward-most states do not exhibit a U-turn.", "has_no_u_turn", "=", "tf", ".", "logical_and", "(", "_has_no_u_turn", "(", "forward_state", ",", "reverse_state", ",", "forward_momentum", ")", ",", "_has_no_u_turn", "(", "forward_state", ",", "reverse_state", ",", "reverse_momentum", ")", ")", "continue_trajectory", "=", "far_continue_trajectory", "and", "has_no_u_turn", "return", "[", "reverse_state", ",", "reverse_target_log_prob", ",", "reverse_grads_target_log_prob", ",", "reverse_momentum", ",", "forward_state", ",", "forward_target_log_prob", ",", "forward_grads_target_log_prob", ",", "forward_momentum", ",", "next_state", ",", "next_target_log_prob", ",", "next_grads_target_log_prob", ",", "num_states", ",", "continue_trajectory", ",", "]" ]
40.865217
19.53913
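The U-turn test above is delegated to the repo's _has_no_u_turn helper, whose definition is not shown here. Presumably it is the dot-product criterion from the NUTS paper (Hoffman & Gelman, 2014); a minimal NumPy sketch of that criterion, under that assumption:

import numpy as np

def has_no_u_turn(state_forward, state_reverse, momentum):
    # Keep expanding while the span between the extreme states still
    # points along the momentum (non-negative dot product).
    diff = np.concatenate([np.ravel(f - r)
                           for f, r in zip(state_forward, state_reverse)])
    mom = np.concatenate([np.ravel(m) for m in momentum])
    return np.dot(diff, mom) >= 0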
def writeColorLUT2(config, outfile=None, isochrone=None,
                   distance_modulus_array=None, delta_mag=None,
                   mag_err_array=None, mass_steps=10000, plot=False):
    """
    Precompute a 4-dimensional signal color probability look-up table to
    speed up the likelihood evaluation.

    Inputs are a Config object (or file name), an Isochrone object, an array
    of distance moduli at which to evaluate the signal color probability,
    and an array of magnitude uncertainties which set the bin edges of those
    dimensions (zero implicitly included). Finally there is an outfile name.
    """
    if plot:
        import ugali.utils.plotting

    if type(config) == str:
        config = ugali.utils.config.Config(config)
    if outfile is None:
        outfile = config.params['color_lut']['filename']
    if isochrone is None:
        isochrones = []
        for ii, name in enumerate(config.params['isochrone']['infiles']):
            isochrones.append(ugali.isochrone.Isochrone(config, name))
        isochrone = ugali.isochrone.CompositeIsochrone(
            isochrones, config.params['isochrone']['weights'])
    if distance_modulus_array is None:
        distance_modulus_array = config.params['color_lut']['distance_modulus_array']
    if delta_mag is None:
        delta_mag = config.params['color_lut']['delta_mag']
    if mag_err_array is None:
        mag_err_array = config.params['color_lut']['mag_err_array']

    # Safety buffer in magnitudes around the color-magnitude space
    # defined by the ROI
    mag_buffer = 0.5
    epsilon = 1.e-10
    if config.params['catalog']['band_1_detection']:
        bins_mag_1 = numpy.arange(
            config.params['mag']['min'] - mag_buffer,
            config.params['mag']['max'] + mag_buffer + epsilon,
            delta_mag)
        bins_mag_2 = numpy.arange(
            config.params['mag']['min'] - config.params['color']['max'] - mag_buffer,
            config.params['mag']['max'] - config.params['color']['min'] + mag_buffer + epsilon,
            delta_mag)
    else:
        bins_mag_1 = numpy.arange(
            config.params['mag']['min'] + config.params['color']['min'] - mag_buffer,
            config.params['mag']['max'] + config.params['color']['max'] + mag_buffer + epsilon,
            delta_mag)
        bins_mag_2 = numpy.arange(
            config.params['mag']['min'] - mag_buffer,
            config.params['mag']['max'] + mag_buffer + epsilon,
            delta_mag)

    # Output binning configuration
    #print config.params['catalog']['band_1_detection']
    #print config.params['mag']['min'], config.params['mag']['max']
    #print config.params['color']['min'], config.params['color']['max']
    #print bins_mag_1[0], bins_mag_1[-1], len(bins_mag_1)
    #print bins_mag_2[0], bins_mag_2[-1], len(bins_mag_2)

    (isochrone_mass_init, isochrone_mass_pdf, isochrone_mass_act,
     isochrone_mag_1, isochrone_mag_2) = isochrone.sample(mass_steps=mass_steps)

    hdul = pyfits.HDUList()
    for index_distance_modulus, distance_modulus in enumerate(distance_modulus_array):
        logger.debug('(%i/%i)' % (index_distance_modulus,
                                  len(distance_modulus_array)))
        columns_array = []
        time_start = time.time()
        histo_isochrone_pdf = numpy.histogram2d(
            distance_modulus + isochrone_mag_1,
            distance_modulus + isochrone_mag_2,
            bins=[bins_mag_1, bins_mag_2],
            weights=isochrone_mass_pdf)[0]
        if plot:
            # Checked that axes are plotted correctly
            ugali.utils.plotting.twoDimensionalHistogram(
                'Isochrone', 'mag_1', 'mag_2',
                numpy.log10(histo_isochrone_pdf + epsilon).transpose(),
                bins_mag_1, bins_mag_2,
                lim_x=None, lim_y=None, vmin=None, vmax=None)

        for index_mag_err_1, mag_err_1 in enumerate(mag_err_array):
            for index_mag_err_2, mag_err_2 in enumerate(mag_err_array):
                logger.debug(' Distance modulus = %.2f mag_err_1 = %.2f mag_err_2 = %.2f'
                             % (distance_modulus, mag_err_1, mag_err_2))
                mag_1_sigma_step = delta_mag / mag_err_1
                n = int(numpy.ceil(4. / mag_1_sigma_step))
                mag_1_sigma = numpy.arange(
                    -1. * (n + 0.5) * mag_1_sigma_step,
                    ((n + 0.5) * mag_1_sigma_step) + epsilon,
                    mag_1_sigma_step)
                mag_1_pdf_array = scipy.stats.norm.cdf(mag_1_sigma[1:]) \
                    - scipy.stats.norm.cdf(mag_1_sigma[0:-1])
                mag_2_sigma_step = delta_mag / mag_err_2
                n = int(numpy.ceil(4. / mag_2_sigma_step))
                mag_2_sigma = numpy.arange(
                    -1. * (n + 0.5) * mag_2_sigma_step,
                    ((n + 0.5) * mag_2_sigma_step) + epsilon,
                    mag_2_sigma_step)
                mag_2_pdf_array = scipy.stats.norm.cdf(mag_2_sigma[1:]) \
                    - scipy.stats.norm.cdf(mag_2_sigma[0:-1])
                mag_1_pdf, mag_2_pdf = numpy.meshgrid(mag_2_pdf_array,
                                                      mag_1_pdf_array)
                pdf = mag_1_pdf * mag_2_pdf
                histo_isochrone_pdf_convolve = scipy.signal.convolve2d(
                    histo_isochrone_pdf, pdf, mode='same')
                if plot:
                    # Checked that axes are plotted correctly
                    ugali.utils.plotting.twoDimensionalHistogram(
                        'Convolved Isochrone', 'mag_1', 'mag_2',
                        numpy.log10(histo_isochrone_pdf_convolve + epsilon).transpose(),
                        bins_mag_1, bins_mag_2,
                        lim_x=None, lim_y=None, vmin=None, vmax=None)
                columns_array.append(pyfits.Column(
                    name='%i%i' % (index_mag_err_1, index_mag_err_2),
                    format='%iE' % (histo_isochrone_pdf_convolve.shape[1]),
                    array=histo_isochrone_pdf_convolve))

        hdu = pyfits.new_table(columns_array)
        hdu.header.update('DIST_MOD', distance_modulus)
        hdu.name = '%.2f' % (distance_modulus)
        hdul.append(hdu)
        time_end = time.time()
        logger.debug('%.2f s' % (time_end - time_start))

    # Store distance modulus info
    columns_array = [pyfits.Column(name='DISTANCE_MODULUS', format='E',
                                   array=distance_modulus_array)]
    hdu = pyfits.new_table(columns_array)
    hdu.name = 'DISTANCE_MODULUS'
    hdul.append(hdu)

    # Store magnitude error info
    columns_array = [pyfits.Column(name='BINS_MAG_ERR', format='E',
                                   array=numpy.insert(mag_err_array, 0, 0.))]
    hdu = pyfits.new_table(columns_array)
    hdu.name = 'BINS_MAG_ERR'
    hdul.append(hdu)

    # Store magnitude 1 info
    columns_array = [pyfits.Column(name='BINS_MAG_1', format='E',
                                   array=bins_mag_1)]
    hdu = pyfits.new_table(columns_array)
    hdu.name = 'BINS_MAG_1'
    hdul.append(hdu)

    # Store magnitude 2 info
    columns_array = [pyfits.Column(name='BINS_MAG_2', format='E',
                                   array=bins_mag_2)]
    hdu = pyfits.new_table(columns_array)
    hdu.name = 'BINS_MAG_2'
    hdul.append(hdu)

    logger.info('Writing look-up table to %s' % (outfile))
    hdul.writeto(outfile, clobber=True)
[ "def", "writeColorLUT2", "(", "config", ",", "outfile", "=", "None", ",", "isochrone", "=", "None", ",", "distance_modulus_array", "=", "None", ",", "delta_mag", "=", "None", ",", "mag_err_array", "=", "None", ",", "mass_steps", "=", "10000", ",", "plot", "=", "False", ")", ":", "if", "plot", ":", "import", "ugali", ".", "utils", ".", "plotting", "if", "type", "(", "config", ")", "==", "str", ":", "config", "=", "ugali", ".", "utils", ".", "config", ".", "Config", "(", "config", ")", "if", "outfile", "is", "None", ":", "outfile", "=", "config", ".", "params", "[", "'color_lut'", "]", "[", "'filename'", "]", "if", "isochrone", "is", "None", ":", "isochrones", "=", "[", "]", "for", "ii", ",", "name", "in", "enumerate", "(", "config", ".", "params", "[", "'isochrone'", "]", "[", "'infiles'", "]", ")", ":", "isochrones", ".", "append", "(", "ugali", ".", "isochrone", ".", "Isochrone", "(", "config", ",", "name", ")", ")", "isochrone", "=", "ugali", ".", "isochrone", ".", "CompositeIsochrone", "(", "isochrones", ",", "config", ".", "params", "[", "'isochrone'", "]", "[", "'weights'", "]", ")", "if", "distance_modulus_array", "is", "None", ":", "distance_modulus_array", "=", "config", ".", "params", "[", "'color_lut'", "]", "[", "'distance_modulus_array'", "]", "if", "delta_mag", "is", "None", ":", "delta_mag", "=", "config", ".", "params", "[", "'color_lut'", "]", "[", "'delta_mag'", "]", "if", "mag_err_array", "is", "None", ":", "mag_err_array", "=", "config", ".", "params", "[", "'color_lut'", "]", "[", "'mag_err_array'", "]", "mag_buffer", "=", "0.5", "# Safety buffer in magnitudes around the color-magnitude space defined by the ROI", "epsilon", "=", "1.e-10", "if", "config", ".", "params", "[", "'catalog'", "]", "[", "'band_1_detection'", "]", ":", "bins_mag_1", "=", "numpy", ".", "arange", "(", "config", ".", "params", "[", "'mag'", "]", "[", "'min'", "]", "-", "mag_buffer", ",", "config", ".", "params", "[", "'mag'", "]", "[", "'max'", "]", "+", "mag_buffer", "+", "epsilon", ",", "delta_mag", ")", "bins_mag_2", "=", "numpy", ".", "arange", "(", "config", ".", "params", "[", "'mag'", "]", "[", "'min'", "]", "-", "config", ".", "params", "[", "'color'", "]", "[", "'max'", "]", "-", "mag_buffer", ",", "config", ".", "params", "[", "'mag'", "]", "[", "'max'", "]", "-", "config", ".", "params", "[", "'color'", "]", "[", "'min'", "]", "+", "mag_buffer", "+", "epsilon", ",", "delta_mag", ")", "else", ":", "bins_mag_1", "=", "numpy", ".", "arange", "(", "config", ".", "params", "[", "'mag'", "]", "[", "'min'", "]", "+", "config", ".", "params", "[", "'color'", "]", "[", "'min'", "]", "-", "mag_buffer", ",", "config", ".", "params", "[", "'mag'", "]", "[", "'max'", "]", "+", "config", ".", "params", "[", "'color'", "]", "[", "'max'", "]", "+", "mag_buffer", "+", "epsilon", ",", "delta_mag", ")", "bins_mag_2", "=", "numpy", ".", "arange", "(", "config", ".", "params", "[", "'mag'", "]", "[", "'min'", "]", "-", "mag_buffer", ",", "config", ".", "params", "[", "'mag'", "]", "[", "'max'", "]", "+", "mag_buffer", "+", "epsilon", ",", "delta_mag", ")", "# Output binning configuration", "#print config.params['catalog']['band_1_detection']", "#print config.params['mag']['min'], config.params['mag']['max']", "#print config.params['color']['min'], config.params['color']['max']", "#print bins_mag_1[0], bins_mag_1[-1], len(bins_mag_1)", "#print bins_mag_2[0], bins_mag_2[-1], len(bins_mag_2)", "isochrone_mass_init", ",", "isochrone_mass_pdf", ",", "isochrone_mass_act", ",", "isochrone_mag_1", ",", 
"isochrone_mag_2", "=", "isochrone", ".", "sample", "(", "mass_steps", "=", "mass_steps", ")", "hdul", "=", "pyfits", ".", "HDUList", "(", ")", "for", "index_distance_modulus", ",", "distance_modulus", "in", "enumerate", "(", "distance_modulus_array", ")", ":", "logger", ".", "debug", "(", "'(%i/%i)'", "%", "(", "index_distance_modulus", ",", "len", "(", "distance_modulus_array", ")", ")", ")", "columns_array", "=", "[", "]", "time_start", "=", "time", ".", "time", "(", ")", "histo_isochrone_pdf", "=", "numpy", ".", "histogram2d", "(", "distance_modulus", "+", "isochrone_mag_1", ",", "distance_modulus", "+", "isochrone_mag_2", ",", "bins", "=", "[", "bins_mag_1", ",", "bins_mag_2", "]", ",", "weights", "=", "isochrone_mass_pdf", ")", "[", "0", "]", "if", "plot", ":", "# Checked that axis are plotted correctly", "ugali", ".", "utils", ".", "plotting", ".", "twoDimensionalHistogram", "(", "'Isochrone'", ",", "'mag_1'", ",", "'mag_2'", ",", "numpy", ".", "log10", "(", "histo_isochrone_pdf", "+", "epsilon", ")", ".", "transpose", "(", ")", ",", "bins_mag_1", ",", "bins_mag_2", ",", "lim_x", "=", "None", ",", "lim_y", "=", "None", ",", "vmin", "=", "None", ",", "vmax", "=", "None", ")", "for", "index_mag_err_1", ",", "mag_err_1", "in", "enumerate", "(", "mag_err_array", ")", ":", "for", "index_mag_err_2", ",", "mag_err_2", "in", "enumerate", "(", "mag_err_array", ")", ":", "logger", ".", "debug", "(", "' Distance modulus = %.2f mag_err_1 = %.2f mag_err_2 = %.2f'", "%", "(", "distance_modulus", ",", "mag_err_1", ",", "mag_err_2", ")", ")", "mag_1_sigma_step", "=", "delta_mag", "/", "mag_err_1", "n", "=", "int", "(", "numpy", ".", "ceil", "(", "4.", "/", "mag_1_sigma_step", ")", ")", "mag_1_sigma", "=", "numpy", ".", "arange", "(", "-", "1.", "*", "(", "n", "+", "0.5", ")", "*", "mag_1_sigma_step", ",", "(", "(", "n", "+", "0.5", ")", "*", "mag_1_sigma_step", ")", "+", "epsilon", ",", "mag_1_sigma_step", ")", "mag_1_pdf_array", "=", "scipy", ".", "stats", ".", "norm", ".", "cdf", "(", "mag_1_sigma", "[", "1", ":", "]", ")", "-", "scipy", ".", "stats", ".", "norm", ".", "cdf", "(", "mag_1_sigma", "[", "0", ":", "-", "1", "]", ")", "mag_2_sigma_step", "=", "delta_mag", "/", "mag_err_2", "n", "=", "int", "(", "numpy", ".", "ceil", "(", "4.", "/", "mag_2_sigma_step", ")", ")", "mag_2_sigma", "=", "numpy", ".", "arange", "(", "-", "1.", "*", "(", "n", "+", "0.5", ")", "*", "mag_2_sigma_step", ",", "(", "(", "n", "+", "0.5", ")", "*", "mag_2_sigma_step", ")", "+", "epsilon", ",", "mag_2_sigma_step", ")", "mag_2_pdf_array", "=", "scipy", ".", "stats", ".", "norm", ".", "cdf", "(", "mag_2_sigma", "[", "1", ":", "]", ")", "-", "scipy", ".", "stats", ".", "norm", ".", "cdf", "(", "mag_2_sigma", "[", "0", ":", "-", "1", "]", ")", "mag_1_pdf", ",", "mag_2_pdf", "=", "numpy", ".", "meshgrid", "(", "mag_2_pdf_array", ",", "mag_1_pdf_array", ")", "pdf", "=", "mag_1_pdf", "*", "mag_2_pdf", "histo_isochrone_pdf_convolve", "=", "scipy", ".", "signal", ".", "convolve2d", "(", "histo_isochrone_pdf", ",", "pdf", ",", "mode", "=", "'same'", ")", "if", "plot", ":", "# Checked that axis are plotted correctly", "ugali", ".", "utils", ".", "plotting", ".", "twoDimensionalHistogram", "(", "'Convolved Isochrone'", ",", "'mag_1'", ",", "'mag_2'", ",", "numpy", ".", "log10", "(", "histo_isochrone_pdf_convolve", "+", "epsilon", ")", ".", "transpose", "(", ")", ",", "bins_mag_1", ",", "bins_mag_2", ",", "lim_x", "=", "None", ",", "lim_y", "=", "None", ",", "vmin", "=", "None", ",", "vmax", "=", "None", ")", "columns_array", 
".", "append", "(", "pyfits", ".", "Column", "(", "name", "=", "'%i%i'", "%", "(", "index_mag_err_1", ",", "index_mag_err_2", ")", ",", "format", "=", "'%iE'", "%", "(", "histo_isochrone_pdf_convolve", ".", "shape", "[", "1", "]", ")", ",", "array", "=", "histo_isochrone_pdf_convolve", ")", ")", "hdu", "=", "pyfits", ".", "new_table", "(", "columns_array", ")", "hdu", ".", "header", ".", "update", "(", "'DIST_MOD'", ",", "distance_modulus", ")", "hdu", ".", "name", "=", "'%.2f'", "%", "(", "distance_modulus", ")", "hdul", ".", "append", "(", "hdu", ")", "time_end", "=", "time", ".", "time", "(", ")", "logger", ".", "debug", "(", "'%.2f s'", "%", "(", "time_end", "-", "time_start", ")", ")", "# Store distance modulus info", "columns_array", "=", "[", "pyfits", ".", "Column", "(", "name", "=", "'DISTANCE_MODULUS'", ",", "format", "=", "'E'", ",", "array", "=", "distance_modulus_array", ")", "]", "hdu", "=", "pyfits", ".", "new_table", "(", "columns_array", ")", "hdu", ".", "name", "=", "'DISTANCE_MODULUS'", "hdul", ".", "append", "(", "hdu", ")", "# Store magnitude error info", "columns_array", "=", "[", "pyfits", ".", "Column", "(", "name", "=", "'BINS_MAG_ERR'", ",", "format", "=", "'E'", ",", "array", "=", "numpy", ".", "insert", "(", "mag_err_array", ",", "0", ",", "0.", ")", ")", "]", "hdu", "=", "pyfits", ".", "new_table", "(", "columns_array", ")", "hdu", ".", "name", "=", "'BINS_MAG_ERR'", "hdul", ".", "append", "(", "hdu", ")", "# Store magnitude 1 info", "columns_array", "=", "[", "pyfits", ".", "Column", "(", "name", "=", "'BINS_MAG_1'", ",", "format", "=", "'E'", ",", "array", "=", "bins_mag_1", ")", "]", "hdu", "=", "pyfits", ".", "new_table", "(", "columns_array", ")", "hdu", ".", "name", "=", "'BINS_MAG_1'", "hdul", ".", "append", "(", "hdu", ")", "# Store magnitude 2 info", "columns_array", "=", "[", "pyfits", ".", "Column", "(", "name", "=", "'BINS_MAG_2'", ",", "format", "=", "'E'", ",", "array", "=", "bins_mag_2", ")", "]", "hdu", "=", "pyfits", ".", "new_table", "(", "columns_array", ")", "hdu", ".", "name", "=", "'BINS_MAG_2'", "hdul", ".", "append", "(", "hdu", ")", "logger", ".", "info", "(", "'Writing look-up table to %s'", "%", "(", "outfile", ")", ")", "hdul", ".", "writeto", "(", "outfile", ",", "clobber", "=", "True", ")" ]
50.012579
29.748428
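The per-bin Gaussian kernel in the loop above is built by differencing the normal CDF at bin edges rather than sampling the PDF, so each bin carries exactly its probability mass. A standalone sketch of that construction (the numbers are illustrative):

import numpy as np
import scipy.stats

delta_mag, mag_err = 0.03, 0.1      # bin width and photometric error
step = delta_mag / mag_err          # bin width in units of sigma
n = int(np.ceil(4. / step))         # cover roughly +/- 4 sigma
edges = np.arange(-(n + 0.5) * step, (n + 0.5) * step + 1e-10, step)
kernel = scipy.stats.norm.cdf(edges[1:]) - scipy.stats.norm.cdf(edges[:-1])
print(kernel.sum())                 # ~1: total probability mass in the kernel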
def validate(config):
    '''
    Validate the beacon configuration
    '''
    VALID_ITEMS = [
        'type', 'bytes_sent', 'bytes_recv', 'packets_sent',
        'packets_recv', 'errin', 'errout', 'dropin', 'dropout'
    ]

    # Configuration for network_info beacon should be a list of dicts
    if not isinstance(config, list):
        return False, ('Configuration for network_info beacon must be a list.')
    else:
        _config = {}
        list(map(_config.update, config))

        for item in _config.get('interfaces', {}):
            if not isinstance(_config['interfaces'][item], dict):
                return False, ('Configuration for network_info beacon must '
                               'be a list of dictionaries.')
            else:
                if not any(j in VALID_ITEMS
                           for j in _config['interfaces'][item]):
                    return False, ('Invalid configuration item in '
                                   'Beacon configuration.')

    return True, 'Valid beacon configuration'
[ "def", "validate", "(", "config", ")", ":", "VALID_ITEMS", "=", "[", "'type'", ",", "'bytes_sent'", ",", "'bytes_recv'", ",", "'packets_sent'", ",", "'packets_recv'", ",", "'errin'", ",", "'errout'", ",", "'dropin'", ",", "'dropout'", "]", "# Configuration for load beacon should be a list of dicts", "if", "not", "isinstance", "(", "config", ",", "list", ")", ":", "return", "False", ",", "(", "'Configuration for network_info beacon must be a list.'", ")", "else", ":", "_config", "=", "{", "}", "list", "(", "map", "(", "_config", ".", "update", ",", "config", ")", ")", "for", "item", "in", "_config", ".", "get", "(", "'interfaces'", ",", "{", "}", ")", ":", "if", "not", "isinstance", "(", "_config", "[", "'interfaces'", "]", "[", "item", "]", ",", "dict", ")", ":", "return", "False", ",", "(", "'Configuration for network_info beacon must '", "'be a list of dictionaries.'", ")", "else", ":", "if", "not", "any", "(", "j", "in", "VALID_ITEMS", "for", "j", "in", "_config", "[", "'interfaces'", "]", "[", "item", "]", ")", ":", "return", "False", ",", "(", "'Invalid configuration item in '", "'Beacon configuration.'", ")", "return", "True", ",", "'Valid beacon configuration'" ]
35.392857
24.321429
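As a usage illustration, a configuration the validator above would accept (interface name and thresholds made up) looks like:

config = [{'interfaces': {'eth0': {'type': 'equal',
                                   'bytes_sent': 100000,
                                   'bytes_recv': 100000}}}]
print(validate(config))  # (True, 'Valid beacon configuration')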
def get_order_detail(self, code):
    """
    Query the order queue detail provided under the A-share Level 2
    permission.

    :param code: stock code, e.g. 'HK.02318'
    :return: (ret, data)

            If ret == RET_OK, data is a dict with the following structure;
            if ret != RET_OK, data is an error string.

            {'code': stock code
             'Ask': [order_num, [order_volume1, order_volume2, ...]]
             'Bid': [order_num, [order_volume1, order_volume2, ...]]
            }

            'Ask' is the sell side and 'Bid' the buy side. order_num is the
            number of queued orders; order_volume is the volume of each
            individual order, currently returned for at most the first 50
            orders. Hence order_num may exceed the number of order_volume
            entries.
    """
    if code is None or is_str(code) is False:
        error_str = ERROR_STR_PREFIX + "the type of code param is wrong"
        return RET_ERROR, error_str

    query_processor = self._get_sync_query_processor(
        OrderDetail.pack_req, OrderDetail.unpack_rsp)

    kargs = {
        "code": code,
        "conn_id": self.get_sync_conn_id()
    }

    ret_code, msg, order_detail = query_processor(**kargs)
    if ret_code == RET_ERROR:
        return ret_code, msg

    return RET_OK, order_detail
[ "def", "get_order_detail", "(", "self", ",", "code", ")", ":", "if", "code", "is", "None", "or", "is_str", "(", "code", ")", "is", "False", ":", "error_str", "=", "ERROR_STR_PREFIX", "+", "\"the type of code param is wrong\"", "return", "RET_ERROR", ",", "error_str", "query_processor", "=", "self", ".", "_get_sync_query_processor", "(", "OrderDetail", ".", "pack_req", ",", "OrderDetail", ".", "unpack_rsp", ")", "kargs", "=", "{", "\"code\"", ":", "code", ",", "\"conn_id\"", ":", "self", ".", "get_sync_conn_id", "(", ")", "}", "ret_code", ",", "msg", ",", "order_detail", "=", "query_processor", "(", "*", "*", "kargs", ")", "if", "ret_code", "==", "RET_ERROR", ":", "return", "ret_code", ",", "msg", "return", "RET_OK", ",", "order_detail" ]
30.8
21.885714
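A hypothetical call against this method, following the return convention in the docstring (quote_ctx stands in for however the surrounding API constructs its quote context):

ret, data = quote_ctx.get_order_detail('HK.02318')
if ret == RET_OK:
    ask_num, ask_volumes = data['Ask']
    print('sell-side orders:', ask_num, 'first volumes:', ask_volumes[:5])
else:
    print('error:', data)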
def plot(self, flip=False, ax_channels=None, ax=None, *args, **kwargs):
    """
    {_gate_plot_doc}
    """
    for gate in self.gates:
        gate.plot(flip=flip, ax_channels=ax_channels, ax=ax,
                  *args, **kwargs)
[ "def", "plot", "(", "self", ",", "flip", "=", "False", ",", "ax_channels", "=", "None", ",", "ax", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "for", "gate", "in", "self", ".", "gates", ":", "gate", ".", "plot", "(", "flip", "=", "flip", ",", "ax_channels", "=", "ax_channels", ",", "ax", "=", "ax", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
38.166667
16.166667
def init(policy_file=None, rules=None, default_rule=None, use_conf=True):
    """Init an Enforcer class.

    :param policy_file: Custom policy file to use, if none is specified,
        `CONF.policy_file` will be used.
    :param rules: Default dictionary / Rules to use. It will be considered
        only in the first instantiation.
    :param default_rule: Default rule to use, CONF.default_rule will be used
        if none is specified.
    :param use_conf: Whether to load rules from config file.
    """
    global _ENFORCER
    global saved_file_rules

    if not _ENFORCER:
        _ENFORCER = policy.Enforcer(CONF,
                                    policy_file=policy_file,
                                    rules=rules,
                                    default_rule=default_rule,
                                    use_conf=use_conf)
        register_rules(_ENFORCER)
        _ENFORCER.load_rules()

    # Only the rules which are loaded from file may be changed
    current_file_rules = _ENFORCER.file_rules
    current_file_rules = _serialize_rules(current_file_rules)

    if saved_file_rules != current_file_rules:
        _warning_for_deprecated_user_based_rules(current_file_rules)
        saved_file_rules = copy.deepcopy(current_file_rules)
[ "def", "init", "(", "policy_file", "=", "None", ",", "rules", "=", "None", ",", "default_rule", "=", "None", ",", "use_conf", "=", "True", ")", ":", "global", "_ENFORCER", "global", "saved_file_rules", "if", "not", "_ENFORCER", ":", "_ENFORCER", "=", "policy", ".", "Enforcer", "(", "CONF", ",", "policy_file", "=", "policy_file", ",", "rules", "=", "rules", ",", "default_rule", "=", "default_rule", ",", "use_conf", "=", "use_conf", ")", "register_rules", "(", "_ENFORCER", ")", "_ENFORCER", ".", "load_rules", "(", ")", "# Only the rules which are loaded from file may be changed", "current_file_rules", "=", "_ENFORCER", ".", "file_rules", "current_file_rules", "=", "_serialize_rules", "(", "current_file_rules", ")", "if", "saved_file_rules", "!=", "current_file_rules", ":", "_warning_for_deprecated_user_based_rules", "(", "current_file_rules", ")", "saved_file_rules", "=", "copy", ".", "deepcopy", "(", "current_file_rules", ")" ]
43.193548
19.548387
def _GetVSSStoreIdentifiers(self, scan_node):
    """Determines the VSS store identifiers.

    Args:
      scan_node (dfvfs.SourceScanNode): scan node.

    Returns:
      list[str]: VSS store identifiers.

    Raises:
      SourceScannerError: if the format of or within the source is not
          supported or the scan node is invalid.
      UserAbort: if the user requested to abort.
    """
    if not scan_node or not scan_node.path_spec:
        raise errors.SourceScannerError('Invalid scan node.')

    volume_system = vshadow_volume_system.VShadowVolumeSystem()
    volume_system.Open(scan_node.path_spec)

    volume_identifiers = self._source_scanner.GetVolumeIdentifiers(
        volume_system)
    if not volume_identifiers:
        return []

    # TODO: refactor to use scan options.
    if self._vss_stores:
        if self._vss_stores == 'all':
            vss_stores = range(1, volume_system.number_of_volumes + 1)
        else:
            vss_stores = self._vss_stores

        selected_volume_identifiers = self._NormalizedVolumeIdentifiers(
            volume_system, vss_stores, prefix='vss')

        if not set(selected_volume_identifiers).difference(volume_identifiers):
            return selected_volume_identifiers

    try:
        volume_identifiers = self._PromptUserForVSSStoreIdentifiers(
            volume_system, volume_identifiers)
    except KeyboardInterrupt:
        raise errors.UserAbort('File system scan aborted.')

    return self._NormalizedVolumeIdentifiers(
        volume_system, volume_identifiers, prefix='vss')
[ "def", "_GetVSSStoreIdentifiers", "(", "self", ",", "scan_node", ")", ":", "if", "not", "scan_node", "or", "not", "scan_node", ".", "path_spec", ":", "raise", "errors", ".", "SourceScannerError", "(", "'Invalid scan node.'", ")", "volume_system", "=", "vshadow_volume_system", ".", "VShadowVolumeSystem", "(", ")", "volume_system", ".", "Open", "(", "scan_node", ".", "path_spec", ")", "volume_identifiers", "=", "self", ".", "_source_scanner", ".", "GetVolumeIdentifiers", "(", "volume_system", ")", "if", "not", "volume_identifiers", ":", "return", "[", "]", "# TODO: refactor to use scan options.", "if", "self", ".", "_vss_stores", ":", "if", "self", ".", "_vss_stores", "==", "'all'", ":", "vss_stores", "=", "range", "(", "1", ",", "volume_system", ".", "number_of_volumes", "+", "1", ")", "else", ":", "vss_stores", "=", "self", ".", "_vss_stores", "selected_volume_identifiers", "=", "self", ".", "_NormalizedVolumeIdentifiers", "(", "volume_system", ",", "vss_stores", ",", "prefix", "=", "'vss'", ")", "if", "not", "set", "(", "selected_volume_identifiers", ")", ".", "difference", "(", "volume_identifiers", ")", ":", "return", "selected_volume_identifiers", "try", ":", "volume_identifiers", "=", "self", ".", "_PromptUserForVSSStoreIdentifiers", "(", "volume_system", ",", "volume_identifiers", ")", "except", "KeyboardInterrupt", ":", "raise", "errors", ".", "UserAbort", "(", "'File system scan aborted.'", ")", "return", "self", ".", "_NormalizedVolumeIdentifiers", "(", "volume_system", ",", "volume_identifiers", ",", "prefix", "=", "'vss'", ")" ]
31.659574
21.12766
def set_constant(self, name, value):
    """
    Set constant of name to value.

    :param name: may be a str or a sympy.Symbol
    :param value: must be an int
    """
    assert isinstance(name, str) or isinstance(name, sympy.Symbol), \
        "constant name needs to be of type str, unicode or a sympy.Symbol"
    assert type(value) is int, "constant value needs to be of type int"

    if isinstance(name, sympy.Symbol):
        self.constants[name] = value
    else:
        self.constants[symbol_pos_int(name)] = value
[ "def", "set_constant", "(", "self", ",", "name", ",", "value", ")", ":", "assert", "isinstance", "(", "name", ",", "str", ")", "or", "isinstance", "(", "name", ",", "sympy", ".", "Symbol", ")", ",", "\"constant name needs to be of type str, unicode or a sympy.Symbol\"", "assert", "type", "(", "value", ")", "is", "int", ",", "\"constant value needs to be of type int\"", "if", "isinstance", "(", "name", ",", "sympy", ".", "Symbol", ")", ":", "self", ".", "constants", "[", "name", "]", "=", "value", "else", ":", "self", ".", "constants", "[", "symbol_pos_int", "(", "name", ")", "]", "=", "value" ]
40
15.142857
def create_pending_tasks(self):
    """
    Creates pending task results in a dict on self.results with the task
    string as key. It will also create a list on self.tasks that is used
    to make sure the serialization of the results creates a correctly
    ordered list.
    """
    for task in self.settings.services:
        task = self.create_service_command(task)
        self.service_tasks.append(task)
        self.service_results[task] = Result(task)

    for task in self.settings.tasks['setup']:
        self.setup_tasks.append(task)
        self.setup_results[task] = Result(task)

    for task in self.settings.tasks['tests']:
        self.tasks.append(task)
        self.results[task] = Result(task)
[ "def", "create_pending_tasks", "(", "self", ")", ":", "for", "task", "in", "self", ".", "settings", ".", "services", ":", "task", "=", "self", ".", "create_service_command", "(", "task", ")", "self", ".", "service_tasks", ".", "append", "(", "task", ")", "self", ".", "service_results", "[", "task", "]", "=", "Result", "(", "task", ")", "for", "task", "in", "self", ".", "settings", ".", "tasks", "[", "'setup'", "]", ":", "self", ".", "setup_tasks", ".", "append", "(", "task", ")", "self", ".", "setup_results", "[", "task", "]", "=", "Result", "(", "task", ")", "for", "task", "in", "self", ".", "settings", ".", "tasks", "[", "'tests'", "]", ":", "self", ".", "tasks", ".", "append", "(", "task", ")", "self", ".", "results", "[", "task", "]", "=", "Result", "(", "task", ")" ]
41.555556
15.222222
def scompressed2ibytes(stream):
    """
    :param stream: SOMETHING WITH read() METHOD TO GET MORE BYTES
    :return: GENERATOR OF UNCOMPRESSED BYTES
    """
    def more():
        try:
            while True:
                bytes_ = stream.read(4096)
                if not bytes_:
                    return
                yield bytes_
        except Exception as e:
            Log.error("Problem iterating through stream", cause=e)
        finally:
            with suppress_exception:
                stream.close()

    return icompressed2ibytes(more())
[ "def", "scompressed2ibytes", "(", "stream", ")", ":", "def", "more", "(", ")", ":", "try", ":", "while", "True", ":", "bytes_", "=", "stream", ".", "read", "(", "4096", ")", "if", "not", "bytes_", ":", "return", "yield", "bytes_", "except", "Exception", "as", "e", ":", "Log", ".", "error", "(", "\"Problem iterating through stream\"", ",", "cause", "=", "e", ")", "finally", ":", "with", "suppress_exception", ":", "stream", ".", "close", "(", ")", "return", "icompressed2ibytes", "(", "more", "(", ")", ")" ]
28.736842
13.894737
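The inner more() generator is the standard fixed-size chunked-read pattern; the same idea in a self-contained form over an in-memory stream:

import io

def iter_chunks(stream, size=4096):
    # Yield fixed-size chunks until the stream is exhausted.
    while True:
        chunk = stream.read(size)
        if not chunk:
            return
        yield chunk

data = io.BytesIO(b"x" * 10000)
print(sum(len(c) for c in iter_chunks(data)))  # 10000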
def pool_list(call=None):
    '''
    Get a list of Resource Pools

    .. code-block:: bash

        salt-cloud -f pool_list myxen
    '''
    if call == 'action':
        raise SaltCloudSystemExit(
            'This function must be called with -f, --function argument.'
        )
    ret = {}
    session = _get_session()
    pools = session.xenapi.pool.get_all()
    for pool in pools:
        pool_record = session.xenapi.pool.get_record(pool)
        ret[pool_record['name_label']] = pool_record
    return ret
[ "def", "pool_list", "(", "call", "=", "None", ")", ":", "if", "call", "==", "'action'", ":", "raise", "SaltCloudSystemExit", "(", "'This function must be called with -f, --function argument.'", ")", "ret", "=", "{", "}", "session", "=", "_get_session", "(", ")", "pools", "=", "session", ".", "xenapi", ".", "pool", ".", "get_all", "(", ")", "for", "pool", "in", "pools", ":", "pool_record", "=", "session", ".", "xenapi", ".", "pool", ".", "get_record", "(", "pool", ")", "ret", "[", "pool_record", "[", "'name_label'", "]", "]", "=", "pool_record", "return", "ret" ]
24.9
21.4
def pil_image3d(input, size=(800, 600), pcb_rotate=(0, 0, 0), timeout=20,
                showgui=False):
    '''
    Same as export_image3d, but no output file is written; a PIL Image
    object is returned instead.
    '''
    f = tempfile.NamedTemporaryFile(suffix='.png', prefix='eagexp_')
    output = f.name
    export_image3d(input, output=output, size=size, pcb_rotate=pcb_rotate,
                   timeout=timeout, showgui=showgui)
    im = Image.open(output)
    return im
[ "def", "pil_image3d", "(", "input", ",", "size", "=", "(", "800", ",", "600", ")", ",", "pcb_rotate", "=", "(", "0", ",", "0", ",", "0", ")", ",", "timeout", "=", "20", ",", "showgui", "=", "False", ")", ":", "f", "=", "tempfile", ".", "NamedTemporaryFile", "(", "suffix", "=", "'.png'", ",", "prefix", "=", "'eagexp_'", ")", "output", "=", "f", ".", "name", "export_image3d", "(", "input", ",", "output", "=", "output", ",", "size", "=", "size", ",", "pcb_rotate", "=", "pcb_rotate", ",", "timeout", "=", "timeout", ",", "showgui", "=", "showgui", ")", "im", "=", "Image", ".", "open", "(", "output", ")", "return", "im" ]
36.916667
31.416667
def get_default_config_help(self):
    """
    Returns the help text for the configuration options for this handler
    """
    config = super(rmqHandler, self).get_default_config_help()

    config.update({
        'server': '',
        'rmq_exchange': '',
    })

    return config
[ "def", "get_default_config_help", "(", "self", ")", ":", "config", "=", "super", "(", "rmqHandler", ",", "self", ")", ".", "get_default_config_help", "(", ")", "config", ".", "update", "(", "{", "'server'", ":", "''", ",", "'rmq_exchange'", ":", "''", ",", "}", ")", "return", "config" ]
28
18
def exit(self, result=0, msg=None, *args, **kwargs):
    """Exit the runtime."""
    if self._finalizer:
        try:
            self._finalizer()
        except Exception as e:
            try:
                NailgunProtocol.send_stderr(
                    self._socket,
                    '\nUnexpected exception in finalizer: {!r}\n'.format(e)
                )
            except Exception:
                pass

    try:
        # Write a final message to stderr if present.
        if msg:
            NailgunProtocol.send_stderr(self._socket, msg)

        # Send an Exit chunk with the result.
        NailgunProtocol.send_exit_with_code(self._socket, result)

        # Shutdown the connected socket.
        teardown_socket(self._socket)
    finally:
        super(DaemonExiter, self).exit(result=result, *args, **kwargs)
[ "def", "exit", "(", "self", ",", "result", "=", "0", ",", "msg", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "_finalizer", ":", "try", ":", "self", ".", "_finalizer", "(", ")", "except", "Exception", "as", "e", ":", "try", ":", "NailgunProtocol", ".", "send_stderr", "(", "self", ".", "_socket", ",", "'\\nUnexpected exception in finalizer: {!r}\\n'", ".", "format", "(", "e", ")", ")", "except", "Exception", ":", "pass", "try", ":", "# Write a final message to stderr if present.", "if", "msg", ":", "NailgunProtocol", ".", "send_stderr", "(", "self", ".", "_socket", ",", "msg", ")", "# Send an Exit chunk with the result.", "NailgunProtocol", ".", "send_exit_with_code", "(", "self", ".", "_socket", ",", "result", ")", "# Shutdown the connected socket.", "teardown_socket", "(", "self", ".", "_socket", ")", "finally", ":", "super", "(", "DaemonExiter", ",", "self", ")", ".", "exit", "(", "result", "=", "result", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
28.538462
20.038462
def run():
    """
    Runs the linter and tests

    :return:
        A bool - if the linter and tests ran successfully
    """
    print('Python ' + sys.version.replace('\n', ''))
    try:
        oscrypto_tests_module_info = imp.find_module(
            'tests', [os.path.join(build_root, 'oscrypto')])
        oscrypto_tests = imp.load_module(
            'oscrypto.tests', *oscrypto_tests_module_info)
        oscrypto = oscrypto_tests.local_oscrypto()
        print('\noscrypto backend: %s' % oscrypto.backend())
    except ImportError:
        pass

    if run_lint:
        print('')
        lint_result = run_lint()
    else:
        lint_result = True

    if run_coverage:
        print('\nRunning tests (via coverage.py)')
        sys.stdout.flush()
        tests_result = run_coverage(ci=True)
    else:
        print('\nRunning tests')
        sys.stdout.flush()
        tests_result = run_tests()

    sys.stdout.flush()
    return lint_result and tests_result
[ "def", "run", "(", ")", ":", "print", "(", "'Python '", "+", "sys", ".", "version", ".", "replace", "(", "'\\n'", ",", "''", ")", ")", "try", ":", "oscrypto_tests_module_info", "=", "imp", ".", "find_module", "(", "'tests'", ",", "[", "os", ".", "path", ".", "join", "(", "build_root", ",", "'oscrypto'", ")", "]", ")", "oscrypto_tests", "=", "imp", ".", "load_module", "(", "'oscrypto.tests'", ",", "*", "oscrypto_tests_module_info", ")", "oscrypto", "=", "oscrypto_tests", ".", "local_oscrypto", "(", ")", "print", "(", "'\\noscrypto backend: %s'", "%", "oscrypto", ".", "backend", "(", ")", ")", "except", "(", "ImportError", ")", ":", "pass", "if", "run_lint", ":", "print", "(", "''", ")", "lint_result", "=", "run_lint", "(", ")", "else", ":", "lint_result", "=", "True", "if", "run_coverage", ":", "print", "(", "'\\nRunning tests (via coverage.py)'", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "tests_result", "=", "run_coverage", "(", "ci", "=", "True", ")", "else", ":", "print", "(", "'\\nRunning tests'", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "tests_result", "=", "run_tests", "(", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "return", "lint_result", "and", "tests_result" ]
26.257143
22.2
def FqdnUrl(v):
    """Verify that the value is a Fully qualified domain name URL.

    >>> s = Schema(FqdnUrl())
    >>> with raises(MultipleInvalid, 'expected a Fully qualified domain name URL'):
    ...     s("http://localhost/")
    >>> s('http://w3.org')
    'http://w3.org'
    """
    try:
        parsed_url = _url_validation(v)
        if "." not in parsed_url.netloc:
            raise UrlInvalid("must have a domain name in URL")
        return v
    except:
        raise ValueError
[ "def", "FqdnUrl", "(", "v", ")", ":", "try", ":", "parsed_url", "=", "_url_validation", "(", "v", ")", "if", "\".\"", "not", "in", "parsed_url", ".", "netloc", ":", "raise", "UrlInvalid", "(", "\"must have a domain name in URL\"", ")", "return", "v", "except", ":", "raise", "ValueError" ]
29.8125
17.875
def parse_partition(rule):
    '''
    Parse the partition line
    '''
    parser = argparse.ArgumentParser()
    rules = shlex.split(rule)
    rules.pop(0)
    parser.add_argument('mntpoint')
    parser.add_argument('--size', dest='size', action='store')
    parser.add_argument('--grow', dest='grow', action='store_true')
    parser.add_argument('--maxsize', dest='maxsize', action='store')
    parser.add_argument('--noformat', dest='noformat', action='store_true')
    parser.add_argument('--onpart', '--usepart', dest='onpart', action='store')
    parser.add_argument('--ondisk', '--ondrive', dest='ondisk', action='store')
    parser.add_argument('--asprimary', dest='asprimary', action='store_true')
    parser.add_argument('--fsprofile', dest='fsprofile', action='store')
    parser.add_argument('--fstype', dest='fstype', action='store')
    parser.add_argument('--fsoptions', dest='fsoptions', action='store')
    parser.add_argument('--label', dest='label', action='store')
    parser.add_argument('--recommended', dest='recommended',
                        action='store_true')
    parser.add_argument('--onbiosdisk', dest='onbiosdisk', action='store')
    parser.add_argument('--encrypted', dest='encrypted', action='store_true')
    parser.add_argument('--passphrase', dest='passphrase', action='store')
    parser.add_argument('--escrowcert', dest='escrowcert', action='store')
    parser.add_argument('--backupphrase', dest='backupphrase', action='store')
    args = clean_args(vars(parser.parse_args(rules)))
    parser = None
    return args
[ "def", "parse_partition", "(", "rule", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "rules", "=", "shlex", ".", "split", "(", "rule", ")", "rules", ".", "pop", "(", "0", ")", "parser", ".", "add_argument", "(", "'mntpoint'", ")", "parser", ".", "add_argument", "(", "'--size'", ",", "dest", "=", "'size'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--grow'", ",", "dest", "=", "'grow'", ",", "action", "=", "'store_true'", ")", "parser", ".", "add_argument", "(", "'--maxsize'", ",", "dest", "=", "'maxsize'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--noformat'", ",", "dest", "=", "'noformat'", ",", "action", "=", "'store_true'", ")", "parser", ".", "add_argument", "(", "'--onpart'", ",", "'--usepart'", ",", "dest", "=", "'onpart'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--ondisk'", ",", "'--ondrive'", ",", "dest", "=", "'ondisk'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--asprimary'", ",", "dest", "=", "'asprimary'", ",", "action", "=", "'store_true'", ")", "parser", ".", "add_argument", "(", "'--fsprofile'", ",", "dest", "=", "'fsprofile'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--fstype'", ",", "dest", "=", "'fstype'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--fsoptions'", ",", "dest", "=", "'fsoptions'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--label'", ",", "dest", "=", "'label'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--recommended'", ",", "dest", "=", "'recommended'", ",", "action", "=", "'store_true'", ")", "parser", ".", "add_argument", "(", "'--onbiosdisk'", ",", "dest", "=", "'onbiosdisk'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--encrypted'", ",", "dest", "=", "'encrypted'", ",", "action", "=", "'store_true'", ")", "parser", ".", "add_argument", "(", "'--passphrase'", ",", "dest", "=", "'passphrase'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--escrowcert'", ",", "dest", "=", "'escrowcert'", ",", "action", "=", "'store'", ")", "parser", ".", "add_argument", "(", "'--backupphrase'", ",", "dest", "=", "'backupphrase'", ",", "action", "=", "'store'", ")", "args", "=", "clean_args", "(", "vars", "(", "parser", ".", "parse_args", "(", "rules", ")", ")", ")", "parser", "=", "None", "return", "args" ]
51.1
25.9
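For illustration, feeding a typical kickstart part line through the parser above (the rule string is made up, and the printed dict assumes clean_args simply drops unset options):

rule = "part /boot --size=512 --fstype=ext4 --asprimary"
print(parse_partition(rule))
# e.g. {'mntpoint': '/boot', 'size': '512', 'fstype': 'ext4', 'asprimary': True}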
def _update_input_func_list(self):
    """
    Update sources list from receiver.

    Internal method which updates sources list of receiver after getting
    sources and potential renaming information from receiver.
    """
    # Get all sources and renaming information from receiver
    # For structural information of the variables please see the methods
    receiver_sources = self._get_receiver_sources()

    if not receiver_sources:
        _LOGGER.error("Receiver sources list empty. "
                      "Please check if device is powered on.")
        return False

    # First input_func_list determination of AVR-X receivers
    if self._receiver_type in [AVR_X.type, AVR_X_2016.type]:
        renamed_sources, deleted_sources, status_success = (
            self._get_renamed_deleted_sourcesapp())

        # Backup if previous try with AppCommand was not successful
        if not status_success:
            renamed_sources, deleted_sources = (
                self._get_renamed_deleted_sources())

        # Remove all deleted sources
        if self._show_all_inputs is False:
            for deleted_source in deleted_sources.items():
                if deleted_source[1] == "DEL":
                    receiver_sources.pop(deleted_source[0], None)

        # Clear and rebuild the sources lists
        self._input_func_list.clear()
        self._input_func_list_rev.clear()
        self._netaudio_func_list.clear()
        self._playing_func_list.clear()

        for item in receiver_sources.items():
            # Mapping of item[0] because some func names are inconsistent
            # at AVR-X receivers
            m_item_0 = SOURCE_MAPPING.get(item[0], item[0])

            # For renamed sources use those names and save the default name
            # for a later mapping
            if item[0] in renamed_sources:
                self._input_func_list[renamed_sources[item[0]]] = m_item_0
                self._input_func_list_rev[
                    m_item_0] = renamed_sources[item[0]]
                # If the source is a netaudio source, save its renamed name
                if item[0] in NETAUDIO_SOURCES:
                    self._netaudio_func_list.append(
                        renamed_sources[item[0]])
                # If the source is a playing source, save its renamed name
                if item[0] in PLAYING_SOURCES:
                    self._playing_func_list.append(
                        renamed_sources[item[0]])
            # Otherwise the default names are used
            else:
                self._input_func_list[item[1]] = m_item_0
                self._input_func_list_rev[m_item_0] = item[1]
                # If the source is a netaudio source, save its name
                if item[1] in NETAUDIO_SOURCES:
                    self._netaudio_func_list.append(item[1])
                # If the source is a playing source, save its name
                if item[1] in PLAYING_SOURCES:
                    self._playing_func_list.append(item[1])

    # Determination of input_func_list for non-X AVR receivers
    elif self._receiver_type == AVR.type:
        # Clear and rebuild the sources lists
        self._input_func_list.clear()
        self._input_func_list_rev.clear()
        self._netaudio_func_list.clear()
        self._playing_func_list.clear()
        for item in receiver_sources.items():
            self._input_func_list[item[1]] = item[0]
            self._input_func_list_rev[item[0]] = item[1]
            # If the source is a netaudio source, save its name
            if item[0] in NETAUDIO_SOURCES:
                self._netaudio_func_list.append(item[1])
            # If the source is a playing source, save its name
            if item[0] in PLAYING_SOURCES:
                self._playing_func_list.append(item[1])
    else:
        _LOGGER.error('Receiver type not set yet.')
        return False

    # Finished
    return True
[ "def", "_update_input_func_list", "(", "self", ")", ":", "# Get all sources and renaming information from receiver", "# For structural information of the variables please see the methods", "receiver_sources", "=", "self", ".", "_get_receiver_sources", "(", ")", "if", "not", "receiver_sources", ":", "_LOGGER", ".", "error", "(", "\"Receiver sources list empty. \"", "\"Please check if device is powered on.\"", ")", "return", "False", "# First input_func_list determination of AVR-X receivers", "if", "self", ".", "_receiver_type", "in", "[", "AVR_X", ".", "type", ",", "AVR_X_2016", ".", "type", "]", ":", "renamed_sources", ",", "deleted_sources", ",", "status_success", "=", "(", "self", ".", "_get_renamed_deleted_sourcesapp", "(", ")", ")", "# Backup if previous try with AppCommand was not successful", "if", "not", "status_success", ":", "renamed_sources", ",", "deleted_sources", "=", "(", "self", ".", "_get_renamed_deleted_sources", "(", ")", ")", "# Remove all deleted sources", "if", "self", ".", "_show_all_inputs", "is", "False", ":", "for", "deleted_source", "in", "deleted_sources", ".", "items", "(", ")", ":", "if", "deleted_source", "[", "1", "]", "==", "\"DEL\"", ":", "receiver_sources", ".", "pop", "(", "deleted_source", "[", "0", "]", ",", "None", ")", "# Clear and rebuild the sources lists", "self", ".", "_input_func_list", ".", "clear", "(", ")", "self", ".", "_input_func_list_rev", ".", "clear", "(", ")", "self", ".", "_netaudio_func_list", ".", "clear", "(", ")", "self", ".", "_playing_func_list", ".", "clear", "(", ")", "for", "item", "in", "receiver_sources", ".", "items", "(", ")", ":", "# Mapping of item[0] because some func names are inconsistant", "# at AVR-X receivers", "m_item_0", "=", "SOURCE_MAPPING", ".", "get", "(", "item", "[", "0", "]", ",", "item", "[", "0", "]", ")", "# For renamed sources use those names and save the default name", "# for a later mapping", "if", "item", "[", "0", "]", "in", "renamed_sources", ":", "self", ".", "_input_func_list", "[", "renamed_sources", "[", "item", "[", "0", "]", "]", "]", "=", "m_item_0", "self", ".", "_input_func_list_rev", "[", "m_item_0", "]", "=", "renamed_sources", "[", "item", "[", "0", "]", "]", "# If the source is a netaudio source, save its renamed name", "if", "item", "[", "0", "]", "in", "NETAUDIO_SOURCES", ":", "self", ".", "_netaudio_func_list", ".", "append", "(", "renamed_sources", "[", "item", "[", "0", "]", "]", ")", "# If the source is a playing source, save its renamed name", "if", "item", "[", "0", "]", "in", "PLAYING_SOURCES", ":", "self", ".", "_playing_func_list", ".", "append", "(", "renamed_sources", "[", "item", "[", "0", "]", "]", ")", "# Otherwise the default names are used", "else", ":", "self", ".", "_input_func_list", "[", "item", "[", "1", "]", "]", "=", "m_item_0", "self", ".", "_input_func_list_rev", "[", "m_item_0", "]", "=", "item", "[", "1", "]", "# If the source is a netaudio source, save its name", "if", "item", "[", "1", "]", "in", "NETAUDIO_SOURCES", ":", "self", ".", "_netaudio_func_list", ".", "append", "(", "item", "[", "1", "]", ")", "# If the source is a playing source, save its name", "if", "item", "[", "1", "]", "in", "PLAYING_SOURCES", ":", "self", ".", "_playing_func_list", ".", "append", "(", "item", "[", "1", "]", ")", "# Determination of input_func_list for non AVR-nonX receivers", "elif", "self", ".", "_receiver_type", "==", "AVR", ".", "type", ":", "# Clear and rebuild the sources lists", "self", ".", "_input_func_list", ".", "clear", "(", ")", "self", ".", 
"_input_func_list_rev", ".", "clear", "(", ")", "self", ".", "_netaudio_func_list", ".", "clear", "(", ")", "self", ".", "_playing_func_list", ".", "clear", "(", ")", "for", "item", "in", "receiver_sources", ".", "items", "(", ")", ":", "self", ".", "_input_func_list", "[", "item", "[", "1", "]", "]", "=", "item", "[", "0", "]", "self", ".", "_input_func_list_rev", "[", "item", "[", "0", "]", "]", "=", "item", "[", "1", "]", "# If the source is a netaudio source, save its name", "if", "item", "[", "0", "]", "in", "NETAUDIO_SOURCES", ":", "self", ".", "_netaudio_func_list", ".", "append", "(", "item", "[", "1", "]", ")", "# If the source is a playing source, save its name", "if", "item", "[", "0", "]", "in", "PLAYING_SOURCES", ":", "self", ".", "_playing_func_list", ".", "append", "(", "item", "[", "1", "]", ")", "else", ":", "_LOGGER", ".", "error", "(", "'Receiver type not set yet.'", ")", "return", "False", "# Finished", "return", "True" ]
45.637363
19.197802
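A minimal, self-contained sketch of the rename/delete bookkeeping in _update_input_func_list, using plain dicts in place of a live receiver (all source names below are illustrative):

# Illustrative stand-ins for the data normally fetched from the receiver.
receiver_sources = {"SAT/CBL": "CBL/SAT", "GAME": "GAME", "NET": "NETWORK"}
renamed_sources = {"SAT/CBL": "Set Top Box"}     # user renames on the AVR
deleted_sources = {"GAME": "DEL", "NET": "USE"}  # "DEL" marks hidden inputs

# Drop deleted sources, as the method does when _show_all_inputs is False.
for func, flag in deleted_sources.items():
    if flag == "DEL":
        receiver_sources.pop(func, None)

# Build the forward and reverse lookup tables.
input_func_list, input_func_list_rev = {}, {}
for func, name in receiver_sources.items():
    display = renamed_sources.get(func, name)
    input_func_list[display] = func
    input_func_list_rev[func] = display

print(input_func_list)  # {'Set Top Box': 'SAT/CBL', 'NETWORK': 'NET'}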
def register(cls, *args, **kwargs): """Register view to handler.""" if cls.app is None: return register(*args, handler=cls, **kwargs) return cls.app.register(*args, handler=cls, **kwargs)
[ "def", "register", "(", "cls", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "cls", ".", "app", "is", "None", ":", "return", "register", "(", "*", "args", ",", "handler", "=", "cls", ",", "*", "*", "kwargs", ")", "return", "cls", ".", "app", ".", "register", "(", "*", "args", ",", "handler", "=", "cls", ",", "*", "*", "kwargs", ")" ]
43.8
11.2
def make_file_read_only(file_path): """ Removes the write permissions for the given file for owner, groups and others. :param file_path: The file whose privileges are revoked. :raise FileNotFoundError: If the given file does not exist. """ old_permissions = os.stat(file_path).st_mode os.chmod(file_path, old_permissions & ~WRITE_PERMISSIONS)
[ "def", "make_file_read_only", "(", "file_path", ")", ":", "old_permissions", "=", "os", ".", "stat", "(", "file_path", ")", ".", "st_mode", "os", ".", "chmod", "(", "file_path", ",", "old_permissions", "&", "~", "WRITE_PERMISSIONS", ")" ]
40.333333
17.666667
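A short usage sketch for make_file_read_only. WRITE_PERMISSIONS is not part of the snippet above, so its definition below (the three write bits from stat) is an assumption; the function is restated so the demo runs standalone:

import os
import stat
import tempfile

# Assumed definition: write bits for owner, group, and others.
WRITE_PERMISSIONS = stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH

def make_file_read_only(file_path):
    old_permissions = os.stat(file_path).st_mode
    os.chmod(file_path, old_permissions & ~WRITE_PERMISSIONS)

fd, path = tempfile.mkstemp()
os.close(fd)
make_file_read_only(path)
print(os.access(path, os.W_OK))  # False (when running as a regular user)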
def followers(self):
        """ :class:`Feed <pypump.models.feed.Feed>` with all
        :class:`Person <pypump.models.person.Person>`
        objects following the person.

        Example:
            >>> alice = pump.Person('alice@example.org')
            >>> for follower in alice.followers[:2]:
            ...     print(follower.id)
            ...
            acct:bob@example.org
            acct:carol@example.org
        """
        if self._followers is None:
            self._followers = Followers(self.links['followers'], pypump=self._pump)
        return self._followers
[ "def", "followers", "(", "self", ")", ":", "if", "self", ".", "_followers", "is", "None", ":", "self", ".", "_followers", "=", "Followers", "(", "self", ".", "links", "[", "'followers'", "]", ",", "pypump", "=", "self", ".", "_pump", ")", "return", "self", ".", "_followers" ]
37.666667
16.933333
def lazy_load_modules(*modules):
    """
    Decorator that imports the given modules before the wrapped function
    runs and deletes them from ``sys.modules`` once the task is done, so
    the GC can free the memory tied to the modules during clean-up.
    """
    def decorator(function):
        def wrapper(*args, **kwargs):
            module_dict = {}
            for module_string in modules:
                module = __import__(module_string)

                # Add a `module` entry to `sys.modules`. After the module has
                # been deleted from `sys.modules`, re-importing it does not
                # update the entry, so set it explicitly.
                sys.modules[module.__package__] = module
                reload_module(module)
                module_dict[module_string] = module

            func_response = function(*args, **kwargs)

            for module_string, module in module_dict.items():
                # delete the module from sys.modules
                delete_module(module_string)
                del module    # drop the local reference to the module

            return func_response
        return wrapper
    return decorator
[ "def", "lazy_load_modules", "(", "*", "modules", ")", ":", "def", "decorator", "(", "function", ")", ":", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "module_dict", "=", "{", "}", "for", "module_string", "in", "modules", ":", "module", "=", "__import__", "(", "module_string", ")", "# Add `module` entry in `sys.modules`. After deleting the module", "# from `sys.modules` and re-importing the module don't update", "# the module entry in `sys.modules` dict", "sys", ".", "modules", "[", "module", ".", "__package__", "]", "=", "module", "reload_module", "(", "module", ")", "module_dict", "[", "module_string", "]", "=", "module", "func_response", "=", "function", "(", "*", "args", ",", "*", "*", "kwargs", ")", "for", "module_string", ",", "module", "in", "module_dict", ".", "items", "(", ")", ":", "# delete idna module", "delete_module", "(", "module_string", ")", "del", "module", "# delete reference to idna", "return", "func_response", "return", "wrapper", "return", "decorator" ]
36.933333
18.466667
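delete_module and reload_module above come from the surrounding module and are not shown here; the sketch below only demonstrates the sys.modules mechanism the decorator relies on: removing an entry forces the next import statement to re-execute and re-cache the module.

import sys
import json  # any stdlib module works for the demonstration

assert 'json' in sys.modules
del sys.modules['json']        # essentially what delete_module() boils down to
assert 'json' not in sys.modules

import json                    # re-imports and re-caches the module
assert 'json' in sys.modules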
def add_experiment_choice(experiment, choice): """Adds an experiment choice""" redis = oz.redis.create_connection() oz.bandit.Experiment(redis, experiment).add_choice(choice)
[ "def", "add_experiment_choice", "(", "experiment", ",", "choice", ")", ":", "redis", "=", "oz", ".", "redis", ".", "create_connection", "(", ")", "oz", ".", "bandit", ".", "Experiment", "(", "redis", ",", "experiment", ")", ".", "add_choice", "(", "choice", ")" ]
45.75
7
def pause_unit(assess_status_func, services=None, ports=None, charm_func=None):
    """Pause a unit by stopping the services and setting 'unit-paused'
    in the local kv() store.

    Also checks that the services have stopped and ports are no longer
    being listened to.

    An optional charm_func() can be called that can either raise an
    Exception or return a non-None message to indicate that the unit
    didn't pause cleanly.

    The signature for charm_func is:
    charm_func() -> message: string

    charm_func() is executed after any services are stopped, if supplied.

    The services object can either be:
      - None : no services were passed (an empty dict is returned)
      - a list of strings
      - A dictionary (optionally OrderedDict) {service_name: {'service': ..}}
      - An array of [{'service': service_name, ...}, ...]

    @param assess_status_func: (f() -> message: string | None) or None
    @param services: OPTIONAL see above
    @param ports: OPTIONAL list of ports
    @param charm_func: function to run for custom charm pausing.
    @returns None
    @raises Exception(message) on an error for action_fail().
    """
    _, messages = manage_payload_services(
        'pause',
        services=services,
        charm_func=charm_func)
    set_unit_paused()
    if assess_status_func:
        message = assess_status_func()
        if message:
            messages.append(message)
    if messages and not is_unit_upgrading_set():
        raise Exception("Couldn't pause: {}".format("; ".join(messages)))
[ "def", "pause_unit", "(", "assess_status_func", ",", "services", "=", "None", ",", "ports", "=", "None", ",", "charm_func", "=", "None", ")", ":", "_", ",", "messages", "=", "manage_payload_services", "(", "'pause'", ",", "services", "=", "services", ",", "charm_func", "=", "charm_func", ")", "set_unit_paused", "(", ")", "if", "assess_status_func", ":", "message", "=", "assess_status_func", "(", ")", "if", "message", ":", "messages", ".", "append", "(", "message", ")", "if", "messages", "and", "not", "is_unit_upgrading_set", "(", ")", ":", "raise", "Exception", "(", "\"Couldn't pause: {}\"", ".", "format", "(", "\"; \"", ".", "join", "(", "messages", ")", ")", ")" ]
37.02439
19.146341
def get_agent_requirement_line(check, version): """ Compose a text line to be used in a requirements.txt file to install a check pinned to a specific version. """ package_name = get_package_name(check) # no manifest if check in ('datadog_checks_base', 'datadog_checks_downloader'): return '{}=={}'.format(package_name, version) m = load_manifest(check) platforms = sorted(m.get('supported_os', [])) # all platforms if platforms == ALL_PLATFORMS: return '{}=={}'.format(package_name, version) # one specific platform elif len(platforms) == 1: return "{}=={}; sys_platform == '{}'".format(package_name, version, PLATFORMS_TO_PY.get(platforms[0])) elif platforms: if 'windows' not in platforms: return "{}=={}; sys_platform != 'win32'".format(package_name, version) elif 'mac_os' not in platforms: return "{}=={}; sys_platform != 'darwin'".format(package_name, version) elif 'linux' not in platforms: return "{}=={}; sys_platform != 'linux2'".format(package_name, version) raise ManifestError("Can't parse the `supported_os` list for the check {}: {}".format(check, platforms))
[ "def", "get_agent_requirement_line", "(", "check", ",", "version", ")", ":", "package_name", "=", "get_package_name", "(", "check", ")", "# no manifest", "if", "check", "in", "(", "'datadog_checks_base'", ",", "'datadog_checks_downloader'", ")", ":", "return", "'{}=={}'", ".", "format", "(", "package_name", ",", "version", ")", "m", "=", "load_manifest", "(", "check", ")", "platforms", "=", "sorted", "(", "m", ".", "get", "(", "'supported_os'", ",", "[", "]", ")", ")", "# all platforms", "if", "platforms", "==", "ALL_PLATFORMS", ":", "return", "'{}=={}'", ".", "format", "(", "package_name", ",", "version", ")", "# one specific platform", "elif", "len", "(", "platforms", ")", "==", "1", ":", "return", "\"{}=={}; sys_platform == '{}'\"", ".", "format", "(", "package_name", ",", "version", ",", "PLATFORMS_TO_PY", ".", "get", "(", "platforms", "[", "0", "]", ")", ")", "elif", "platforms", ":", "if", "'windows'", "not", "in", "platforms", ":", "return", "\"{}=={}; sys_platform != 'win32'\"", ".", "format", "(", "package_name", ",", "version", ")", "elif", "'mac_os'", "not", "in", "platforms", ":", "return", "\"{}=={}; sys_platform != 'darwin'\"", ".", "format", "(", "package_name", ",", "version", ")", "elif", "'linux'", "not", "in", "platforms", ":", "return", "\"{}=={}; sys_platform != 'linux2'\"", ".", "format", "(", "package_name", ",", "version", ")", "raise", "ManifestError", "(", "\"Can't parse the `supported_os` list for the check {}: {}\"", ".", "format", "(", "check", ",", "platforms", ")", ")" ]
41.172414
22.689655
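The environment markers in the returned requirement lines can be evaluated with the packaging library; note that 'linux2' was the Python 2 value of sys.platform (Python 3 reports 'linux'). A small, self-contained check, with a made-up requirement line:

from packaging.markers import Marker

line = "datadog-foo==1.2.3; sys_platform == 'win32'"  # illustrative example
requirement, marker = (part.strip() for part in line.split(';'))

# Marker.evaluate() tests against the current interpreter's environment.
print(requirement, '->', Marker(marker).evaluate())  # False on Linux/macOS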
def read(self, n): """Read data from file segs. Args: n: max bytes to read. Must be positive. Returns: some bytes. May be smaller than n bytes. "" when no more data is left. """ if self._EOF: return "" while self._seg_index <= self._last_seg_index: result = self._read_from_seg(n) if result != "": return result else: self._next_seg() self._EOF = True return ""
[ "def", "read", "(", "self", ",", "n", ")", ":", "if", "self", ".", "_EOF", ":", "return", "\"\"", "while", "self", ".", "_seg_index", "<=", "self", ".", "_last_seg_index", ":", "result", "=", "self", ".", "_read_from_seg", "(", "n", ")", "if", "result", "!=", "\"\"", ":", "return", "result", "else", ":", "self", ".", "_next_seg", "(", ")", "self", ".", "_EOF", "=", "True", "return", "\"\"" ]
20.428571
22.47619
def set_attributes_all(target, attributes, discard_others=True):
    """ Set Attributes in bulk and optionally discard others.

    Sets each Attribute in turn (modifying it in place if possible if it
    is already present) and optionally discarding all other Attributes
    not explicitly set. This function yields much greater performance
    than the required individual calls to ``set_attribute``,
    ``set_attribute_string``, ``set_attribute_string_array`` and
    ``del_attribute`` put together.

    .. versionadded:: 0.2

    Parameters
    ----------
    target : Dataset or Group
        Dataset or Group to set the Attributes of.
    attributes : dict
        The Attributes to set. The keys (``str``) are the names. The
        values are ``tuple`` of the Attribute kind and the value to
        set. Valid kinds are ``'string_array'``, ``'string'``, and
        ``'value'``. The values must correspond to what
        ``set_attribute_string_array``, ``set_attribute_string`` and
        ``set_attribute`` would take respectively.
    discard_others : bool, optional
        Whether to discard all other Attributes not explicitly set
        (default) or not.

    See Also
    --------
    set_attribute
    set_attribute_string
    set_attribute_string_array

    """
    attrs = target.attrs
    existing = dict(attrs.items())
    # Generate special dtype for string arrays.
    if sys.hexversion >= 0x03000000:
        str_arr_dtype = h5py.special_dtype(vlen=str)
    else:
        str_arr_dtype = h5py.special_dtype(vlen=unicode)
    # Go through each attribute. If it is already present, modify it if
    # possible and create it otherwise (deletes old value.)
    for k, (kind, value) in attributes.items():
        if kind == 'string_array':
            attrs.create(k, [convert_to_str(s) for s in value],
                         dtype=str_arr_dtype)
        else:
            if kind == 'string':
                value = np.bytes_(value)
            if k not in existing:
                attrs.create(k, value)
            else:
                try:
                    if value.dtype == existing[k].dtype \
                            and value.shape == existing[k].shape:
                        attrs.modify(k, value)
                    else:
                        attrs.create(k, value)
                except Exception:
                    attrs.create(k, value)
    # Discard all other attributes.
    if discard_others:
        for k in set(existing) - set(attributes):
            del attrs[k]
[ "def", "set_attributes_all", "(", "target", ",", "attributes", ",", "discard_others", "=", "True", ")", ":", "attrs", "=", "target", ".", "attrs", "existing", "=", "dict", "(", "attrs", ".", "items", "(", ")", ")", "# Generate special dtype for string arrays.", "if", "sys", ".", "hexversion", ">=", "0x03000000", ":", "str_arr_dtype", "=", "h5py", ".", "special_dtype", "(", "vlen", "=", "str", ")", "else", ":", "str_arr_dtype", "=", "dtype", "=", "h5py", ".", "special_dtype", "(", "vlen", "=", "unicode", ")", "# Go through each attribute. If it is already present, modify it if", "# possible and create it otherwise (deletes old value.)", "for", "k", ",", "(", "kind", ",", "value", ")", "in", "attributes", ".", "items", "(", ")", ":", "if", "kind", "==", "'string_array'", ":", "attrs", ".", "create", "(", "k", ",", "[", "convert_to_str", "(", "s", ")", "for", "s", "in", "value", "]", ",", "dtype", "=", "str_arr_dtype", ")", "else", ":", "if", "kind", "==", "'string'", ":", "value", "=", "np", ".", "bytes_", "(", "value", ")", "if", "k", "not", "in", "existing", ":", "attrs", ".", "create", "(", "k", ",", "value", ")", "else", ":", "try", ":", "if", "value", ".", "dtype", "==", "existing", "[", "k", "]", ".", "dtype", "and", "value", ".", "shape", "==", "existing", "[", "k", "]", ".", "shape", ":", "attrs", ".", "modify", "(", "k", ",", "value", ")", "except", ":", "attrs", ".", "create", "(", "k", ",", "value", ")", "# Discard all other attributes.", "if", "discard_others", ":", "for", "k", "in", "set", "(", "existing", ")", "-", "set", "(", "attributes", ")", ":", "del", "attrs", "[", "k", "]" ]
37.920635
18.206349
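A usage sketch for set_attributes_all, assuming the function (and its convert_to_str helper) is importable from the module it lives in; the attribute names and values are made up:

import h5py
import numpy as np

with h5py.File('attrs_demo.h5', 'w') as f:
    grp = f.create_group('data')
    grp.attrs['stale'] = 1  # discarded because discard_others defaults to True
    set_attributes_all(grp, {
        'label': ('string', 'sensor-1'),
        'gain': ('value', np.float64(2.5)),
        'tags': ('string_array', ['raw', 'calibrated']),
    })
    print(dict(grp.attrs))  # 'stale' is gone; label/gain/tags remain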
def parse_command_line(): """ Parse CLI args.""" ## create the parser parser = argparse.ArgumentParser( formatter_class=argparse.RawDescriptionHelpFormatter, epilog=""" * Example command-line usage: ## push test branch to conda --label=conda-test for travis CI ./versioner.py -p toytree -b test -t 0.1.7 ## push master as a new tag to git and conda ./versioner.py -p toytree -b master -t 0.1.7 --deploy ## build other deps on conda at --label=conda-test ./versioner.py -p toyplot --no-git ./versioner.py -p pypng --no-git """) ## add arguments parser.add_argument('-v', '--version', action='version', version="0.1") parser.add_argument('-p', #"--package", dest="package", default="toytree", type=str, help="the tag to put in __init__ and use on conda") parser.add_argument('-b', #"--branch", dest="branch", default="master", type=str, help="the branch to build conda package from") parser.add_argument('-t', #"--tag", dest="tag", default="test", type=str, help="the tag to put in __init__ and use on conda") parser.add_argument("--deploy", dest="deploy", action='store_true', help="push the tag to git and upload to conda main label") parser.add_argument("--no-git", dest="nogit", action='store_true', help="skip git update and only build/upload to conda") ## if no args then return help message if len(sys.argv) == 1: parser.print_help() sys.exit(1) ## parse args args = parser.parse_args() return args
[ "def", "parse_command_line", "(", ")", ":", "## create the parser", "parser", "=", "argparse", ".", "ArgumentParser", "(", "formatter_class", "=", "argparse", ".", "RawDescriptionHelpFormatter", ",", "epilog", "=", "\"\"\"\n * Example command-line usage: \n\n ## push test branch to conda --label=conda-test for travis CI\n ./versioner.py -p toytree -b test -t 0.1.7 \n\n ## push master as a new tag to git and conda\n ./versioner.py -p toytree -b master -t 0.1.7 --deploy\n\n ## build other deps on conda at --label=conda-test\n ./versioner.py -p toyplot --no-git\n ./versioner.py -p pypng --no-git\n\n \"\"\"", ")", "## add arguments ", "parser", ".", "add_argument", "(", "'-v'", ",", "'--version'", ",", "action", "=", "'version'", ",", "version", "=", "\"0.1\"", ")", "parser", ".", "add_argument", "(", "'-p'", ",", "#\"--package\", ", "dest", "=", "\"package\"", ",", "default", "=", "\"toytree\"", ",", "type", "=", "str", ",", "help", "=", "\"the tag to put in __init__ and use on conda\"", ")", "parser", ".", "add_argument", "(", "'-b'", ",", "#\"--branch\", ", "dest", "=", "\"branch\"", ",", "default", "=", "\"master\"", ",", "type", "=", "str", ",", "help", "=", "\"the branch to build conda package from\"", ")", "parser", ".", "add_argument", "(", "'-t'", ",", "#\"--tag\", ", "dest", "=", "\"tag\"", ",", "default", "=", "\"test\"", ",", "type", "=", "str", ",", "help", "=", "\"the tag to put in __init__ and use on conda\"", ")", "parser", ".", "add_argument", "(", "\"--deploy\"", ",", "dest", "=", "\"deploy\"", ",", "action", "=", "'store_true'", ",", "help", "=", "\"push the tag to git and upload to conda main label\"", ")", "parser", ".", "add_argument", "(", "\"--no-git\"", ",", "dest", "=", "\"nogit\"", ",", "action", "=", "'store_true'", ",", "help", "=", "\"skip git update and only build/upload to conda\"", ")", "## if no args then return help message", "if", "len", "(", "sys", ".", "argv", ")", "==", "1", ":", "parser", ".", "print_help", "(", ")", "sys", ".", "exit", "(", "1", ")", "## parse args", "args", "=", "parser", ".", "parse_args", "(", ")", "return", "args" ]
26.354839
19.387097
def __driver_stub(self, text, state): """Display help messages or invoke the proper completer. The interface of helper methods and completer methods are documented in the helper() decorator method and the completer() decorator method, respectively. Arguments: text: A string, that is the current completion scope. state: An integer. Returns: A string used to replace the given text, if any. None if no completion candidates are found. Raises: This method is called via the readline callback. If this method raises an error, it is silently ignored by the readline library. This behavior makes debugging very difficult. For this reason, non-driver methods are run within try-except blocks. When an error occurs, the stack trace is printed to self.stderr. """ origline = readline.get_line_buffer() line = origline.lstrip() if line and line[-1] == '?': self.__driver_helper(line) else: toks = shlex.split(line) return self.__driver_completer(toks, text, state)
[ "def", "__driver_stub", "(", "self", ",", "text", ",", "state", ")", ":", "origline", "=", "readline", ".", "get_line_buffer", "(", ")", "line", "=", "origline", ".", "lstrip", "(", ")", "if", "line", "and", "line", "[", "-", "1", "]", "==", "'?'", ":", "self", ".", "__driver_helper", "(", "line", ")", "else", ":", "toks", "=", "shlex", ".", "split", "(", "line", ")", "return", "self", ".", "__driver_completer", "(", "toks", ",", "text", ",", "state", ")" ]
40.413793
21.827586
def push(args): """ %prog push unitig{version}.{partID}.{unitigID} For example, `%prog push unitig5.530` will push the modified `unitig530` and replace the one in the tigStore """ p = OptionParser(push.__doc__) opts, args = p.parse_args(args) if len(args) != 1: sys.exit(not p.print_help()) s, = args prefix = get_prefix() version, partID, unitigID = get_ID(s) cmd = "tigStore" cmd += " -g ../{0}.gkpStore -t ../{0}.tigStore".format(prefix) cmd += " {0} -up {1} -R {2}".format(version, partID, s) sh(cmd)
[ "def", "push", "(", "args", ")", ":", "p", "=", "OptionParser", "(", "push", ".", "__doc__", ")", "opts", ",", "args", "=", "p", ".", "parse_args", "(", "args", ")", "if", "len", "(", "args", ")", "!=", "1", ":", "sys", ".", "exit", "(", "not", "p", ".", "print_help", "(", ")", ")", "s", ",", "=", "args", "prefix", "=", "get_prefix", "(", ")", "version", ",", "partID", ",", "unitigID", "=", "get_ID", "(", "s", ")", "cmd", "=", "\"tigStore\"", "cmd", "+=", "\" -g ../{0}.gkpStore -t ../{0}.tigStore\"", ".", "format", "(", "prefix", ")", "cmd", "+=", "\" {0} -up {1} -R {2}\"", ".", "format", "(", "version", ",", "partID", ",", "s", ")", "sh", "(", "cmd", ")" ]
25.272727
20.090909
def search_regexp(self):
        """ Define the regexp used for the search """
        if ((self.season == "") and (self.episode == "")):
            # Find series
            try:
                print("%s has %s seasons (the series is %s)"
                      % (self.tvdb.data['seriesname'],
                         self.tvdb.get_season_number(),
                         self.tvdb.data['status'].lower()))
                # print self.tvdb.data
            except:
                pass
            regexp = '^%s.*' % self.title.lower()
        elif (self.episode == ""):
            # Find season
            try:
                print("%s has %s episodes in season %s"
                      % (self.tvdb.data['seriesname'],
                         self.tvdb.get_episode_number(int(self.season)),
                         self.season))
            except:
                pass
            regexp = '^%s.*(s[0]*%s|season[\s\_\-\.]*%s).*' % (self.title.lower(), self.season, self.season)
        else:
            # Find season and episode
            try:
                print("%s S%sE%s name is \"%s\""
                      % (self.tvdb.data['seriesname'],
                         self.season,
                         self.episode,
                         self.tvdb.get_episode(int(self.season), int(self.episode))['episodename']))
            except:
                pass
            regexp = '^%s.*((s[0]*%s.*e[0]*%s)|[0]*%sx[0]*%s).*' % (self.title.lower(), self.season, self.episode, self.season, self.episode)
        return regexp
[ "def", "search_regexp", "(", "self", ")", ":", "if", "(", "(", "self", ".", "season", "==", "\"\"", ")", "and", "(", "self", ".", "episode", "==", "\"\"", ")", ")", ":", "# Find serie", "try", ":", "print", "(", "\"%s has %s seasons (the serie is %s)\"", "%", "(", "self", ".", "tvdb", ".", "data", "[", "'seriesname'", "]", ",", "self", ".", "tvdb", ".", "get_season_number", "(", ")", ",", "self", ".", "tvdb", ".", "data", "[", "'status'", "]", ".", "lower", "(", ")", ")", ")", "# print self.tvdb.data", "except", ":", "pass", "regexp", "=", "'^%s.*'", "%", "self", ".", "title", ".", "lower", "(", ")", "elif", "(", "self", ".", "episode", "==", "\"\"", ")", ":", "# Find season", "try", ":", "print", "(", "\"%s has %s episodes in season %s\"", "%", "(", "self", ".", "tvdb", ".", "data", "[", "'seriesname'", "]", ",", "self", ".", "tvdb", ".", "get_episode_number", "(", "int", "(", "self", ".", "season", ")", ")", ",", "self", ".", "season", ")", ")", "except", ":", "pass", "regexp", "=", "'^%s.*(s[0]*%s|season[\\s\\_\\-\\.]*%s).*'", "%", "(", "self", ".", "title", ".", "lower", "(", ")", ",", "self", ".", "season", ",", "self", ".", "season", ")", "else", ":", "# Find season and episode", "try", ":", "print", "(", "\"%s S%sE%s name is \\\"%s\\\"\"", "%", "(", "self", ".", "tvdb", ".", "data", "[", "'seriesname'", "]", ",", "self", ".", "season", ",", "self", ".", "episode", ",", "self", ".", "tvdb", ".", "get_episode", "(", "int", "(", "self", ".", "season", ")", ",", "int", "(", "self", ".", "episode", ")", ")", "[", "'episodename'", "]", ")", ")", "except", ":", "pass", "regexp", "=", "'^%s.*((s[0]*%s.*e[0]*%s)|[0]*%sx[0]*%s).*'", "%", "(", "self", ".", "title", ".", "lower", "(", ")", ",", "self", ".", "season", ",", "self", ".", "episode", ",", "self", ".", "season", ",", "self", ".", "episode", ")", "return", "regexp" ]
48
32.296296
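A quick check of the season-and-episode pattern built in the final branch of search_regexp, with a made-up title and a few typical release names:

import re

# Pattern produced for title='some show', season='2', episode='5'.
pattern = '^%s.*((s[0]*%s.*e[0]*%s)|[0]*%sx[0]*%s).*' % ('some show', 2, 5, 2, 5)

for name in ('Some Show S02E05 720p', 'some show 2x05', 'some show s03e05'):
    print(name, '->', bool(re.match(pattern, name.lower())))
# -> True, True, False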
def args_to_props(target: Target, builder: Builder, args: list, kwargs: dict): """Convert build file `args` and `kwargs` to `target` props. Use builder signature to validate builder usage in build-file, raising appropriate exceptions on signature-mismatches. Use builder signature default values to assign props values to args that were not passed in the build-file call. This function handles only the arg/kwargs-to-prop assignment, including default values when necessary. When it returns, if no exception was raised, it is guaranteed that `target.props` contains all args defined in the builder registered signature, with values taken either from the build-file call, or from default values provided in the signature. Specifically, this function DOES NOT do anything about the arg types defined in the builder signature. :raise TypeError: On signature-call mismatch. """ if len(args) > len(builder.sig): # too many positional arguments supplied - say how many we can take raise TypeError('{}() takes {}, but {} were given' .format(target.builder_name, format_num_positional_arguments(builder), len(args))) # read given args into the matching props according to the signature for arg_name, value in zip(builder.sig.keys(), args): target.props[arg_name] = value # read given kwargs into the named props, asserting matching sig arg names for arg_name, value in kwargs.items(): if arg_name not in builder.sig: raise TypeError("{}() got an unexpected keyword argument '{}'" .format(target.builder_name, arg_name)) if arg_name in target.props: raise TypeError("{}() got multiple values for argument '{}'" .format(target.builder_name, arg_name)) target.props[arg_name] = value # go over signature args, assigning default values to anything that wasn't # assigned from args / kwargs, making sure no positional args are missing missing_args = [] for arg_name, sig_spec in builder.sig.items(): if arg_name not in target.props: if sig_spec.default == Empty: missing_args.append(arg_name) else: target.props[arg_name] = sig_spec.default if missing_args: # not enough positional arguments supplied - say which # TODO(itamar): match Python's error more closely (last "and "): # foo() missing 3 required positional arguments: 'a', 'b', and 'c' # TODO(itamar): use inflect raise TypeError('{}() missing {} required positional argument{}: {}' .format(target.builder_name, len(missing_args), 's' if len(missing_args) > 1 else '', ', '.join("'{}'".format(arg) for arg in missing_args))) logger.debug('Got props for target: {}', target)
[ "def", "args_to_props", "(", "target", ":", "Target", ",", "builder", ":", "Builder", ",", "args", ":", "list", ",", "kwargs", ":", "dict", ")", ":", "if", "len", "(", "args", ")", ">", "len", "(", "builder", ".", "sig", ")", ":", "# too many positional arguments supplied - say how many we can take", "raise", "TypeError", "(", "'{}() takes {}, but {} were given'", ".", "format", "(", "target", ".", "builder_name", ",", "format_num_positional_arguments", "(", "builder", ")", ",", "len", "(", "args", ")", ")", ")", "# read given args into the matching props according to the signature", "for", "arg_name", ",", "value", "in", "zip", "(", "builder", ".", "sig", ".", "keys", "(", ")", ",", "args", ")", ":", "target", ".", "props", "[", "arg_name", "]", "=", "value", "# read given kwargs into the named props, asserting matching sig arg names", "for", "arg_name", ",", "value", "in", "kwargs", ".", "items", "(", ")", ":", "if", "arg_name", "not", "in", "builder", ".", "sig", ":", "raise", "TypeError", "(", "\"{}() got an unexpected keyword argument '{}'\"", ".", "format", "(", "target", ".", "builder_name", ",", "arg_name", ")", ")", "if", "arg_name", "in", "target", ".", "props", ":", "raise", "TypeError", "(", "\"{}() got multiple values for argument '{}'\"", ".", "format", "(", "target", ".", "builder_name", ",", "arg_name", ")", ")", "target", ".", "props", "[", "arg_name", "]", "=", "value", "# go over signature args, assigning default values to anything that wasn't", "# assigned from args / kwargs, making sure no positional args are missing", "missing_args", "=", "[", "]", "for", "arg_name", ",", "sig_spec", "in", "builder", ".", "sig", ".", "items", "(", ")", ":", "if", "arg_name", "not", "in", "target", ".", "props", ":", "if", "sig_spec", ".", "default", "==", "Empty", ":", "missing_args", ".", "append", "(", "arg_name", ")", "else", ":", "target", ".", "props", "[", "arg_name", "]", "=", "sig_spec", ".", "default", "if", "missing_args", ":", "# not enough positional arguments supplied - say which", "# TODO(itamar): match Python's error more closely (last \"and \"):", "# foo() missing 3 required positional arguments: 'a', 'b', and 'c'", "# TODO(itamar): use inflect", "raise", "TypeError", "(", "'{}() missing {} required positional argument{}: {}'", ".", "format", "(", "target", ".", "builder_name", ",", "len", "(", "missing_args", ")", ",", "'s'", "if", "len", "(", "missing_args", ")", ">", "1", "else", "''", ",", "', '", ".", "join", "(", "\"'{}'\"", ".", "format", "(", "arg", ")", "for", "arg", "in", "missing_args", ")", ")", ")", "logger", ".", "debug", "(", "'Got props for target: {}'", ",", "target", ")" ]
52.017241
21.931034
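The positional-binding step in args_to_props reduces to zipping the signature's ordered argument names with the supplied args; a stripped-down, self-contained illustration (the builder signature and values here are invented):

from collections import OrderedDict

sig = OrderedDict([('name', None), ('sources', None), ('deps', ())])
args, kwargs = ('mylib', ['a.c']), {'deps': [':base']}

props = dict(zip(sig, args))  # binds 'name' and 'sources' positionally
for arg_name, value in kwargs.items():
    if arg_name in props:
        raise TypeError("got multiple values for argument %r" % arg_name)
    props[arg_name] = value

print(props)  # {'name': 'mylib', 'sources': ['a.c'], 'deps': [':base']}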
def transactional(func):
    """
    Wrap a function call in a transaction, committing on success and
    rolling back on error.

    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        with transaction():
            return func(*args, **kwargs)
    return wrapper
[ "def", "transactional", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "with", "transaction", "(", ")", ":", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper" ]
26.2
17.2
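A self-contained sketch of the transactional decorator in use, with a simplified stand-in for transaction() (the real context manager presumably commits on success and rolls back on exceptions):

from contextlib import contextmanager
from functools import wraps

@contextmanager
def transaction():  # simplified stand-in for the real context manager
    print('BEGIN')
    try:
        yield
        print('COMMIT')
    except Exception:
        print('ROLLBACK')
        raise

def transactional(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        with transaction():
            return func(*args, **kwargs)
    return wrapper

@transactional
def save_user(name):
    print('saving', name)

save_user('ada')  # prints BEGIN, saving ada, COMMIT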
def zscan(self, name, cursor='0', match=None, count=10): """Emulate zscan.""" def value_function(): values = self.zrange(name, 0, -1, withscores=True) values.sort(key=lambda x: x[1]) # sort for consistent order return values return self._common_scan(value_function, cursor=cursor, match=match, count=count, key=lambda v: v[0])
[ "def", "zscan", "(", "self", ",", "name", ",", "cursor", "=", "'0'", ",", "match", "=", "None", ",", "count", "=", "10", ")", ":", "def", "value_function", "(", ")", ":", "values", "=", "self", ".", "zrange", "(", "name", ",", "0", ",", "-", "1", ",", "withscores", "=", "True", ")", "values", ".", "sort", "(", "key", "=", "lambda", "x", ":", "x", "[", "1", "]", ")", "# sort for consistent order", "return", "values", "return", "self", ".", "_common_scan", "(", "value_function", ",", "cursor", "=", "cursor", ",", "match", "=", "match", ",", "count", "=", "count", ",", "key", "=", "lambda", "v", ":", "v", "[", "0", "]", ")" ]
54.428571
23.571429
def get_forecast_sites(self):
        """
        This function returns a list of Site objects.
        """
        time_now = time()
        if (time_now - self.forecast_sites_last_update) > self.forecast_sites_update_time or self.forecast_sites_last_request is None:
            data = self.__call_api("sitelist/")
            sites = list()
            for jsoned in data['Locations']['Location']:
                site = Site()
                site.name = jsoned['name']
                site.id = jsoned['id']
                site.latitude = jsoned['latitude']
                site.longitude = jsoned['longitude']

                if 'region' in jsoned:
                    site.region = jsoned['region']

                if 'elevation' in jsoned:
                    site.elevation = jsoned['elevation']

                if 'unitaryAuthArea' in jsoned:
                    site.unitaryAuthArea = jsoned['unitaryAuthArea']

                if 'nationalPark' in jsoned:
                    site.nationalPark = jsoned['nationalPark']

                site.api_key = self.api_key
                sites.append(site)
            self.forecast_sites_last_request = sites
            # Only set self.forecast_sites_last_update once
            # self.forecast_sites_last_request has been set
            self.forecast_sites_last_update = time_now
        else:
            sites = self.forecast_sites_last_request
        return sites
[ "def", "get_forecast_sites", "(", "self", ")", ":", "time_now", "=", "time", "(", ")", "if", "(", "time_now", "-", "self", ".", "forecast_sites_last_update", ")", ">", "self", ".", "forecast_sites_update_time", "or", "self", ".", "forecast_sites_last_request", "is", "None", ":", "data", "=", "self", ".", "__call_api", "(", "\"sitelist/\"", ")", "sites", "=", "list", "(", ")", "for", "jsoned", "in", "data", "[", "'Locations'", "]", "[", "'Location'", "]", ":", "site", "=", "Site", "(", ")", "site", ".", "name", "=", "jsoned", "[", "'name'", "]", "site", ".", "id", "=", "jsoned", "[", "'id'", "]", "site", ".", "latitude", "=", "jsoned", "[", "'latitude'", "]", "site", ".", "longitude", "=", "jsoned", "[", "'longitude'", "]", "if", "'region'", "in", "jsoned", ":", "site", ".", "region", "=", "jsoned", "[", "'region'", "]", "if", "'elevation'", "in", "jsoned", ":", "site", ".", "elevation", "=", "jsoned", "[", "'elevation'", "]", "if", "'unitaryAuthArea'", "in", "jsoned", ":", "site", ".", "unitaryAuthArea", "=", "jsoned", "[", "'unitaryAuthArea'", "]", "if", "'nationalPark'", "in", "jsoned", ":", "site", ".", "nationalPark", "=", "jsoned", "[", "'nationalPark'", "]", "site", ".", "api_key", "=", "self", ".", "api_key", "sites", ".", "append", "(", "site", ")", "self", ".", "forecast_sites_last_request", "=", "sites", "# Only set self.sites_last_update once self.sites_last_request has", "# been set", "self", ".", "forecast_sites_last_update", "=", "time_now", "else", ":", "sites", "=", "self", ".", "forecast_sites_last_request", "return", "sites" ]
34.4
20.15
def soft_bounce(self, unique_id, configs=None): """ Performs a soft bounce (stop and start) for the specified process :Parameter unique_id: the name of the process """ self.stop(unique_id, configs) self.start(unique_id, configs)
[ "def", "soft_bounce", "(", "self", ",", "unique_id", ",", "configs", "=", "None", ")", ":", "self", ".", "stop", "(", "unique_id", ",", "configs", ")", "self", ".", "start", "(", "unique_id", ",", "configs", ")" ]
34.714286
9.857143
def is_inbound_presence_filter(cb): """ Return true if `cb` has been decorated with :func:`inbound_presence_filter`. """ try: handlers = get_magic_attr(cb) except AttributeError: return False hs = HandlerSpec( (_apply_inbound_presence_filter, ()) ) return hs in handlers
[ "def", "is_inbound_presence_filter", "(", "cb", ")", ":", "try", ":", "handlers", "=", "get_magic_attr", "(", "cb", ")", "except", "AttributeError", ":", "return", "False", "hs", "=", "HandlerSpec", "(", "(", "_apply_inbound_presence_filter", ",", "(", ")", ")", ")", "return", "hs", "in", "handlers" ]
19.875
17.375
def pdf_lognormal(d, d_characteristic, s):
    r'''Calculates the probability density function of a lognormal particle
    distribution given a particle diameter `d`, characteristic particle
    diameter `d_characteristic`, and distribution standard deviation `s`.

    .. math::
        q(d) = \frac{1}{ds\sqrt{2\pi}} \exp\left[-0.5\left(\frac{
        \ln(d/d_{characteristic})}{s}\right)^2\right]

    Parameters
    ----------
    d : float
        Specified particle diameter, [m]
    d_characteristic : float
        Characteristic particle diameter; often D[3, 3] is used for this
        purpose but not always, [m]
    s : float
        Distribution standard deviation, [-]

    Returns
    -------
    pdf : float
        Lognormal probability density function, [-]

    Notes
    -----
    The characteristic diameter can be in terms of number density (denoted
    :math:`q_0(d)`), length density (:math:`q_1(d)`), surface area density
    (:math:`q_2(d)`), or volume density (:math:`q_3(d)`). Volume density is
    most often used. Interconversions among the distributions are possible
    but tricky.

    The standard distribution (i.e. the one used in Scipy) can perform the
    same computation with `d_characteristic` as the value of `scale`.

    >>> import scipy.stats
    >>> scipy.stats.lognorm.pdf(x=1E-4, s=1.1, scale=1E-5)
    405.5420921156425

    Scipy's calculation is over 300 times slower however, and this expression
    is numerically integrated so speed is required.

    Examples
    --------
    >>> pdf_lognormal(d=1E-4, d_characteristic=1E-5, s=1.1)
    405.5420921156425

    References
    ----------
    .. [1] ISO 9276-2:2014 - Representation of Results of Particle Size
       Analysis - Part 2: Calculation of Average Particle Sizes/Diameters
       and Moments from Particle Size Distributions.
    '''
    try:
        log_term = log(d/d_characteristic)/s
    except ValueError:
        return 0.0
    return 1./(d*s*ROOT_TWO_PI)*exp(-0.5*log_term*log_term)
[ "def", "pdf_lognormal", "(", "d", ",", "d_characteristic", ",", "s", ")", ":", "try", ":", "log_term", "=", "log", "(", "d", "/", "d_characteristic", ")", "/", "s", "except", "ValueError", ":", "return", "0.0", "return", "1.", "/", "(", "d", "*", "s", "*", "ROOT_TWO_PI", ")", "*", "exp", "(", "-", "0.5", "*", "log_term", "*", "log_term", ")" ]
34.137931
25.896552
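A quick sanity check that pdf_lognormal integrates to one over the diameter domain; ROOT_TWO_PI is sqrt(2*pi), the function is restated compactly so the demo runs standalone, and scipy is used only for the quadrature:

from math import exp, log, pi, sqrt
from scipy.integrate import quad

ROOT_TWO_PI = sqrt(2.0*pi)

def pdf_lognormal(d, d_characteristic, s):
    log_term = log(d/d_characteristic)/s
    return 1./(d*s*ROOT_TWO_PI)*exp(-0.5*log_term*log_term)

# Hint the quadrature at the mode so the narrow peak is not missed.
area, _ = quad(lambda d: pdf_lognormal(d, 1e-5, 1.1), 1e-9, 1e-2, points=[1e-5])
print(round(area, 6))  # 1.0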
def set_led(self, colorcode):
        """ Set the LED Color of Herkulex

            Args:
                colorcode (int): The code for colors
                                 (0x00-OFF 0x02-BLUE
                                  0x03-CYAN 0x04-RED
                                  0x05-ORANGE 0x06-VIOLET
                                  0x07-WHITE)
        """
        data = []
        data.append(0x0A)
        data.append(self.servoid)
        data.append(RAM_WRITE_REQ)
        data.append(LED_CONTROL_RAM)
        data.append(0x01)
        data.append(colorcode)
        send_data(data)
[ "def", "set_led", "(", "self", ",", "colorcode", ")", ":", "data", "=", "[", "]", "data", ".", "append", "(", "0x0A", ")", "data", ".", "append", "(", "self", ".", "servoid", ")", "data", ".", "append", "(", "RAM_WRITE_REQ", ")", "data", ".", "append", "(", "LED_CONTROL_RAM", ")", "data", ".", "append", "(", "0x01", ")", "data", ".", "append", "(", "colorcode", ")", "send_data", "(", "data", ")" ]
30.238095
9.190476
def _get_wv(sentence, ignore=False):
    '''
    Get word2vec data by sentence; sentence is a segmented string.
    '''
    global _vectors
    vectors = []
    for y in sentence:
        y_ = any2unicode(y).strip()
        if y_ not in _stopwords:
            syns = nearby(y_)[0]
            # print("sentence %s word: %s" %(sentence, y_))
            # print("sentence %s word nearby: %s" %(sentence, " ".join(syns)))
            c = []
            try:
                c.append(_vectors.word_vec(y_))
            except KeyError as error:
                if ignore:
                    continue
                else:
                    logging.warning("not exist in w2v model: %s" % y_)
                    # c.append(np.zeros((100,), dtype=float))
                    random_state = np.random.RandomState(seed=(hash(y_) % (2**32 - 1)))
                    c.append(random_state.uniform(low=-10.0, high=10.0, size=(100,)))
            for n in syns:
                if n is None:
                    continue
                try:
                    v = _vectors.word_vec(any2unicode(n))
                except KeyError as error:
                    # v = np.zeros((100,), dtype=float)
                    random_state = np.random.RandomState(seed=(hash(n) % (2 ** 32 - 1)))
                    v = random_state.uniform(low=-10.0, high=10.0, size=(100,))
                c.append(v)
            r = np.average(c, axis=0)
            vectors.append(r)
    return vectors
[ "def", "_get_wv", "(", "sentence", ",", "ignore", "=", "False", ")", ":", "global", "_vectors", "vectors", "=", "[", "]", "for", "y", "in", "sentence", ":", "y_", "=", "any2unicode", "(", "y", ")", ".", "strip", "(", ")", "if", "y_", "not", "in", "_stopwords", ":", "syns", "=", "nearby", "(", "y_", ")", "[", "0", "]", "# print(\"sentence %s word: %s\" %(sentence, y_))", "# print(\"sentence %s word nearby: %s\" %(sentence, \" \".join(syns)))", "c", "=", "[", "]", "try", ":", "c", ".", "append", "(", "_vectors", ".", "word_vec", "(", "y_", ")", ")", "except", "KeyError", "as", "error", ":", "if", "ignore", ":", "continue", "else", ":", "logging", ".", "warning", "(", "\"not exist in w2v model: %s\"", "%", "y_", ")", "# c.append(np.zeros((100,), dtype=float))", "random_state", "=", "np", ".", "random", ".", "RandomState", "(", "seed", "=", "(", "hash", "(", "y_", ")", "%", "(", "2", "**", "32", "-", "1", ")", ")", ")", "c", ".", "append", "(", "random_state", ".", "uniform", "(", "low", "=", "-", "10.0", ",", "high", "=", "10.0", ",", "size", "=", "(", "100", ",", ")", ")", ")", "for", "n", "in", "syns", ":", "if", "n", "is", "None", ":", "continue", "try", ":", "v", "=", "_vectors", ".", "word_vec", "(", "any2unicode", "(", "n", ")", ")", "except", "KeyError", "as", "error", ":", "# v = np.zeros((100,), dtype=float)", "random_state", "=", "np", ".", "random", ".", "RandomState", "(", "seed", "=", "(", "hash", "(", "n", ")", "%", "(", "2", "**", "32", "-", "1", ")", ")", ")", "v", "=", "random_state", ".", "uniform", "(", "low", "=", "10.0", ",", "high", "=", "10.0", ",", "size", "=", "(", "100", ",", ")", ")", "c", ".", "append", "(", "v", ")", "r", "=", "np", ".", "average", "(", "c", ",", "axis", "=", "0", ")", "vectors", ".", "append", "(", "r", ")", "return", "vectors" ]
39.416667
18.694444
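The out-of-vocabulary fallback in _get_wv seeds a RandomState with hash(word), so a word keeps the same random vector within one process. One caveat: Python 3 randomizes str hashes per interpreter run, so the vectors only reproduce across runs if PYTHONHASHSEED is pinned. A small sketch:

import numpy as np

def fallback_vector(word, dim=100):
    # Deterministic for a given word within one interpreter run.
    random_state = np.random.RandomState(seed=hash(word) % (2**32 - 1))
    return random_state.uniform(low=-10.0, high=10.0, size=(dim,))

v1, v2 = fallback_vector('unseen-word'), fallback_vector('unseen-word')
print(np.allclose(v1, v2))  # True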
def Kerr_factor(final_mass, distance): """Return the factor final_mass/distance (in dimensionless units) for Kerr ringdowns """ # Convert solar masses to meters mass = final_mass * lal.MSUN_SI * lal.G_SI / lal.C_SI**2 # Convert Mpc to meters dist = distance * 1e6 * lal.PC_SI return mass / dist
[ "def", "Kerr_factor", "(", "final_mass", ",", "distance", ")", ":", "# Convert solar masses to meters", "mass", "=", "final_mass", "*", "lal", ".", "MSUN_SI", "*", "lal", ".", "G_SI", "/", "lal", ".", "C_SI", "**", "2", "# Convert Mpc to meters", "dist", "=", "distance", "*", "1e6", "*", "lal", ".", "PC_SI", "return", "mass", "/", "dist" ]
28.909091
15.181818
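A back-of-the-envelope check of Kerr_factor that avoids the lal dependency by inlining the same physical constants (rounded standard values); the mass and distance are illustrative GW150914-like numbers. The factor is GM/(c**2 * D), i.e. the dimensionless strain scale of the ringdown.

MSUN_SI = 1.98892e30            # solar mass, kg
G_SI = 6.67430e-11              # gravitational constant, m^3 kg^-1 s^-2
C_SI = 2.99792458e8             # speed of light, m/s
PC_SI = 3.0856775814913673e16   # parsec, m

final_mass, distance = 62.0, 410.0   # solar masses, Mpc (illustrative)
mass = final_mass * MSUN_SI * G_SI / C_SI**2  # geometrized mass, m
dist = distance * 1e6 * PC_SI                 # distance, m
print(mass / dist)  # ~7e-21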
def InitUI(self): """ Build the mainframe """ menubar = pmag_gui_menu.MagICMenu(self, data_model_num=self.data_model_num) self.SetMenuBar(menubar) #pnl = self.panel #---sizer logo ---- #start_image = wx.Image("/Users/ronshaar/PmagPy/images/logo2.png") #start_image = wx.Image("/Users/Python/simple_examples/001.png") #start_image.Rescale(start_image.GetWidth(), start_image.GetHeight()) #image = wx.BitmapFromImage(start_image) #self.logo = wx.StaticBitmap(self.panel, -1, image) #---sizer 0 ---- bSizer0 = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Choose MagIC project directory"), wx.HORIZONTAL) self.dir_path = wx.TextCtrl(self.panel, id=-1, size=(600,25), style=wx.TE_READONLY) self.change_dir_button = buttons.GenButton(self.panel, id=-1, label="change directory",size=(-1, -1)) self.change_dir_button.SetBackgroundColour("#F8F8FF") self.change_dir_button.InitColours() self.Bind(wx.EVT_BUTTON, self.on_change_dir_button, self.change_dir_button) bSizer0.Add(self.change_dir_button, wx.ALIGN_LEFT) bSizer0.AddSpacer(40) bSizer0.Add(self.dir_path,wx.ALIGN_CENTER_VERTICAL) # not fully implemented method for saving/reverting WD # last saved: [] #bSizer0_1 = wx.StaticBoxSizer( wx.StaticBox( self.panel, wx.ID_ANY, "Save MagIC project directory in current state or revert to last-saved state" ), wx.HORIZONTAL ) #saved_label = wx.StaticText(self.panel, -1, "Last saved:", (20, 120)) #self.last_saved_time = wx.TextCtrl(self.panel, id=-1, size=(100,25), style=wx.TE_READONLY) #now = datetime.datetime.now() #now_string = "{}:{}:{}".format(now.hour, now.minute, now.second) #self.last_saved_time.write(now_string) #self.save_dir_button = buttons.GenButton(self.panel, id=-1, label = "save dir", size=(-1, -1)) #self.revert_dir_button = buttons.GenButton(self.panel, id=-1, label = "revert dir", size=(-1, -1)) #self.Bind(wx.EVT_BUTTON, self.on_revert_dir_button, self.revert_dir_button) #self.Bind(wx.EVT_BUTTON, self.on_save_dir_button, self.save_dir_button) #bSizer0_1.Add(saved_label, flag=wx.RIGHT, border=10) #bSizer0_1.Add(self.last_saved_time, flag=wx.RIGHT, border=10) #bSizer0_1.Add(self.save_dir_button,flag=wx.ALIGN_LEFT|wx.RIGHT, border=10) #bSizer0_1.Add(self.revert_dir_button,wx.ALIGN_LEFT) # #---sizer 1 ---- bSizer1 = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Import data to working directory"), wx.HORIZONTAL) text = "1. Convert magnetometer files to MagIC format" self.btn1 = buttons.GenButton(self.panel, id=-1, label=text, size=(450, 50), name='step 1') self.btn1.SetBackgroundColour("#FDC68A") self.btn1.InitColours() self.Bind(wx.EVT_BUTTON, self.on_convert_file, self.btn1) text = "2. (optional) Calculate geographic/tilt-corrected directions" self.btn2 = buttons.GenButton(self.panel, id=-1, label=text, size=(450, 50), name='step 2') self.btn2.SetBackgroundColour("#FDC68A") self.btn2.InitColours() self.Bind(wx.EVT_BUTTON, self.on_btn_orientation, self.btn2) text = "3. (optional) Add MagIC metadata for uploading data to MagIC " self.btn3 = buttons.GenButton(self.panel, id=-1, label=text, size=(450, 50), name='step 3') self.btn3.SetBackgroundColour("#FDC68A") self.btn3.InitColours() self.Bind(wx.EVT_BUTTON, self.on_btn_metadata, self.btn3) text = "Unpack txt file downloaded from MagIC" self.btn4 = buttons.GenButton(self.panel, id=-1, label=text, size=(330, 50)) self.btn4.SetBackgroundColour("#FDC68A") self.btn4.InitColours() self.Bind(wx.EVT_BUTTON, self.on_btn_unpack, self.btn4) text = "Convert directory to 3.0. 
format (legacy data only)" self.btn1a = buttons.GenButton(self.panel, id=-1, label=text, size=(330, 50), name='step 1a') self.btn1a.SetBackgroundColour("#FDC68A") self.btn1a.InitColours() self.Bind(wx.EVT_BUTTON, self.on_btn_convert_3, self.btn1a) #str = "OR" OR = wx.StaticText(self.panel, -1, "or", (20, 120)) font = wx.Font(18, wx.SWISS, wx.NORMAL, wx.NORMAL) OR.SetFont(font) #bSizer0.Add(self.panel,self.btn1,wx.ALIGN_TOP) bSizer1_1 = wx.BoxSizer(wx.VERTICAL) bSizer1_1.AddSpacer(20) bSizer1_1.Add(self.btn1, wx.ALIGN_TOP) bSizer1_1.AddSpacer(20) bSizer1_1.Add(self.btn2, wx.ALIGN_TOP) bSizer1_1.AddSpacer(20) bSizer1_1.Add(self.btn3, wx.ALIGN_TOP) bSizer1_1.AddSpacer(20) bSizer1.Add(bSizer1_1, wx.ALIGN_CENTER, wx.EXPAND) bSizer1.AddSpacer(20) bSizer1.Add(OR, 0, wx.ALIGN_CENTER, 0) bSizer1.AddSpacer(20) bSizer1_2 = wx.BoxSizer(wx.VERTICAL) spacing = 60 #if self.data_model_num == 3 else 90 bSizer1_2.AddSpacer(spacing) bSizer1_2.Add(self.btn4, 0, wx.ALIGN_CENTER, 0) bSizer1_2.AddSpacer(20) bSizer1_2.Add(self.btn1a, 0, wx.ALIGN_CENTER, 0) bSizer1_2.AddSpacer(20) bSizer1.Add(bSizer1_2) bSizer1.AddSpacer(20) #---sizer 2 ---- bSizer2 = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Analysis and plots" ), wx.HORIZONTAL) text = "Demag GUI" self.btn_demag_gui = buttons.GenButton(self.panel, id=-1, label=text, size=(300, 50), name='demag gui') self.btn_demag_gui.SetBackgroundColour("#6ECFF6") self.btn_demag_gui.InitColours() self.Bind(wx.EVT_BUTTON, self.on_btn_demag_gui, self.btn_demag_gui) text = "Thellier GUI" self.btn_thellier_gui = buttons.GenButton(self.panel, id=-1, label=text, size=(300, 50), name='thellier gui') self.btn_thellier_gui.SetBackgroundColour("#6ECFF6") self.btn_thellier_gui.InitColours() self.Bind(wx.EVT_BUTTON, self.on_btn_thellier_gui, self.btn_thellier_gui) bSizer2.AddSpacer(20) bSizer2.Add(self.btn_demag_gui, 0, wx.ALIGN_CENTER, 0) bSizer2.AddSpacer(20) bSizer2.Add(self.btn_thellier_gui, 0, wx.ALIGN_CENTER, 0) bSizer2.AddSpacer(20) #---sizer 3 ---- bSizer3 = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Create file for upload to MagIC database"), wx.HORIZONTAL) text = "Create MagIC txt file for upload" self.btn_upload = buttons.GenButton(self.panel, id=-1, label=text, size=(300, 50)) self.btn_upload.SetBackgroundColour("#C4DF9B") self.btn_upload.InitColours() bSizer3.AddSpacer(20) bSizer3.Add(self.btn_upload, 0, wx.ALIGN_CENTER, 0) bSizer3.AddSpacer(20) self.Bind(wx.EVT_BUTTON, self.on_btn_upload, self.btn_upload) #---arange sizers ---- hbox = wx.BoxSizer(wx.HORIZONTAL) vbox = wx.BoxSizer(wx.VERTICAL) vbox.AddSpacer(5) #vbox.Add(self.logo,0,wx.ALIGN_CENTER,0) vbox.AddSpacer(5) vbox.Add(bSizer0, 0, wx.ALIGN_CENTER, 0) vbox.AddSpacer(10) #vbox.Add(bSizer0_1, 0, wx.ALIGN_CENTER, 0) #vbox.AddSpacer(10) vbox.Add(bSizer1, 0, wx.ALIGN_CENTER, 0) vbox.AddSpacer(10) vbox.Add(bSizer2, 0, wx.ALIGN_CENTER, 0) vbox.AddSpacer(10) vbox.Add(bSizer3, 0, wx.ALIGN_CENTER, 0) vbox.AddSpacer(10) hbox.AddSpacer(10) hbox.Add(vbox, 0, wx.ALIGN_CENTER, 0) hbox.AddSpacer(5) self.panel.SetSizer(hbox) hbox.Fit(self)
[ "def", "InitUI", "(", "self", ")", ":", "menubar", "=", "pmag_gui_menu", ".", "MagICMenu", "(", "self", ",", "data_model_num", "=", "self", ".", "data_model_num", ")", "self", ".", "SetMenuBar", "(", "menubar", ")", "#pnl = self.panel", "#---sizer logo ----", "#start_image = wx.Image(\"/Users/ronshaar/PmagPy/images/logo2.png\")", "#start_image = wx.Image(\"/Users/Python/simple_examples/001.png\")", "#start_image.Rescale(start_image.GetWidth(), start_image.GetHeight())", "#image = wx.BitmapFromImage(start_image)", "#self.logo = wx.StaticBitmap(self.panel, -1, image)", "#---sizer 0 ----", "bSizer0", "=", "wx", ".", "StaticBoxSizer", "(", "wx", ".", "StaticBox", "(", "self", ".", "panel", ",", "wx", ".", "ID_ANY", ",", "\"Choose MagIC project directory\"", ")", ",", "wx", ".", "HORIZONTAL", ")", "self", ".", "dir_path", "=", "wx", ".", "TextCtrl", "(", "self", ".", "panel", ",", "id", "=", "-", "1", ",", "size", "=", "(", "600", ",", "25", ")", ",", "style", "=", "wx", ".", "TE_READONLY", ")", "self", ".", "change_dir_button", "=", "buttons", ".", "GenButton", "(", "self", ".", "panel", ",", "id", "=", "-", "1", ",", "label", "=", "\"change directory\"", ",", "size", "=", "(", "-", "1", ",", "-", "1", ")", ")", "self", ".", "change_dir_button", ".", "SetBackgroundColour", "(", "\"#F8F8FF\"", ")", "self", ".", "change_dir_button", ".", "InitColours", "(", ")", "self", ".", "Bind", "(", "wx", ".", "EVT_BUTTON", ",", "self", ".", "on_change_dir_button", ",", "self", ".", "change_dir_button", ")", "bSizer0", ".", "Add", "(", "self", ".", "change_dir_button", ",", "wx", ".", "ALIGN_LEFT", ")", "bSizer0", ".", "AddSpacer", "(", "40", ")", "bSizer0", ".", "Add", "(", "self", ".", "dir_path", ",", "wx", ".", "ALIGN_CENTER_VERTICAL", ")", "# not fully implemented method for saving/reverting WD", "# last saved: []", "#bSizer0_1 = wx.StaticBoxSizer( wx.StaticBox( self.panel, wx.ID_ANY, \"Save MagIC project directory in current state or revert to last-saved state\" ), wx.HORIZONTAL )", "#saved_label = wx.StaticText(self.panel, -1, \"Last saved:\", (20, 120))", "#self.last_saved_time = wx.TextCtrl(self.panel, id=-1, size=(100,25), style=wx.TE_READONLY)", "#now = datetime.datetime.now()", "#now_string = \"{}:{}:{}\".format(now.hour, now.minute, now.second)", "#self.last_saved_time.write(now_string)", "#self.save_dir_button = buttons.GenButton(self.panel, id=-1, label = \"save dir\", size=(-1, -1))", "#self.revert_dir_button = buttons.GenButton(self.panel, id=-1, label = \"revert dir\", size=(-1, -1))", "#self.Bind(wx.EVT_BUTTON, self.on_revert_dir_button, self.revert_dir_button)", "#self.Bind(wx.EVT_BUTTON, self.on_save_dir_button, self.save_dir_button)", "#bSizer0_1.Add(saved_label, flag=wx.RIGHT, border=10)", "#bSizer0_1.Add(self.last_saved_time, flag=wx.RIGHT, border=10)", "#bSizer0_1.Add(self.save_dir_button,flag=wx.ALIGN_LEFT|wx.RIGHT, border=10)", "#bSizer0_1.Add(self.revert_dir_button,wx.ALIGN_LEFT)", "#", "#---sizer 1 ----", "bSizer1", "=", "wx", ".", "StaticBoxSizer", "(", "wx", ".", "StaticBox", "(", "self", ".", "panel", ",", "wx", ".", "ID_ANY", ",", "\"Import data to working directory\"", ")", ",", "wx", ".", "HORIZONTAL", ")", "text", "=", "\"1. 
Convert magnetometer files to MagIC format\"", "self", ".", "btn1", "=", "buttons", ".", "GenButton", "(", "self", ".", "panel", ",", "id", "=", "-", "1", ",", "label", "=", "text", ",", "size", "=", "(", "450", ",", "50", ")", ",", "name", "=", "'step 1'", ")", "self", ".", "btn1", ".", "SetBackgroundColour", "(", "\"#FDC68A\"", ")", "self", ".", "btn1", ".", "InitColours", "(", ")", "self", ".", "Bind", "(", "wx", ".", "EVT_BUTTON", ",", "self", ".", "on_convert_file", ",", "self", ".", "btn1", ")", "text", "=", "\"2. (optional) Calculate geographic/tilt-corrected directions\"", "self", ".", "btn2", "=", "buttons", ".", "GenButton", "(", "self", ".", "panel", ",", "id", "=", "-", "1", ",", "label", "=", "text", ",", "size", "=", "(", "450", ",", "50", ")", ",", "name", "=", "'step 2'", ")", "self", ".", "btn2", ".", "SetBackgroundColour", "(", "\"#FDC68A\"", ")", "self", ".", "btn2", ".", "InitColours", "(", ")", "self", ".", "Bind", "(", "wx", ".", "EVT_BUTTON", ",", "self", ".", "on_btn_orientation", ",", "self", ".", "btn2", ")", "text", "=", "\"3. (optional) Add MagIC metadata for uploading data to MagIC \"", "self", ".", "btn3", "=", "buttons", ".", "GenButton", "(", "self", ".", "panel", ",", "id", "=", "-", "1", ",", "label", "=", "text", ",", "size", "=", "(", "450", ",", "50", ")", ",", "name", "=", "'step 3'", ")", "self", ".", "btn3", ".", "SetBackgroundColour", "(", "\"#FDC68A\"", ")", "self", ".", "btn3", ".", "InitColours", "(", ")", "self", ".", "Bind", "(", "wx", ".", "EVT_BUTTON", ",", "self", ".", "on_btn_metadata", ",", "self", ".", "btn3", ")", "text", "=", "\"Unpack txt file downloaded from MagIC\"", "self", ".", "btn4", "=", "buttons", ".", "GenButton", "(", "self", ".", "panel", ",", "id", "=", "-", "1", ",", "label", "=", "text", ",", "size", "=", "(", "330", ",", "50", ")", ")", "self", ".", "btn4", ".", "SetBackgroundColour", "(", "\"#FDC68A\"", ")", "self", ".", "btn4", ".", "InitColours", "(", ")", "self", ".", "Bind", "(", "wx", ".", "EVT_BUTTON", ",", "self", ".", "on_btn_unpack", ",", "self", ".", "btn4", ")", "text", "=", "\"Convert directory to 3.0. 
format (legacy data only)\"", "self", ".", "btn1a", "=", "buttons", ".", "GenButton", "(", "self", ".", "panel", ",", "id", "=", "-", "1", ",", "label", "=", "text", ",", "size", "=", "(", "330", ",", "50", ")", ",", "name", "=", "'step 1a'", ")", "self", ".", "btn1a", ".", "SetBackgroundColour", "(", "\"#FDC68A\"", ")", "self", ".", "btn1a", ".", "InitColours", "(", ")", "self", ".", "Bind", "(", "wx", ".", "EVT_BUTTON", ",", "self", ".", "on_btn_convert_3", ",", "self", ".", "btn1a", ")", "#str = \"OR\"", "OR", "=", "wx", ".", "StaticText", "(", "self", ".", "panel", ",", "-", "1", ",", "\"or\"", ",", "(", "20", ",", "120", ")", ")", "font", "=", "wx", ".", "Font", "(", "18", ",", "wx", ".", "SWISS", ",", "wx", ".", "NORMAL", ",", "wx", ".", "NORMAL", ")", "OR", ".", "SetFont", "(", "font", ")", "#bSizer0.Add(self.panel,self.btn1,wx.ALIGN_TOP)", "bSizer1_1", "=", "wx", ".", "BoxSizer", "(", "wx", ".", "VERTICAL", ")", "bSizer1_1", ".", "AddSpacer", "(", "20", ")", "bSizer1_1", ".", "Add", "(", "self", ".", "btn1", ",", "wx", ".", "ALIGN_TOP", ")", "bSizer1_1", ".", "AddSpacer", "(", "20", ")", "bSizer1_1", ".", "Add", "(", "self", ".", "btn2", ",", "wx", ".", "ALIGN_TOP", ")", "bSizer1_1", ".", "AddSpacer", "(", "20", ")", "bSizer1_1", ".", "Add", "(", "self", ".", "btn3", ",", "wx", ".", "ALIGN_TOP", ")", "bSizer1_1", ".", "AddSpacer", "(", "20", ")", "bSizer1", ".", "Add", "(", "bSizer1_1", ",", "wx", ".", "ALIGN_CENTER", ",", "wx", ".", "EXPAND", ")", "bSizer1", ".", "AddSpacer", "(", "20", ")", "bSizer1", ".", "Add", "(", "OR", ",", "0", ",", "wx", ".", "ALIGN_CENTER", ",", "0", ")", "bSizer1", ".", "AddSpacer", "(", "20", ")", "bSizer1_2", "=", "wx", ".", "BoxSizer", "(", "wx", ".", "VERTICAL", ")", "spacing", "=", "60", "#if self.data_model_num == 3 else 90", "bSizer1_2", ".", "AddSpacer", "(", "spacing", ")", "bSizer1_2", ".", "Add", "(", "self", ".", "btn4", ",", "0", ",", "wx", ".", "ALIGN_CENTER", ",", "0", ")", "bSizer1_2", ".", "AddSpacer", "(", "20", ")", "bSizer1_2", ".", "Add", "(", "self", ".", "btn1a", ",", "0", ",", "wx", ".", "ALIGN_CENTER", ",", "0", ")", "bSizer1_2", ".", "AddSpacer", "(", "20", ")", "bSizer1", ".", "Add", "(", "bSizer1_2", ")", "bSizer1", ".", "AddSpacer", "(", "20", ")", "#---sizer 2 ----", "bSizer2", "=", "wx", ".", "StaticBoxSizer", "(", "wx", ".", "StaticBox", "(", "self", ".", "panel", ",", "wx", ".", "ID_ANY", ",", "\"Analysis and plots\"", ")", ",", "wx", ".", "HORIZONTAL", ")", "text", "=", "\"Demag GUI\"", "self", ".", "btn_demag_gui", "=", "buttons", ".", "GenButton", "(", "self", ".", "panel", ",", "id", "=", "-", "1", ",", "label", "=", "text", ",", "size", "=", "(", "300", ",", "50", ")", ",", "name", "=", "'demag gui'", ")", "self", ".", "btn_demag_gui", ".", "SetBackgroundColour", "(", "\"#6ECFF6\"", ")", "self", ".", "btn_demag_gui", ".", "InitColours", "(", ")", "self", ".", "Bind", "(", "wx", ".", "EVT_BUTTON", ",", "self", ".", "on_btn_demag_gui", ",", "self", ".", "btn_demag_gui", ")", "text", "=", "\"Thellier GUI\"", "self", ".", "btn_thellier_gui", "=", "buttons", ".", "GenButton", "(", "self", ".", "panel", ",", "id", "=", "-", "1", ",", "label", "=", "text", ",", "size", "=", "(", "300", ",", "50", ")", ",", "name", "=", "'thellier gui'", ")", "self", ".", "btn_thellier_gui", ".", "SetBackgroundColour", "(", "\"#6ECFF6\"", ")", "self", ".", "btn_thellier_gui", ".", "InitColours", "(", ")", "self", ".", "Bind", "(", "wx", ".", "EVT_BUTTON", ",", "self", ".", "on_btn_thellier_gui", ",", "self", ".", "btn_thellier_gui", ")", 
"bSizer2", ".", "AddSpacer", "(", "20", ")", "bSizer2", ".", "Add", "(", "self", ".", "btn_demag_gui", ",", "0", ",", "wx", ".", "ALIGN_CENTER", ",", "0", ")", "bSizer2", ".", "AddSpacer", "(", "20", ")", "bSizer2", ".", "Add", "(", "self", ".", "btn_thellier_gui", ",", "0", ",", "wx", ".", "ALIGN_CENTER", ",", "0", ")", "bSizer2", ".", "AddSpacer", "(", "20", ")", "#---sizer 3 ----", "bSizer3", "=", "wx", ".", "StaticBoxSizer", "(", "wx", ".", "StaticBox", "(", "self", ".", "panel", ",", "wx", ".", "ID_ANY", ",", "\"Create file for upload to MagIC database\"", ")", ",", "wx", ".", "HORIZONTAL", ")", "text", "=", "\"Create MagIC txt file for upload\"", "self", ".", "btn_upload", "=", "buttons", ".", "GenButton", "(", "self", ".", "panel", ",", "id", "=", "-", "1", ",", "label", "=", "text", ",", "size", "=", "(", "300", ",", "50", ")", ")", "self", ".", "btn_upload", ".", "SetBackgroundColour", "(", "\"#C4DF9B\"", ")", "self", ".", "btn_upload", ".", "InitColours", "(", ")", "bSizer3", ".", "AddSpacer", "(", "20", ")", "bSizer3", ".", "Add", "(", "self", ".", "btn_upload", ",", "0", ",", "wx", ".", "ALIGN_CENTER", ",", "0", ")", "bSizer3", ".", "AddSpacer", "(", "20", ")", "self", ".", "Bind", "(", "wx", ".", "EVT_BUTTON", ",", "self", ".", "on_btn_upload", ",", "self", ".", "btn_upload", ")", "#---arange sizers ----", "hbox", "=", "wx", ".", "BoxSizer", "(", "wx", ".", "HORIZONTAL", ")", "vbox", "=", "wx", ".", "BoxSizer", "(", "wx", ".", "VERTICAL", ")", "vbox", ".", "AddSpacer", "(", "5", ")", "#vbox.Add(self.logo,0,wx.ALIGN_CENTER,0)", "vbox", ".", "AddSpacer", "(", "5", ")", "vbox", ".", "Add", "(", "bSizer0", ",", "0", ",", "wx", ".", "ALIGN_CENTER", ",", "0", ")", "vbox", ".", "AddSpacer", "(", "10", ")", "#vbox.Add(bSizer0_1, 0, wx.ALIGN_CENTER, 0)", "#vbox.AddSpacer(10)", "vbox", ".", "Add", "(", "bSizer1", ",", "0", ",", "wx", ".", "ALIGN_CENTER", ",", "0", ")", "vbox", ".", "AddSpacer", "(", "10", ")", "vbox", ".", "Add", "(", "bSizer2", ",", "0", ",", "wx", ".", "ALIGN_CENTER", ",", "0", ")", "vbox", ".", "AddSpacer", "(", "10", ")", "vbox", ".", "Add", "(", "bSizer3", ",", "0", ",", "wx", ".", "ALIGN_CENTER", ",", "0", ")", "vbox", ".", "AddSpacer", "(", "10", ")", "hbox", ".", "AddSpacer", "(", "10", ")", "hbox", ".", "Add", "(", "vbox", ",", "0", ",", "wx", ".", "ALIGN_CENTER", ",", "0", ")", "hbox", ".", "AddSpacer", "(", "5", ")", "self", ".", "panel", ".", "SetSizer", "(", "hbox", ")", "hbox", ".", "Fit", "(", "self", ")" ]
42.719101
25.797753
def get_authorization_url(self, signin_with_twitter=False, access_type=None): """Get the authorization URL to redirect the user""" try: if signin_with_twitter: url = self._get_oauth_url('authenticate') if access_type: logging.warning(WARNING_MESSAGE) else: url = self._get_oauth_url('authorize') self.request_token = self._get_request_token(access_type=access_type) return self.oauth.authorization_url(url) except Exception as e: raise TweepError(e)
[ "def", "get_authorization_url", "(", "self", ",", "signin_with_twitter", "=", "False", ",", "access_type", "=", "None", ")", ":", "try", ":", "if", "signin_with_twitter", ":", "url", "=", "self", ".", "_get_oauth_url", "(", "'authenticate'", ")", "if", "access_type", ":", "logging", ".", "warning", "(", "WARNING_MESSAGE", ")", "else", ":", "url", "=", "self", ".", "_get_oauth_url", "(", "'authorize'", ")", "self", ".", "request_token", "=", "self", ".", "_get_request_token", "(", "access_type", "=", "access_type", ")", "return", "self", ".", "oauth", ".", "authorization_url", "(", "url", ")", "except", "Exception", "as", "e", ":", "raise", "TweepError", "(", "e", ")" ]
43.133333
14.2
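A minimal driving sketch for the method above, using tweepy's documented OAuthHandler entry point; the consumer key/secret and callback values are placeholders:

import tweepy

auth = tweepy.OAuthHandler('CONSUMER_KEY', 'CONSUMER_SECRET', 'https://example.com/callback')
# signin_with_twitter=True switches the endpoint from /authorize to
# /authenticate, i.e. the "Sign in with Twitter" flow.
redirect_url = auth.get_authorization_url(signin_with_twitter=True)
print(redirect_url)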
def cmp_private_numbers(pn1, pn2): """ Compare 2 sets of private numbers. This is for comparing 2 private RSA keys. :param pn1: The set of values belonging to the 1st key :param pn2: The set of values belonging to the 2nd key :return: True if the sets are the same, otherwise False. """ if not cmp_public_numbers(pn1.public_numbers, pn2.public_numbers): return False for param in ['d', 'p', 'q']: if getattr(pn1, param) != getattr(pn2, param): return False return True
[ "def", "cmp_private_numbers", "(", "pn1", ",", "pn2", ")", ":", "if", "not", "cmp_public_numbers", "(", "pn1", ".", "public_numbers", ",", "pn2", ".", "public_numbers", ")", ":", "return", "False", "for", "param", "in", "[", "'d'", ",", "'p'", ",", "'q'", "]", ":", "if", "getattr", "(", "pn1", ",", "param", ")", "!=", "getattr", "(", "pn2", ",", "param", ")", ":", "return", "False", "return", "True" ]
32.625
18.375
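A runnable sketch of this comparison using keys from the cryptography package; cmp_public_numbers is defined inline as a stand-in, since the record does not show the module's own version:

from cryptography.hazmat.primitives.asymmetric import rsa

def cmp_public_numbers(pn1, pn2):
    # Stand-in: two RSA public-number sets match when modulus and exponent match.
    return pn1.n == pn2.n and pn1.e == pn2.e

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
other = rsa.generate_private_key(public_exponent=65537, key_size=2048)

# A key matches itself; two freshly generated keys do not match.
assert cmp_private_numbers(key.private_numbers(), key.private_numbers())
assert not cmp_private_numbers(key.private_numbers(), other.private_numbers())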
def ref_context_from_geoloc(geoloc): """Return a RefContext object given a geoloc entry.""" text = geoloc.get('text') geoid = geoloc.get('geoID') rc = RefContext(name=text, db_refs={'GEOID': geoid}) return rc
[ "def", "ref_context_from_geoloc", "(", "geoloc", ")", ":", "text", "=", "geoloc", ".", "get", "(", "'text'", ")", "geoid", "=", "geoloc", ".", "get", "(", "'geoID'", ")", "rc", "=", "RefContext", "(", "name", "=", "text", ",", "db_refs", "=", "{", "'GEOID'", ":", "geoid", "}", ")", "return", "rc" ]
37.166667
11.166667
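Usage is a one-liner; the dict below mirrors the 'text'/'geoID' keys the function reads, with an illustrative GeoNames-style identifier:

rc = ref_context_from_geoloc({'text': 'Oromia', 'geoID': '444179'})
print(rc.name, rc.db_refs)  # Oromia {'GEOID': '444179'}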
def patch_file_open(): # pragma: no cover """A monkey patch to log opening and closing of files, which is useful for debugging file descriptor exhaustion.""" openfiles = set() oldfile = builtins.file class newfile(oldfile): def __init__(self, *args, **kwargs): self.x = args[0] all_fds = count_open_fds() print('### {} OPENING {} ( {} total )###'.format( len(openfiles), str(self.x), all_fds)) oldfile.__init__(self, *args, **kwargs) openfiles.add(self) def close(self): print('### {} CLOSING {} ###'.format(len(openfiles), str(self.x))) oldfile.close(self) openfiles.remove(self) def newopen(*args, **kwargs): return newfile(*args, **kwargs) builtins.file = newfile builtins.open = newopen
[ "def", "patch_file_open", "(", ")", ":", "# pragma: no cover", "openfiles", "=", "set", "(", ")", "oldfile", "=", "builtins", ".", "file", "class", "newfile", "(", "oldfile", ")", ":", "def", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "x", "=", "args", "[", "0", "]", "all_fds", "=", "count_open_fds", "(", ")", "print", "(", "'### {} OPENING {} ( {} total )###'", ".", "format", "(", "len", "(", "openfiles", ")", ",", "str", "(", "self", ".", "x", ")", ",", "all_fds", ")", ")", "oldfile", ".", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", "openfiles", ".", "add", "(", "self", ")", "def", "close", "(", "self", ")", ":", "print", "(", "'### {} CLOSING {} ###'", ".", "format", "(", "len", "(", "openfiles", ")", ",", "str", "(", "self", ".", "x", ")", ")", ")", "oldfile", ".", "close", "(", "self", ")", "openfiles", ".", "remove", "(", "self", ")", "def", "newopen", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "newfile", "(", "*", "args", ",", "*", "*", "kwargs", ")", "builtins", ".", "file", "=", "newfile", "builtins", ".", "open", "=", "newopen" ]
28.896552
18.689655
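The function above relies on Python 2's file builtin. A rough Python 3 sketch of the same idea wraps builtins.open instead (close tracking, omitted here, would additionally need to wrap the returned object's close method):

import builtins

_real_open = builtins.open
_open_files = set()

def _tracking_open(*args, **kwargs):
    # Delegate to the real open, then record and report the new handle.
    f = _real_open(*args, **kwargs)
    _open_files.add(f)
    print('### {} OPENING {} ###'.format(len(_open_files), args[0]))
    return f

builtins.open = _tracking_open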
def stats(self, **kwargs): """ Stream statistics for this container. Similar to the ``docker stats`` command. Args: decode (bool): If set to true, stream will be decoded into dicts on the fly. Only applicable if ``stream`` is True. False by default. stream (bool): If set to false, only the current stats will be returned instead of a stream. True by default. Raises: :py:class:`docker.errors.APIError` If the server returns an error. """ return self.client.api.stats(self.id, **kwargs)
[ "def", "stats", "(", "self", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "client", ".", "api", ".", "stats", "(", "self", ".", "id", ",", "*", "*", "kwargs", ")" ]
36.941176
19.176471
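A short usage sketch against docker-py's high-level API; the container name is a placeholder:

import docker

client = docker.from_env()
container = client.containers.get('my-container')  # placeholder name
# stream=False returns a single stats snapshot as a dict instead of a generator.
snapshot = container.stats(stream=False)
print(snapshot['read'])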
def set_basic_params( self, spawn_on_request=None, cheaper_algo=None, workers_min=None, workers_startup=None, workers_step=None): """ :param bool spawn_on_request: Spawn workers only after the first request. :param Algo cheaper_algo: The algorithm object to be used for adaptive process spawning. Default: ``spare``. See ``.algorithms``. :param int workers_min: Minimal workers count. Enables cheaper mode (adaptive process spawning). .. note:: Must be lower than max workers count. :param int workers_startup: The number of workers to be started when starting the application. After the app is started the algorithm can stop or start workers if needed. :param int workers_step: Number of additional processes to spawn at a time if they are needed. """ self._set('cheap', spawn_on_request, cast=bool) if cheaper_algo: self._set('cheaper-algo', cheaper_algo.name) if cheaper_algo.plugin: self._section.set_plugins_params(plugins=cheaper_algo.plugin) cheaper_algo._contribute_to_opts(self) self._set('cheaper', workers_min) self._set('cheaper-initial', workers_startup) self._set('cheaper-step', workers_step) return self._section
[ "def", "set_basic_params", "(", "self", ",", "spawn_on_request", "=", "None", ",", "cheaper_algo", "=", "None", ",", "workers_min", "=", "None", ",", "workers_startup", "=", "None", ",", "workers_step", "=", "None", ")", ":", "self", ".", "_set", "(", "'cheap'", ",", "spawn_on_request", ",", "cast", "=", "bool", ")", "if", "cheaper_algo", ":", "self", ".", "_set", "(", "'cheaper-algo'", ",", "cheaper_algo", ".", "name", ")", "if", "cheaper_algo", ".", "plugin", ":", "self", ".", "_section", ".", "set_plugins_params", "(", "plugins", "=", "cheaper_algo", ".", "plugin", ")", "cheaper_algo", ".", "_contribute_to_opts", "(", "self", ")", "self", ".", "_set", "(", "'cheaper'", ",", "workers_min", ")", "self", ".", "_set", "(", "'cheaper-initial'", ",", "workers_startup", ")", "self", ".", "_set", "(", "'cheaper-step'", ",", "workers_step", ")", "return", "self", ".", "_section" ]
39.029412
29.676471
def readFILTERLIST(self): """ Read a length-prefixed list of FILTERs """ number = self.readUI8() return [self.readFILTER() for _ in range(number)]
[ "def", "readFILTERLIST", "(", "self", ")", ":", "number", "=", "self", ".", "readUI8", "(", ")", "return", "[", "self", ".", "readFILTER", "(", ")", "for", "_", "in", "range", "(", "number", ")", "]" ]
41.75
10.25
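The length-prefixed pattern generalizes; a self-contained illustration over an in-memory byte stream, using plain struct reads in place of the reader's UI8/FILTER helpers:

import io
import struct

def read_u8(stream):
    return struct.unpack('B', stream.read(1))[0]

def read_list(stream, read_item):
    count = read_u8(stream)  # one-byte length prefix
    return [read_item(stream) for _ in range(count)]

buf = io.BytesIO(bytes([3, 10, 20, 30]))  # count=3, then three one-byte items
print(read_list(buf, read_u8))            # [10, 20, 30]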
def GetEntries(self, parser_mediator, top_level=None, **unused_kwargs): """Simple method to extract date values from a Plist. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. top_level (dict[str, object]): plist top-level key. """ for root, key, datetime_value in interface.RecurseKey(top_level): if not isinstance(datetime_value, datetime.datetime): continue event_data = plist_event.PlistTimeEventData() event_data.key = key event_data.root = root event = time_events.PythonDatetimeEvent( datetime_value, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data)
[ "def", "GetEntries", "(", "self", ",", "parser_mediator", ",", "top_level", "=", "None", ",", "*", "*", "unused_kwargs", ")", ":", "for", "root", ",", "key", ",", "datetime_value", "in", "interface", ".", "RecurseKey", "(", "top_level", ")", ":", "if", "not", "isinstance", "(", "datetime_value", ",", "datetime", ".", "datetime", ")", ":", "continue", "event_data", "=", "plist_event", ".", "PlistTimeEventData", "(", ")", "event_data", ".", "key", "=", "key", "event_data", ".", "root", "=", "root", "event", "=", "time_events", ".", "PythonDatetimeEvent", "(", "datetime_value", ",", "definitions", ".", "TIME_DESCRIPTION_WRITTEN", ")", "parser_mediator", ".", "ProduceEventWithEventData", "(", "event", ",", "event_data", ")" ]
39.894737
22
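interface.RecurseKey is not shown in this record; a stand-alone generator with the same (root, key, value) contract over nested dicts, filtered to datetimes the way the plugin does:

import datetime

def recurse_key(plist, root=''):
    # Walk nested dicts, yielding (root, key, value) like the assumed helper.
    for key, value in plist.items():
        if isinstance(value, dict):
            for item in recurse_key(value, root='{}/{}'.format(root, key)):
                yield item
        else:
            yield root, key, value

top_level = {'Session': {'LastUsed': datetime.datetime(2021, 3, 4, 5, 6)}}
for root, key, value in recurse_key(top_level):
    if isinstance(value, datetime.datetime):
        print(root, key, value)  # /Session LastUsed 2021-03-04 05:06:00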
def handle(self, locale, app=None, **options): """ command execution """ translation.activate(settings.LANGUAGE_CODE) if app: unpack = app.split('.') if len(unpack) == 2: models = [get_model(unpack[0], unpack[1])] elif len(unpack) == 1: models = get_models(get_app(unpack[0])) else: models = get_models() for model in models: if hasattr(model, 'localized_fields'): i18n.unregister(model) model_full_name = '%s.%s' % (model._meta.app_label, model._meta.module_name) update_instances = set() messages = [] for instance in model.objects.all(): for field in model.localized_fields: local_field_name = get_real_fieldname(field, locale) if hasattr(instance, local_field_name): local_field_value = getattr(instance, local_field_name) if local_field_value not in (None, u''): setattr(instance, local_field_name, None) update_instances.add(instance) messages.append(u"%s %s %s : %s will become reset to None" % (model_full_name, instance, local_field_name, force_unicode(local_field_value))) if len(update_instances): if self.ask_for_confirmation(messages, u'%s.%s' % (model._meta.app_label, model._meta.module_name)): for update_instance in update_instances: print u"resetting %s" % update_instance update_instance.save()
[ "def", "handle", "(", "self", ",", "locale", ",", "app", "=", "None", ",", "*", "*", "options", ")", ":", "translation", ".", "activate", "(", "settings", ".", "LANGUAGE_CODE", ")", "if", "app", ":", "unpack", "=", "app", ".", "split", "(", "'.'", ")", "if", "len", "(", "unpack", ")", "==", "2", ":", "models", "=", "[", "get_model", "(", "unpack", "[", "0", "]", ",", "unpack", "[", "1", "]", ")", "]", "elif", "len", "(", "unpack", ")", "==", "1", ":", "models", "=", "get_models", "(", "get_app", "(", "unpack", "[", "0", "]", ")", ")", "else", ":", "models", "=", "get_models", "(", ")", "for", "model", "in", "models", ":", "if", "hasattr", "(", "model", ",", "'localized_fields'", ")", ":", "i18n", ".", "unregister", "(", "model", ")", "model_full_name", "=", "'%s.%s'", "%", "(", "model", ".", "_meta", ".", "app_label", ",", "model", ".", "_meta", ".", "module_name", ")", "update_instances", "=", "set", "(", ")", "messages", "=", "[", "]", "for", "instance", "in", "model", ".", "objects", ".", "all", "(", ")", ":", "for", "field", "in", "model", ".", "localized_fields", ":", "local_field_name", "=", "get_real_fieldname", "(", "field", ",", "locale", ")", "if", "hasattr", "(", "instance", ",", "local_field_name", ")", ":", "local_field_value", "=", "getattr", "(", "instance", ",", "local_field_name", ")", "if", "local_field_value", "not", "in", "(", "None", ",", "u''", ")", ":", "setattr", "(", "instance", ",", "local_field_name", ",", "None", ")", "update_instances", ".", "add", "(", "instance", ")", "messages", ".", "append", "(", "u\"%s %s %s : %s will become reset to None\"", "%", "(", "model_full_name", ",", "instance", ",", "local_field_name", ",", "force_unicode", "(", "local_field_value", ")", ")", ")", "if", "len", "(", "update_instances", ")", ":", "if", "self", ".", "ask_for_confirmation", "(", "messages", ",", "u'%s.%s'", "%", "(", "model", ".", "_meta", ".", "app_label", ",", "model", ".", "_meta", ".", "module_name", ")", ")", ":", "for", "update_instance", "in", "update_instances", ":", "print", "u\"resetting %s\"", "%", "update_instance", "update_instance", ".", "save", "(", ")" ]
49.621622
23.810811
def connect(self): "Initiate the connection to a proxying hub" log.info("connecting") # don't have the connection attempt reconnects, because when it goes # down we are going to cycle to the next potential peer from the Client self._peer = connection.Peer( None, self._dispatcher, self._addrs.popleft(), backend.Socket(), reconnect=False) self._peer.start()
[ "def", "connect", "(", "self", ")", ":", "log", ".", "info", "(", "\"connecting\"", ")", "# don't have the connection attempt reconnects, because when it goes", "# down we are going to cycle to the next potential peer from the Client", "self", ".", "_peer", "=", "connection", ".", "Peer", "(", "None", ",", "self", ".", "_dispatcher", ",", "self", ".", "_addrs", ".", "popleft", "(", ")", ",", "backend", ".", "Socket", "(", ")", ",", "reconnect", "=", "False", ")", "self", ".", "_peer", ".", "start", "(", ")" ]
42.9
20.7
def is_excluded(root, excludes): # type: (unicode, List[unicode]) -> bool """Check if the directory is in the exclude list. Note: by having trailing slashes, we avoid common prefix issues, like e.g. an exclude "foo" also accidentally excluding "foobar". """ for exclude in excludes: if fnmatch(root, exclude): return True return False
[ "def", "is_excluded", "(", "root", ",", "excludes", ")", ":", "# type: (unicode, List[unicode]) -> bool", "for", "exclude", "in", "excludes", ":", "if", "fnmatch", "(", "root", ",", "exclude", ")", ":", "return", "True", "return", "False" ]
34.363636
15.636364
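A quick, runnable demonstration of the trailing-slash point made in the docstring:

from fnmatch import fnmatch

print(fnmatch('/src/foo/tests/', '/src/foo/*'))     # True: really under foo/
print(fnmatch('/src/foobar/tests/', '/src/foo/*'))  # False: shared prefix only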
def add(self, a, b): """ Parameters: - a - b """ self.send_add(a, b) return self.recv_add()
[ "def", "add", "(", "self", ",", "a", ",", "b", ")", ":", "self", ".", "send_add", "(", "a", ",", "b", ")", "return", "self", ".", "recv_add", "(", ")" ]
17.75
15
def watch_log_for_alive(self, nodes, from_mark=None, timeout=720, filename='system.log'): """ Watch the log of this node until it detects that the provided other nodes are marked UP. This method works similarly to watch_log_for_death. We want to provide a higher default timeout when this is called on DSE. """ super(DseNode, self).watch_log_for_alive(nodes, from_mark=from_mark, timeout=timeout, filename=filename)
[ "def", "watch_log_for_alive", "(", "self", ",", "nodes", ",", "from_mark", "=", "None", ",", "timeout", "=", "720", ",", "filename", "=", "'system.log'", ")", ":", "super", "(", "DseNode", ",", "self", ")", ".", "watch_log_for_alive", "(", "nodes", ",", "from_mark", "=", "from_mark", ",", "timeout", "=", "timeout", ",", "filename", "=", "filename", ")" ]
57.125
34.375
def GetDevicePath(device_handle): """Obtains the unique path for the device. Args: device_handle: reference to the device Returns: A unique path for the device, obtained from the IO Registry """ # Obtain device path from IO Registry io_service_obj = iokit.IOHIDDeviceGetService(device_handle) str_buffer = ctypes.create_string_buffer(DEVICE_PATH_BUFFER_SIZE) iokit.IORegistryEntryGetPath(io_service_obj, K_IO_SERVICE_PLANE, str_buffer) return str_buffer.value
[ "def", "GetDevicePath", "(", "device_handle", ")", ":", "# Obtain device path from IO Registry", "io_service_obj", "=", "iokit", ".", "IOHIDDeviceGetService", "(", "device_handle", ")", "str_buffer", "=", "ctypes", ".", "create_string_buffer", "(", "DEVICE_PATH_BUFFER_SIZE", ")", "iokit", ".", "IORegistryEntryGetPath", "(", "io_service_obj", ",", "K_IO_SERVICE_PLANE", ",", "str_buffer", ")", "return", "str_buffer", ".", "value" ]
29.625
22.3125
def _is_noop_block(arch, block): """ Check if the block is a no-op block by checking VEX statements. :param block: The VEX block instance. :return: True if the entire block is a single-byte or multi-byte nop instruction, False otherwise. :rtype: bool """ if arch.name == "MIPS32": if arch.memory_endness == "Iend_BE": MIPS32_BE_NOOPS = { b"\x00\x20\x08\x25", # move $at, $at } insns = set(block.bytes[i:i+4] for i in range(0, block.size, 4)) if MIPS32_BE_NOOPS.issuperset(insns): return True # Fallback # the block is a noop block if it only has IMark statements if all((type(stmt) is pyvex.IRStmt.IMark) for stmt in block.vex.statements): return True return False
[ "def", "_is_noop_block", "(", "arch", ",", "block", ")", ":", "if", "arch", ".", "name", "==", "\"MIPS32\"", ":", "if", "arch", ".", "memory_endness", "==", "\"Iend_BE\"", ":", "MIPS32_BE_NOOPS", "=", "{", "b\"\\x00\\x20\\x08\\x25\"", ",", "# move $at, $at", "}", "insns", "=", "set", "(", "block", ".", "bytes", "[", "i", ":", "i", "+", "4", "]", "for", "i", "in", "range", "(", "0", ",", "block", ".", "size", ",", "4", ")", ")", "if", "MIPS32_BE_NOOPS", ".", "issuperset", "(", "insns", ")", ":", "return", "True", "# Fallback", "# the block is a noop block if it only has IMark statements", "if", "all", "(", "(", "type", "(", "stmt", ")", "is", "pyvex", ".", "IRStmt", ".", "IMark", ")", "for", "stmt", "in", "block", ".", "vex", ".", "statements", ")", ":", "return", "True", "return", "False" ]
35.916667
22.583333
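The 4-byte instruction slicing used for the MIPS check, isolated into a runnable snippet with the same nop encoding:

MIPS32_BE_NOOPS = {b'\x00\x20\x08\x25'}  # move $at, $at

block_bytes = b'\x00\x20\x08\x25' * 3    # three identical big-endian nops
insns = set(block_bytes[i:i + 4] for i in range(0, len(block_bytes), 4))
print(MIPS32_BE_NOOPS.issuperset(insns))  # True: the block is all nops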
def Process(self, parser_mediator, registry_key, **kwargs): """Processes a Windows Registry key or value. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key. Raises: ValueError: If the Windows Registry key is not set. """ if registry_key is None: raise ValueError('Windows Registry key is not set.') # This will raise if unhandled keyword arguments are passed. super(WindowsRegistryPlugin, self).Process(parser_mediator, **kwargs) self.ExtractEvents(parser_mediator, registry_key, **kwargs)
[ "def", "Process", "(", "self", ",", "parser_mediator", ",", "registry_key", ",", "*", "*", "kwargs", ")", ":", "if", "registry_key", "is", "None", ":", "raise", "ValueError", "(", "'Windows Registry key is not set.'", ")", "# This will raise if unhandled keyword arguments are passed.", "super", "(", "WindowsRegistryPlugin", ",", "self", ")", ".", "Process", "(", "parser_mediator", ",", "*", "*", "kwargs", ")", "self", ".", "ExtractEvents", "(", "parser_mediator", ",", "registry_key", ",", "*", "*", "kwargs", ")" ]
37.777778
24.888889
def get_function_call_str(fn, args, kwargs): """Converts method call (function and its arguments) to a str(...)-like string.""" def str_converter(v): try: return str(v) except Exception: try: return repr(v) except Exception: return "<n/a str raised>" result = get_full_name(fn) + "(" first = True for v in args: if first: first = False else: result += "," result += str_converter(v) for k, v in kwargs.items(): if first: first = False else: result += "," result += str(k) + "=" + str_converter(v) result += ")" return result
[ "def", "get_function_call_str", "(", "fn", ",", "args", ",", "kwargs", ")", ":", "def", "str_converter", "(", "v", ")", ":", "try", ":", "return", "str", "(", "v", ")", "except", "Exception", ":", "try", ":", "return", "repr", "(", "v", ")", "except", "Exception", ":", "return", "\"<n/a str raised>\"", "result", "=", "get_full_name", "(", "fn", ")", "+", "\"(\"", "first", "=", "True", "for", "v", "in", "args", ":", "if", "first", ":", "first", "=", "False", "else", ":", "result", "+=", "\",\"", "result", "+=", "str_converter", "(", "v", ")", "for", "k", ",", "v", "in", "kwargs", ".", "items", "(", ")", ":", "if", "first", ":", "first", "=", "False", "else", ":", "result", "+=", "\",\"", "result", "+=", "str", "(", "k", ")", "+", "\"=\"", "+", "str_converter", "(", "v", ")", "result", "+=", "\")\"", "return", "result" ]
25.392857
17.25
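A usage sketch with a stand-in for the get_full_name helper, which this record does not define:

def get_full_name(fn):
    # Stand-in: the real helper presumably prefixes the module path.
    return fn.__qualname__

def greet(name, punct='!'):
    return 'hi ' + name + punct

print(get_function_call_str(greet, ('ada',), {'punct': '?'}))
# greet(ada,punct=?)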
def get_all(self, type_name, base_fields=None, the_filter=None, nested_fields=None): """Get all resources of the given type. :param nested_fields: nested resource fields :param base_fields: fields of this resource :param the_filter: dictionary of filter like `{'name': 'abc'}` :param type_name: Resource type. For example, pool, lun, nasServer. :return: List of resource class objects """ fields = self.get_fields(type_name, base_fields, nested_fields) the_filter = self.dict_to_filter_string(the_filter) url = '/api/types/{}/instances'.format(type_name) resp = self.rest_get(url, fields=fields, filter=the_filter) ret = resp while resp.has_next_page: resp = self.rest_get(url, fields=fields, filter=the_filter, page=resp.next_page) ret.entries.extend(resp.entries) return ret
[ "def", "get_all", "(", "self", ",", "type_name", ",", "base_fields", "=", "None", ",", "the_filter", "=", "None", ",", "nested_fields", "=", "None", ")", ":", "fields", "=", "self", ".", "get_fields", "(", "type_name", ",", "base_fields", ",", "nested_fields", ")", "the_filter", "=", "self", ".", "dict_to_filter_string", "(", "the_filter", ")", "url", "=", "'/api/types/{}/instances'", ".", "format", "(", "type_name", ")", "resp", "=", "self", ".", "rest_get", "(", "url", ",", "fields", "=", "fields", ",", "filter", "=", "the_filter", ")", "ret", "=", "resp", "while", "resp", ".", "has_next_page", ":", "resp", "=", "self", ".", "rest_get", "(", "url", ",", "fields", "=", "fields", ",", "filter", "=", "the_filter", ",", "page", "=", "resp", ".", "next_page", ")", "ret", ".", "entries", ".", "extend", "(", "resp", ".", "entries", ")", "return", "ret" ]
42.681818
19.772727
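The pagination loop is the interesting part; a stand-alone mock of the same accumulate-until-no-next-page pattern, with fetch_page as a stand-in for rest_get:

def fetch_page(page):
    # Stand-in for rest_get: three pages of fake entries.
    entries = {1: ['lun_a'], 2: ['lun_b'], 3: ['lun_c']}[page]
    return entries, page < 3  # (entries, has_next_page)

page = 1
entries, has_next = fetch_page(page)
while has_next:
    page += 1
    more, has_next = fetch_page(page)
    entries.extend(more)
print(entries)  # ['lun_a', 'lun_b', 'lun_c']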
def add_emission(self, chunksize=2**19, comp_filter=default_compression, overwrite=False, params=dict(), chunkslice='bytes'): """Add the `emission` array in '/trajectories'. """ nparams = self.numeric_params num_particles = nparams['np'] return self.add_trajectory('emission', shape=(num_particles, 0), overwrite=overwrite, chunksize=chunksize, comp_filter=comp_filter, atom=tables.Float32Atom(), title='Emission trace of each particle', params=params)
[ "def", "add_emission", "(", "self", ",", "chunksize", "=", "2", "**", "19", ",", "comp_filter", "=", "default_compression", ",", "overwrite", "=", "False", ",", "params", "=", "dict", "(", ")", ",", "chunkslice", "=", "'bytes'", ")", ":", "nparams", "=", "self", ".", "numeric_params", "num_particles", "=", "nparams", "[", "'np'", "]", "return", "self", ".", "add_trajectory", "(", "'emission'", ",", "shape", "=", "(", "num_particles", ",", "0", ")", ",", "overwrite", "=", "overwrite", ",", "chunksize", "=", "chunksize", ",", "comp_filter", "=", "comp_filter", ",", "atom", "=", "tables", ".", "Float32Atom", "(", ")", ",", "title", "=", "'Emission trace of each particle'", ",", "params", "=", "params", ")" ]
52.076923
20.230769
def rowCount(self, parent): """Reimplemented from QtCore.QAbstractItemModel""" if not parent.isValid(): v = self._conf else: v = self.get_value(parent) if isinstance(v, Section): return len(v.keys()) else: return 0
[ "def", "rowCount", "(", "self", ",", "parent", ")", ":", "if", "not", "parent", ".", "isValid", "(", ")", ":", "v", "=", "self", ".", "_conf", "else", ":", "v", "=", "self", ".", "get_value", "(", "parent", ")", "if", "isinstance", "(", "v", ",", "Section", ")", ":", "return", "len", "(", "v", ".", "keys", "(", ")", ")", "else", ":", "return", "0" ]
29.3
12.5
def get_SZ(self, psd, geometry): """ Compute the scattering matrices for the given PSD and geometries. Returns: The new amplitude (S) and phase (Z) matrices. """ if (self._S_table is None) or (self._Z_table is None): raise AttributeError( "Initialize or load the scattering table first.") if (not isinstance(psd, PSD)) or self._previous_psd != psd: self._S_dict = {} self._Z_dict = {} psd_w = psd(self._psd_D) for geom in self.geometries: self._S_dict[geom] = \ trapz(self._S_table[geom] * psd_w, self._psd_D) self._Z_dict[geom] = \ trapz(self._Z_table[geom] * psd_w, self._psd_D) self._previous_psd = psd return (self._S_dict[geometry], self._Z_dict[geometry])
[ "def", "get_SZ", "(", "self", ",", "psd", ",", "geometry", ")", ":", "if", "(", "self", ".", "_S_table", "is", "None", ")", "or", "(", "self", ".", "_Z_table", "is", "None", ")", ":", "raise", "AttributeError", "(", "\"Initialize or load the scattering table first.\"", ")", "if", "(", "not", "isinstance", "(", "psd", ",", "PSD", ")", ")", "or", "self", ".", "_previous_psd", "!=", "psd", ":", "self", ".", "_S_dict", "=", "{", "}", "self", ".", "_Z_dict", "=", "{", "}", "psd_w", "=", "psd", "(", "self", ".", "_psd_D", ")", "for", "geom", "in", "self", ".", "geometries", ":", "self", ".", "_S_dict", "[", "geom", "]", "=", "trapz", "(", "self", ".", "_S_table", "[", "geom", "]", "*", "psd_w", ",", "self", ".", "_psd_D", ")", "self", ".", "_Z_dict", "[", "geom", "]", "=", "trapz", "(", "self", ".", "_Z_table", "[", "geom", "]", "*", "psd_w", ",", "self", ".", "_psd_D", ")", "self", ".", "_previous_psd", "=", "psd", "return", "(", "self", ".", "_S_dict", "[", "geometry", "]", ",", "self", ".", "_Z_dict", "[", "geometry", "]", ")" ]
34.8
18.96
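Caching aside, the core computation is a PSD-weighted trapezoidal integral over drop diameter; a self-contained numpy illustration with made-up per-size values and a toy exponential PSD:

import numpy as np

D = np.linspace(0.1, 8.0, 200)   # drop diameters
psd_w = np.exp(-0.5 * D)         # toy exponential PSD, N(D)
S_table = 1e-3 * D ** 2          # toy per-size amplitude values

# Bulk value = integral of S(D) * N(D) dD, as in the trapz calls above.
S_bulk = np.trapz(S_table * psd_w, D)
print(S_bulk)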
def merge(self, services): """Merge extended host information into services :param services: services list, to look for a specific one :type services: alignak.objects.service.Services :return: None """ for extinfo in self: if hasattr(extinfo, 'register') and not getattr(extinfo, 'register'): # We don't have to merge template continue hosts_names = extinfo.get_name().split(",") for host_name in hosts_names: serv = services.find_srv_by_name_and_hostname(host_name, extinfo.service_description) if serv is not None: # Fusion self.merge_extinfo(serv, extinfo)
[ "def", "merge", "(", "self", ",", "services", ")", ":", "for", "extinfo", "in", "self", ":", "if", "hasattr", "(", "extinfo", ",", "'register'", ")", "and", "not", "getattr", "(", "extinfo", ",", "'register'", ")", ":", "# We don't have to merge template", "continue", "hosts_names", "=", "extinfo", ".", "get_name", "(", ")", ".", "split", "(", "\",\"", ")", "for", "host_name", "in", "hosts_names", ":", "serv", "=", "services", ".", "find_srv_by_name_and_hostname", "(", "host_name", ",", "extinfo", ".", "service_description", ")", "if", "serv", "is", "not", "None", ":", "# Fusion", "self", ".", "merge_extinfo", "(", "serv", ",", "extinfo", ")" ]
44.055556
17.777778
def resize_move(self, title, xOrigin=-1, yOrigin=-1, width=-1, height=-1, matchClass=False): """ Resize and/or move the specified window Usage: C{window.resize_move(title, xOrigin=-1, yOrigin=-1, width=-1, height=-1, matchClass=False)} Leaving any of the position/dimension values as the default (-1) will cause that value to be left unmodified. @param title: window title to match against (as case-insensitive substring match) @param xOrigin: new x origin of the window (upper left corner) @param yOrigin: new y origin of the window (upper left corner) @param width: new width of the window @param height: new height of the window @param matchClass: if True, match on the window class instead of the title """ mvArgs = ["0", str(xOrigin), str(yOrigin), str(width), str(height)] if matchClass: xArgs = ["-x"] else: xArgs = [] self._run_wmctrl(["-r", title, "-e", ','.join(mvArgs)] + xArgs)
[ "def", "resize_move", "(", "self", ",", "title", ",", "xOrigin", "=", "-", "1", ",", "yOrigin", "=", "-", "1", ",", "width", "=", "-", "1", ",", "height", "=", "-", "1", ",", "matchClass", "=", "False", ")", ":", "mvArgs", "=", "[", "\"0\"", ",", "str", "(", "xOrigin", ")", ",", "str", "(", "yOrigin", ")", ",", "str", "(", "width", ")", ",", "str", "(", "height", ")", "]", "if", "matchClass", ":", "xArgs", "=", "[", "\"-x\"", "]", "else", ":", "xArgs", "=", "[", "]", "self", ".", "_run_wmctrl", "(", "[", "\"-r\"", ",", "title", ",", "\"-e\"", ",", "','", ".", "join", "(", "mvArgs", ")", "]", "+", "xArgs", ")" ]
46.954545
26.409091
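For reference, the wmctrl invocation the method assembles is equivalent to the following; the leading 0 in the MVARG string is the gravity value the method hardcodes:

import subprocess

# Move/resize the first window whose title contains 'Terminal' to
# origin (100, 100) with size 800x600, leaving gravity at 0.
subprocess.run(['wmctrl', '-r', 'Terminal', '-e', '0,100,100,800,600'])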