language: stringclasses (2 values)
func_code_string: stringlengths (63 to 466k)
java
public static final KeyPressHandler getRegExKeyPressHandler(final String pregEx) {
    if (StringUtils.isEmpty(pregEx)) {
        return null;
    }
    RegExKeyPressHandler result = HandlerFactory.REG_EX_KEY_PRESS_HANDLER_MAP.get(pregEx);
    if (result == null) {
        result = new RegExKeyPressHandler(pregEx);
    }
    return result;
}
python
def revert(self, strip=0, root=None):
    """ apply patch in reverse order """
    reverted = copy.deepcopy(self)
    reverted._reverse()
    return reverted.apply(strip, root)
java
public void updateLastKnownStaleSequence(MetaDataContainer metaData, int partition) {
    long lastReceivedSequence;
    long lastKnownStaleSequence;
    do {
        lastReceivedSequence = metaData.getSequence();
        lastKnownStaleSequence = metaData.getStaleSequence();
        if (lastKnownStaleSequence >= lastReceivedSequence) {
            break;
        }
    } while (!metaData.casStaleSequence(lastKnownStaleSequence, lastReceivedSequence));

    if (logger.isFinestEnabled()) {
        logger.finest(format("%s:[map=%s,partition=%d,lowerSequencesStaleThan=%d,lastReceivedSequence=%d]",
                "Stale sequences updated", name, partition, metaData.getStaleSequence(), metaData.getSequence()));
    }
}
python
def delete(obj, key=None):
    """
    Delete a single key if specified, or all env if key is None

    :param obj: settings object
    :param key: key to delete from store location
    :return: None
    """
    client = StrictRedis(**obj.REDIS_FOR_DYNACONF)
    holder = obj.get("ENVVAR_PREFIX_FOR_DYNACONF")
    if key:
        client.hdel(holder.upper(), key.upper())
        obj.unset(key)
    else:
        keys = client.hkeys(holder.upper())
        client.delete(holder.upper())
        obj.unset_all(keys)
python
def parse_section_extras_require(self, section_options):
    """Parses `extras_require` configuration file section.

    :param dict section_options:
    """
    parse_list = partial(self._parse_list, separator=';')
    self['extras_require'] = self._parse_section_to_dict(
        section_options, parse_list)
python
def get_child_fn(attrs, names, bases):
    """Returns a function from the child class that matches one of the names.

    Searches the child class's set of methods (i.e., the attrs dict) for all
    the functions matching the given list of names. If more than one is found,
    an exception is raised; if one is found, it is returned; and if none are
    found, a function that calls the default method on each parent class is
    returned.
    """
    def call_method(obj, method):
        """Calls a method as either a class method or an instance method."""
        # The __get__ method takes an instance and an owner which changes
        # depending on the calling object. If the calling object is a class,
        # the instance is None and the owner will be the object itself. If the
        # calling object is an instance, the instance will be the calling
        # object and the owner will be its class. For more info on the __get__
        # method, see
        # http://docs.python.org/2/reference/datamodel.html#object.__get__.
        if isinstance(obj, type):
            instance = None
            owner = obj
        else:
            instance = obj
            owner = obj.__class__
        method.__get__(instance, owner)()

    # Create a default function that calls the default method on each parent
    default_name = names[0]

    def default_fn(obj):
        for cls in bases:
            if hasattr(cls, default_name):
                call_method(obj, getattr(cls, default_name))
    default_fn.__name__ = default_name

    # Get all of the functions in the child class that match the list of names
    fns = [(name, attrs[name]) for name in names if name in attrs]

    # Raise an error if more than one setup/teardown method is found
    if len(fns) > 1:
        raise RuntimeError("Cannot have more than one setup or teardown "
                           "method per context (class or test).")
    # If one setup/teardown function was found, return it
    elif len(fns) == 1:
        name, fn = fns[0]

        def child_fn(obj):
            call_method(obj, fn)
        child_fn.__name__ = name
        return child_fn
    # Otherwise, return the default function
    else:
        return default_fn
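A minimal, self-contained sketch (all names here are hypothetical) of the `__get__` binding that `call_method` relies on: a plain function binds to an instance, while a classmethod binds to the class even when no instance is supplied.

# Illustration only; the __get__ descriptor protocol is the real mechanism.
def greet(self):
    print("hello from", self)

class Widget:
    pass

w = Widget()

# Instance binding: __get__(w, Widget) returns a method bound to w.
greet.__get__(w, Widget)()   # hello from <__main__.Widget object at ...>

# For a classmethod, binding with instance=None still yields a callable
# bound to the class, which is why call_method works when obj is a class.
cm = classmethod(greet)
cm.__get__(None, Widget)()   # hello from <class '__main__.Widget'>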
python
def get_vpc_dict():
    """Returns dictionary of named VPCs {name: vpc}

    Assert fails if there's more than one VPC with same name."""
    client = get_ec2_client()
    response = client.describe_vpcs()
    assert is_good_response(response)

    result = OrderedDict()
    ec2 = get_ec2_resource()
    for vpc_response in response['Vpcs']:
        key = get_name(vpc_response.get('Tags', []))
        if not key or key == EMPTY_NAME:
            # skip VPCs that don't have a name assigned
            continue

        if key in result:
            util.log(f"Warning: Duplicate VPC group {key} in {response}")
            if DUPLICATE_CHECKING:
                assert False
        result[key] = ec2.Vpc(vpc_response['VpcId'])

    return result
python
def plot_scalar(step, var, field=None, axis=None, set_cbar=True, **extra):
    """Plot scalar field.

    Args:
        step (:class:`~stagpy.stagyydata._Step`): a step of a StagyyData
            instance.
        var (str): the scalar field name.
        field (:class:`numpy.array`): if not None, it is plotted instead of
            step.fields[var]. This is useful to plot a masked or rescaled
            array. Note that if conf.scaling.dimensional is True, this field
            will be scaled accordingly.
        axis (:class:`matplotlib.axes.Axes`): the axis object where the field
            should be plotted. If set to None, a new figure with one subplot
            is created.
        set_cbar (bool): whether to add a colorbar to the plot.
        extra (dict): options that will be passed on to
            :func:`matplotlib.axes.Axes.pcolormesh`.

    Returns:
        fig, axis, surf, cbar
            handles to various :mod:`matplotlib` objects, respectively the
            figure, the axis, the surface returned by
            :func:`~matplotlib.axes.Axes.pcolormesh`, and the colorbar
            returned by :func:`matplotlib.pyplot.colorbar`.
    """
    if var in phyvars.FIELD:
        meta = phyvars.FIELD[var]
    else:
        meta = phyvars.FIELD_EXTRA[var]
        meta = phyvars.Varf(misc.baredoc(meta.description), meta.dim)
    if step.geom.threed:
        raise NotAvailableError('plot_scalar only implemented for 2D fields')

    xmesh, ymesh, fld = get_meshes_fld(step, var)
    xmin, xmax = xmesh.min(), xmesh.max()
    ymin, ymax = ymesh.min(), ymesh.max()

    if field is not None:
        fld = field
    if conf.field.perturbation:
        fld = fld - np.mean(fld, axis=0)
    if conf.field.shift:
        fld = np.roll(fld, conf.field.shift, axis=0)
    fld, unit = step.sdat.scale(fld, meta.dim)

    if axis is None:
        fig, axis = plt.subplots(ncols=1)
    else:
        fig = axis.get_figure()

    if step.sdat.par['magma_oceans_in']['magma_oceans_mode']:
        rcmb = step.sdat.par['geometry']['r_cmb']
        xmax = rcmb + 1
        ymax = xmax
        xmin = -xmax
        ymin = -ymax
        rsurf = xmax if step.timeinfo['thick_tmo'] > 0 \
            else step.geom.r_mesh[0, 0, -3]
        cmb = mpat.Circle((0, 0), rcmb, color='dimgray', zorder=0)
        psurf = mpat.Circle((0, 0), rsurf, color='indianred', zorder=0)
        axis.add_patch(psurf)
        axis.add_patch(cmb)

    extra_opts = dict(
        cmap=conf.field.cmap.get(var),
        vmin=conf.plot.vmin,
        vmax=conf.plot.vmax,
        norm=mpl.colors.LogNorm() if var == 'eta' else None,
        rasterized=conf.plot.raster,
        shading='gouraud' if conf.field.interpolate else 'flat',
    )
    extra_opts.update(extra)
    surf = axis.pcolormesh(xmesh, ymesh, fld, **extra_opts)

    cbar = None
    if set_cbar:
        cbar = plt.colorbar(surf, shrink=conf.field.shrinkcb)
        cbar.set_label(meta.description +
                       (' pert.' if conf.field.perturbation else '') +
                       (' ({})'.format(unit) if unit else ''))
    if step.geom.spherical or conf.plot.ratio is None:
        plt.axis('equal')
        plt.axis('off')
    else:
        axis.set_aspect(conf.plot.ratio / axis.get_data_ratio())
    axis.set_adjustable('box')
    axis.set_xlim(xmin, xmax)
    axis.set_ylim(ymin, ymax)
    return fig, axis, surf, cbar
python
def render_template(process, template_string, context):
    """Render template using the specified expression engine."""
    from resolwe.flow.managers import manager

    # Get the appropriate expression engine. If none is defined, do not
    # evaluate any expressions.
    expression_engine = process.requirements.get('expression-engine', None)
    if not expression_engine:
        return template_string

    return manager.get_expression_engine(expression_engine).evaluate_block(template_string, context)
java
public void requestDelete(final String id) throws SDKException {
    CloseableHttpResponse response = null;
    HttpDelete httpDelete = null;
    checkToken();
    try {
        log.debug("pathUrl: " + pathUrl);
        log.debug("id: " + pathUrl + "/" + id);
        // call the api here
        log.info("Executing delete on: " + this.pathUrl + "/" + id);
        httpDelete = new HttpDelete(this.pathUrl + "/" + id);
        httpDelete.setHeader(new BasicHeader("project-id", projectId));
        if (token != null) {
            httpDelete.setHeader(new BasicHeader("authorization", bearerToken.replaceAll("\"", "")));
        }
        response = httpClient.execute(httpDelete);

        // check response status
        RestUtils.checkStatus(response, HttpURLConnection.HTTP_NO_CONTENT);
        httpDelete.releaseConnection();
        // return the response of the request
    } catch (IOException e) {
        // catch request exceptions here
        log.error(e.getMessage(), e);
        if (httpDelete != null) {
            httpDelete.releaseConnection();
        }
        throw new SDKException("Could not http-delete", e.getStackTrace(), e.getMessage());
    } catch (SDKException e) {
        if (response != null && (response.getStatusLine().getStatusCode() == HttpStatus.SC_UNAUTHORIZED)) {
            token = null;
            if (httpDelete != null) {
                httpDelete.releaseConnection();
            }
            requestDelete(id);
            return;
        }
        throw new SDKException(
                "Could not http-delete or the api response was wrong", e.getStackTrace(), e.getMessage());
    } finally {
        if (httpDelete != null) {
            httpDelete.releaseConnection();
        }
    }
}
python
def compare(self, first, second, chamber, type='votes', congress=CURRENT_CONGRESS):
    """
    See how often two members voted together in a given Congress.
    Takes two member IDs, a chamber and a Congress number.
    """
    check_chamber(chamber)
    path = "members/{first}/{type}/{second}/{congress}/{chamber}.json"
    path = path.format(first=first, second=second, type=type,
                       congress=congress, chamber=chamber)
    return self.fetch(path)
java
private MjdbcSQLException translateJDBC4Exception(String reason, String SQLState, int vendorCode,
        SQLException cause) throws MjdbcException {
    MjdbcSQLException result = null;

    if (MappingUtils.objectAssignableTo(cause, "java.sql.SQLTransientException")) {
        if (MappingUtils.objectAssignableTo(cause, "java.sql.SQLTransactionRollbackException")) {
            result = new ConcurrencyFailureException(reason, SQLState, vendorCode);
        } else if (MappingUtils.objectAssignableTo(cause, "java.sql.SQLTransientConnectionException")) {
            result = new TransientDataAccessResourceException(reason, SQLState, vendorCode);
        } else if (MappingUtils.objectAssignableTo(cause, "java.sql.SQLTimeoutException")) {
            result = new QueryTimeoutException(reason, SQLState, vendorCode);
        }
    } else if (MappingUtils.objectAssignableTo(cause, "java.sql.SQLNonTransientException")) {
        if (MappingUtils.objectAssignableTo(cause, "java.sql.SQLDataException")) {
            result = new DataIntegrityViolationException(reason, SQLState, vendorCode);
        } else if (MappingUtils.objectAssignableTo(cause, "java.sql.SQLFeatureNotSupportedException")) {
            result = new InvalidDataAccessApiUsageException(reason, SQLState, vendorCode);
        } else if (MappingUtils.objectAssignableTo(cause, "java.sql.SQLIntegrityConstraintViolationException")) {
            result = new DataIntegrityViolationException(reason, SQLState, vendorCode);
        } else if (MappingUtils.objectAssignableTo(cause, "java.sql.SQLInvalidAuthorizationSpecException")) {
            result = new PermissionDeniedDataAccessException(reason, SQLState, vendorCode);
        } else if (MappingUtils.objectAssignableTo(cause, "java.sql.SQLNonTransientConnectionException")) {
            result = new DataAccessResourceFailureException(reason, SQLState, vendorCode);
        } else if (MappingUtils.objectAssignableTo(cause, "java.sql.SQLSyntaxErrorException")) {
            result = new BadSqlGrammarException(reason, SQLState, vendorCode);
        }
    } else if (MappingUtils.objectAssignableTo(cause, "java.sql.SQLRecoverableException")) {
        result = new RecoverableDataAccessException(reason, SQLState, vendorCode);
    }

    return result;
}
python
def get_sos_step_steps(stmt):
    '''Extract sos_step(x) from statement'''
    opt_values = get_param_of_function(
        'sos_step', stmt, extra_dict=env.sos_dict.dict())
    for value in opt_values:
        if len(value) != 1:
            raise ValueError('sos_step accepts one and only one parameter')
    return [x[0] for x in opt_values]
java
public void paintFrame (Graphics2D g, int index, int x, int y) {
    _mirages[index].paint(g, x, y);
}
python
def _get_block_transaction_data(db: BaseDB, transaction_root: Hash32) -> Iterable[Hash32]:
    """
    Returns iterable of the encoded transactions for the given block header
    """
    transaction_db = HexaryTrie(db, root_hash=transaction_root)
    for transaction_idx in itertools.count():
        transaction_key = rlp.encode(transaction_idx)
        if transaction_key in transaction_db:
            yield transaction_db[transaction_key]
        else:
            break
java
public static void writeXML(Document xmldocument, String file)
        throws ParserConfigurationException, IOException {
    assert file != null : AssertMessages.notNullParameter();
    try (FileOutputStream fos = new FileOutputStream(file)) {
        writeNode(xmldocument, fos);
    }
}
python
def validate_indices(indices, n):
    """
    Perform bounds-checking for an indexer.

    -1 is allowed for indicating missing values.

    Parameters
    ----------
    indices : ndarray
    n : int
        length of the array being indexed

    Raises
    ------
    ValueError

    Examples
    --------
    >>> validate_indices([1, 2], 3)
    # OK
    >>> validate_indices([1, -2], 3)
    ValueError
    >>> validate_indices([1, 2, 3], 3)
    IndexError
    >>> validate_indices([-1, -1], 0)
    # OK
    >>> validate_indices([0, 1], 0)
    IndexError
    """
    if len(indices):
        min_idx = indices.min()
        if min_idx < -1:
            msg = ("'indices' contains values less than allowed ({} < {})"
                   .format(min_idx, -1))
            raise ValueError(msg)

        max_idx = indices.max()
        if max_idx >= n:
            raise IndexError("indices are out-of-bounds")
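The examples above pass plain lists, but the body calls .min()/.max(), so an ndarray is assumed. A runnable sketch, assuming the function above is in scope:

import numpy as np

validate_indices(np.array([1, 2]), 3)    # OK: all indices within bounds
validate_indices(np.array([-1, -1]), 0)  # OK: -1 marks missing values
try:
    validate_indices(np.array([1, -2]), 3)
except ValueError as exc:
    print(exc)  # 'indices' contains values less than allowed (-2 < -1)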
java
public ActiveTunnel[] getLocalForwardingTunnels() throws IOException {
    Vector<ActiveTunnel> v = new Vector<ActiveTunnel>();
    String[] localForwardings = getLocalForwardings();
    for (int i = 0; i < localForwardings.length; i++) {
        ActiveTunnel[] tmp = getLocalForwardingTunnels(localForwardings[i]);
        for (int x = 0; x < tmp.length; x++) {
            v.add(tmp[x]);
        }
    }
    return (ActiveTunnel[]) v.toArray(new ActiveTunnel[v.size()]);
}
python
def verify(self, h, sig):
    """
    Return whether a signature is valid for hash h using this key.
    """
    val = from_bytes_32(h)
    pubkey = self.public_pair()
    return self._generator.verify(pubkey, val, sigdecode_der(sig))
python
def set_prod_state(prod_state, device=None):
    '''
    A function to set the prod_state in zenoss.

    Parameters:
        prod_state:     (Required) Integer value of the state
        device:         (Optional) Will use the grain 'fqdn' by default.

    CLI Example:

        salt zenoss.set_prod_state 1000 hostname
    '''
    if not device:
        device = __salt__['grains.get']('fqdn')

    device_object = find_device(device)

    if not device_object:
        return "Unable to find a device in Zenoss for {0}".format(device)

    log.info('Setting prodState to %d on %s device', prod_state, device)
    data = dict(uids=[device_object['uid']], prodState=prod_state, hashcheck=device_object['hash'])
    return _router_request('DeviceRouter', 'setProductionState', [data])
python
def consumer_initialize_task(processor, consumer_client, shard_id, cursor_position,
                             cursor_start_time, cursor_end_time=None):
    """
    return TaskResult if failed, or else, return InitTaskResult
    :param processor:
    :param consumer_client:
    :param shard_id:
    :param cursor_position:
    :param cursor_start_time:
    :param cursor_end_time:
    :return:
    """
    try:
        processor.initialize(shard_id)
        is_cursor_persistent = False
        check_point = consumer_client.get_check_point(shard_id)
        if check_point['checkpoint'] and len(check_point['checkpoint']) > 0:
            is_cursor_persistent = True
            cursor = check_point['checkpoint']
        else:
            if cursor_position == CursorPosition.BEGIN_CURSOR:
                cursor = consumer_client.get_begin_cursor(shard_id)
            elif cursor_position == CursorPosition.END_CURSOR:
                cursor = consumer_client.get_end_cursor(shard_id)
            else:
                cursor = consumer_client.get_cursor(shard_id, cursor_start_time)

        end_cursor = None
        if cursor_end_time is not None:
            end_cursor = consumer_client.get_cursor(shard_id, cursor_end_time)

        return InitTaskResult(cursor, is_cursor_persistent, end_cursor)
    except Exception as e:
        return TaskResult(e)
python
def get_arg_parse():
    """Parses the Command Line Arguments using argparse."""
    # Create parser object:
    objParser = argparse.ArgumentParser()

    # Add argument to namespace -strCsvPrf results file path:
    objParser.add_argument('-strCsvPrf', required=True,
                           metavar='/path/to/my_prior_res',
                           help='Absolute file path of prior pRF results. '
                                'Ignored if in testing mode.'
                           )

    # Add argument to namespace -strStmApr results file path:
    objParser.add_argument('-strStmApr', required=True,
                           metavar='/path/to/my_prior_res',
                           help='Absolute file path to npy file with '
                                'stimulus apertures. Ignored if in testing '
                                'mode.'
                           )

    # Add argument to namespace -lgcNoise flag:
    objParser.add_argument('-lgcNoise', dest='lgcNoise',
                           action='store_true', default=False,
                           help='Should noise be added to the simulated pRF '
                                'time course?')

    # Add argument to namespace -lgcRtnNrl flag:
    objParser.add_argument('-lgcRtnNrl', dest='lgcRtnNrl',
                           action='store_true', default=False,
                           help='Should neural time course, unconvolved with '
                                'hrf, be returned as well?')

    objParser.add_argument('-supsur', nargs='+',
                           help='List of floats that represent the ratio of '
                                'size neg surround to size pos center.',
                           type=float, default=None)

    # Namespace object containing arguments and values:
    objNspc = objParser.parse_args()

    return objNspc
java
@Override
public List<TextSpan> getSpans(Token token) {
    List<TextSpan> spans = new ArrayList<TextSpan>();
    for (String type : connector.token2ItsTextSpans.keySet()) {
        Map<Token, TextSpan> tokToSpan = connector.token2ItsTextSpans.get(type);
        TextSpan span = tokToSpan.get(token);
        if (span != null) {
            spans.add(span);
        }
    }
    return spans;
}
python
async def connect(
        cls,
        endpoint=None,
        uuid=None,
        username=None,
        password=None,
        cacert=None,
        bakery_client=None,
        loop=None,
        max_frame_size=None,
        retries=3,
        retry_backoff=10,
):
    """Connect to the websocket.

    If uuid is None, the connection will be to the controller. Otherwise
    it will be to the model.

    :param str endpoint: The hostname:port of the controller to connect to.
    :param str uuid: The model UUID to connect to (None for a
        controller-only connection).
    :param str username: The username for controller-local users (or None
        to use macaroon-based login.)
    :param str password: The password for controller-local users.
    :param str cacert: The CA certificate of the controller
        (PEM formatted).
    :param httpbakery.Client bakery_client: The macaroon bakery client to
        use when performing macaroon-based login. Macaroon tokens acquired
        when logging in will be saved to bakery_client.cookies. If this is
        None, a default bakery_client will be used.
    :param asyncio.BaseEventLoop loop: The event loop to use for async
        operations.
    :param int max_frame_size: The maximum websocket frame size to allow.
    :param int retries: When connecting or reconnecting, and all endpoints
        fail, how many times to retry the connection before giving up.
    :param int retry_backoff: Number of seconds to increase the wait
        between connection retry attempts (a backoff of 10 with 3 retries
        would wait 10s, 20s, and 30s).
    """
    self = cls()
    if endpoint is None:
        raise ValueError('no endpoint provided')
    self.uuid = uuid
    if bakery_client is None:
        bakery_client = httpbakery.Client()
    self.bakery_client = bakery_client
    if username and '@' in username and not username.endswith('@local'):
        # We're trying to log in as an external user - we need to use
        # macaroon authentication with no username or password.
        if password is not None:
            raise errors.JujuAuthError('cannot log in as external '
                                       'user with a password')
        username = None
    self.usertag = tag.user(username)
    self.password = password
    self.loop = loop or asyncio.get_event_loop()

    self.__request_id__ = 0

    # The following instance variables are initialized by the
    # _connect_with_redirect method, but create them here
    # as a reminder that they will exist.
    self.addr = None
    self.ws = None
    self.endpoint = None
    self.cacert = None
    self.info = None

    # Create the _Task objects but don't start the tasks yet.
    self._pinger_task = _Task(self._pinger, self.loop)
    self._receiver_task = _Task(self._receiver, self.loop)

    self._retries = retries
    self._retry_backoff = retry_backoff

    self.facades = {}
    self.messages = IdQueue(loop=self.loop)
    self.monitor = Monitor(connection=self)
    if max_frame_size is None:
        max_frame_size = self.MAX_FRAME_SIZE
    self.max_frame_size = max_frame_size
    await self._connect_with_redirect([(endpoint, cacert)])
    return self
python
def establish_ssh(target=None, auto_trust=False, allow_agent=True, look_keys=True):
    r'''
    Establish a SSH connection to a remote host. It should be able to use
    SSH's config file Host name declarations. By default, will not
    automatically add trust for hosts, will use SSH agent and will try to
    load keys.
    '''

    def password_prompt(username, hostname):
        r'''
        If the Host is relying on password authentication, lets ask it.
        Relying on SSH itself to take care of that would not work when the
        remote authentication is password behind a SSH-key+2FA jumphost.
        '''
        return getpass.getpass('No SSH key for %s@%s, please provide password: ' % (username, hostname))

    ssh_conn = None
    if target is not None:
        ssh_conf = get_sshconfig()
        cfg = {
            'hostname': None,
            'port': 22,
            'allow_agent': allow_agent,
            'look_for_keys': look_keys
        }
        if target in ssh_conf:  # was ssh_conf.has_key(target); has_key is Python 2 only
            user_config = ssh_conf.get(target)

            # If ssh_config file's Host defined 'User' instead of 'Username'
            if 'user' in user_config and 'username' not in user_config:
                user_config['username'] = user_config['user']
                del user_config['user']

            for k in ('username', 'hostname', 'port'):
                if k in user_config:
                    cfg[k] = user_config[k]

            # Assume Password auth. If we don't do that, then when connecting
            # through a jumphost we will run into issues and the user will
            # not be able to input his password to the SSH prompt.
            if 'identityfile' in user_config:
                cfg['key_filename'] = user_config['identityfile']
            else:
                cfg['password'] = password_prompt(cfg['username'], cfg['hostname'] or target)

            # Should be the last one, since ProxyCommand will issue connection to remote host
            if 'proxycommand' in user_config:
                cfg['sock'] = paramiko.ProxyCommand(user_config['proxycommand'])
        else:
            cfg['username'] = target.split('@')[0]
            cfg['hostname'] = target.split('@')[1].split(':')[0]
            cfg['password'] = password_prompt(cfg['username'], cfg['hostname'])
            try:
                cfg['port'] = int(target.split('@')[1].split(':')[1])
            except IndexError:
                # IndexError will happen if no :PORT is there.
                # Default value 22 is defined above in 'cfg'.
                pass

        ssh_conn = paramiko.SSHClient()
        if auto_trust:
            ssh_conn.set_missing_host_key_policy(paramiko.AutoAddPolicy())

        ssh_conn.connect(**cfg)

    return ssh_conn
python
def encode_example(self, example_data):
    """See base class for details."""
    np_dtype = np.dtype(self._dtype.as_numpy_dtype)
    # Convert to numpy if possible
    if not isinstance(example_data, np.ndarray):
        example_data = np.array(example_data, dtype=np_dtype)
    # Ensure the shape and dtype match
    if example_data.dtype != np_dtype:
        raise ValueError('Dtype {} do not match {}'.format(
            example_data.dtype, np_dtype))
    utils.assert_shape_match(example_data.shape, self._shape)
    # For booleans, convert to integer (tf.train.Example does not support bool)
    if example_data.dtype == np.bool_:
        example_data = example_data.astype(int)
    return example_data
java
@Override
public void associateLog(DistributedRecoveryLog otherLog, boolean failAssociatedLog) {
    if (tc.isEntryEnabled())
        Tr.entry(tc, "associateLog", new Object[] { otherLog, failAssociatedLog, this });

    if (otherLog instanceof RecoveryLogImpl)
        _recoveryLog.associateLog(((RecoveryLogImpl) otherLog).getMultiScopeLog(), failAssociatedLog);
    else
        _recoveryLog.associateLog(otherLog, failAssociatedLog);

    if (tc.isEntryEnabled())
        Tr.exit(tc, "associateLog");
}
python
def retrieve_object(cache, template, indexes):
    """Retrieve an object from Redis using a pipeline.

    Arguments:
      template: a dictionary containing the keys for the object and template
        strings for the corresponding redis keys. The template string uses
        named string interpolation format.

        Example:
          {
            'username': 'user:%(id)s:username',
            'email': 'user:%(id)s:email',
            'phone': 'user:%(id)s:phone'
          }

      indexes: a dictionary containing the values to use to construct the
        redis keys:

        Example:
          {
            'id': 342
          }

    Returns: a dictionary with the same keys as template, but containing the
      values retrieved from redis, if all the values are retrieved. If any
      value is missing, returns None.

      Example:
        {
          'username': 'bob',
          'email': '[email protected]',
          'phone': '555-555-5555'
        }
    """
    keys = []
    with cache as redis_connection:
        pipe = redis_connection.pipeline()
        for (result_key, redis_key_template) in template.items():
            keys.append(result_key)
            pipe.get(redis_key_template % indexes)
        results = pipe.execute()
    return None if None in results else dict(zip(keys, results))
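A quick sketch of the key-construction step alone (values made up, no Redis required): %-style named interpolation turns each template into a concrete key.

# Hypothetical template/indexes, just to show how the keys are built.
template = {
    'username': 'user:%(id)s:username',
    'email': 'user:%(id)s:email',
}
indexes = {'id': 342}

for field, key_template in template.items():
    print(field, '->', key_template % indexes)
# username -> user:342:username
# email -> user:342:email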
python
def correlation_matrix(adata, name_list=None, groupby=None, group=None, n_genes=20,
                       data='Complete', method='pearson', annotation_key=None):
    """Calculate correlation matrix.

    Calculate a correlation matrix for genes stored in sample annotation
    using :func:`~scanpy.api.tl.rank_genes_groups`.

    Parameters
    ----------
    adata : :class:`~anndata.AnnData`
        Annotated data matrix.
    name_list : list, optional (default: None)
        Takes a list of genes for which to calculate the correlation matrix
    groupby : `str`, optional (default: None)
        If no name list is passed, genes are selected from the results of
        rank_gene_groups. Then this is the key of the sample grouping to
        consider. Note that in this case also a group index has to be
        specified.
    group : `int`, optional (default: None)
        Group index for which the correlation matrix for top_ranked genes
        should be calculated. Currently only int is supported, will change
        very soon
    n_genes : `int`, optional (default: 20)
        For how many genes to calculate correlation matrix? If specified,
        cuts the name list (in whatever order it is passed).
    data : {'Complete', 'Group', 'Rest'}, optional (default: 'Complete')
        At the moment, this is only relevant for the case that name_list is
        drawn from rank_gene_groups results. If specified, collects mask for
        the called group and then takes only those cells specified.
        If 'Complete', calculate correlation using full data
        If 'Group', calculate correlation within the selected group.
        If 'Rest', calculate correlation for everything except the group
    method : {'pearson', 'kendall', 'spearman'} optional (default: 'pearson')
        Which kind of correlation coefficient to use
        pearson : standard correlation coefficient
        kendall : Kendall Tau correlation coefficient
        spearman : Spearman rank correlation
    annotation_key : String, optional (default: None)
        Allows to define the name of the anndata entry where results are
        stored.
    """
    # TODO: At the moment, only works for int identifiers

    # If no genes are passed, selects ranked genes from sample annotation.
    # At the moment, only calculate one table (Think about what comes next)
    if name_list is None:
        name_list = list()
        for j, k in enumerate(adata.uns['rank_genes_groups_gene_names']):
            if j >= n_genes:
                break
            name_list.append(adata.uns['rank_genes_groups_gene_names'][j][group])
    else:
        if len(name_list) > n_genes:
            name_list = name_list[0:n_genes]

    # If special method (later), truncate
    adata_relevant = adata[:, name_list]
    # This line just makes group_mask access easier. Nothing else but 'all'
    # will stand here.
    groups = 'all'
    if data == 'Complete' or groupby is None:
        if issparse(adata_relevant.X):
            Data_array = adata_relevant.X.todense()
        else:
            Data_array = adata_relevant.X
    else:
        # get group_mask
        groups_order, groups_masks = utils.select_groups(
            adata, groups, groupby)
        if data == 'Group':
            if issparse(adata_relevant.X):
                Data_array = adata_relevant.X[groups_masks[group], :].todense()
            else:
                Data_array = adata_relevant.X[groups_masks[group], :]
        elif data == 'Rest':
            if issparse(adata_relevant.X):
                Data_array = adata_relevant.X[~groups_masks[group], :].todense()
            else:
                Data_array = adata_relevant.X[~groups_masks[group], :]
        else:
            logg.error('data argument should be either <Complete> or <Group> or <Rest>')

    # Distinguish between sparse and non-sparse data
    DF_array = pd.DataFrame(Data_array, columns=name_list)
    cor_table = DF_array.corr(method=method)
    if annotation_key is None:
        if groupby is None:
            adata.uns['Correlation_matrix'] = cor_table
        else:
            adata.uns['Correlation_matrix' + groupby + str(group)] = cor_table
    else:
        adata.uns[annotation_key] = cor_table
java
public static Date parseDate(String date) {
    if (date == null) {
        return null;
    }
    try {
        return org.apache.commons.lang.time.DateUtils.parseDate(date,
                new String[] {DSP_DEFAULT_TIME_FORMAT});
    } catch (ParseException e) {
        return null;
    }
}
python
def authorization_denied_view(request):
    """Proxy for the view referenced in ``FANDJANGO_AUTHORIZATION_DENIED_VIEW``."""
    authorization_denied_module_name = AUTHORIZATION_DENIED_VIEW.rsplit('.', 1)[0]
    authorization_denied_view_name = AUTHORIZATION_DENIED_VIEW.split('.')[-1]

    authorization_denied_module = import_module(authorization_denied_module_name)
    authorization_denied_view = getattr(authorization_denied_module, authorization_denied_view_name)

    return authorization_denied_view(request)
java
protected void checkThreshold (final int nCount) throws IOException {
    if (!m_bThresholdExceeded && (m_nWritten + nCount > m_nThreshold)) {
        m_bThresholdExceeded = true;
        onThresholdReached ();
    }
}
python
def format_jid_instance(jid, job):
    '''
    Format the jid correctly
    '''
    ret = format_job_instance(job)
    ret.update({'StartTime': jid_to_time(jid)})
    return ret
java
@Override
public synchronized Reader executeCommandReader(String command) throws SshException, IOException {
    if (!isConnected()) {
        throw new IllegalStateException("Not connected!");
    }
    try {
        Channel channel = connectSession.openChannel("exec");
        ((ChannelExec) channel).setCommand(command);
        InputStreamReader reader = new InputStreamReader(channel.getInputStream(), "utf-8");
        channel.connect();
        return reader;
    } catch (JSchException ex) {
        throw new SshException(ex);
    }
}
python
def parse_date(datestr):
    """ Parse a date expression into a tuple of:

    (start_date, span_type, span_format)

    Arguments:

    datestr -- A date specification, in the format of YYYY-MM-DD
    (dashes optional)
    """

    match = re.match(
        r'([0-9]{4})(-?([0-9]{1,2}))?(-?([0-9]{1,2}))?(_w)?$', datestr)
    if not match:
        return (arrow.get(datestr,
                          tzinfo=config.timezone).replace(tzinfo=config.timezone),
                'day', 'YYYY-MM-DD')

    year, month, day, week = match.group(1, 3, 5, 6)
    start = arrow.Arrow(year=int(year), month=int(month or 1),
                        day=int(day or 1), tzinfo=config.timezone)

    if week:
        return start.span('week')[0], 'week', WEEK_FORMAT

    if day:
        return start, 'day', DAY_FORMAT

    if month:
        return start, 'month', MONTH_FORMAT

    if year:
        return start, 'year', YEAR_FORMAT

    raise ValueError("Could not parse date: {}".format(datestr))
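A standalone check of the date pattern above using only the stdlib (the PATTERN name is illustrative), showing which groups fire for each input shape:

import re

PATTERN = r'([0-9]{4})(-?([0-9]{1,2}))?(-?([0-9]{1,2}))?(_w)?$'

for datestr in ('2019', '2019-07', '2019-07-04', '2019-07-04_w'):
    match = re.match(PATTERN, datestr)
    year, month, day, week = match.group(1, 3, 5, 6)
    print(datestr, '->', year, month, day, week)
# 2019 -> 2019 None None None
# 2019-07 -> 2019 07 None None
# 2019-07-04 -> 2019 07 04 None
# 2019-07-04_w -> 2019 07 04 _w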
python
def unsubscribe(self):
    """Unsubscribes this subscriber from the associated list."""
    body = {"EmailAddress": self.email_address}
    response = self._post("/subscribers/%s/unsubscribe.json" %
                          self.list_id, json.dumps(body))
python
def chgrp(path, group):
    '''
    Change the group of a file

    path
        path to the file or directory

    group
        group owner

    CLI Example:

    .. code-block:: bash

        salt '*' file.chgrp /etc/passwd root
    '''
    path = os.path.expanduser(path)

    user = get_user(path)
    return chown(path, user, group)
python
def reflect(cls, X, **kwargs):
    """Reflect is for visitors where you are exposing some information about
    the types reachable from a starting type to an external system. For
    example, a front-end, a REST URL router and documentation framework, an
    avro schema definition, etc.

    X can be a type or an instance.

    This API should be considered **experimental**
    """
    if isinstance(X, type):
        value = None
        value_type = X
    else:
        value = X
        value_type = type(X)
    if not issubclass(value_type, Record):
        raise TypeError("Cannot reflect on %s" % value_type.__name__)
    visitor = cls.Visitor(
        cls.scantypes, cls.propinfo, cls.itemtypes, cls.typeinfo, **kwargs)
    return cls.map(visitor, value, value_type)
java
public static String encodeHeaderParameter(String name, String value) {
    name = name.toLowerCase(Locale.US);
    // value := token / quoted-string
    if (isToken(value)) {
        return name + "=" + value;
    } else {
        return name + "=" + quote(value);
    }
}
python
def read_yaml(self, filename):
    '''
    Reads and parses a YAML file and returns the content.
    '''
    with open(filename, 'r') as f:
        d = re.sub(r'\{\{ *([^ ]+) *\}\}', r'\1', f.read())
        y = yaml.safe_load(d)
        return y if y else {}
java
public static AssetVersionCreator creator(final String pathServiceSid,
                                          final String pathAssetSid,
                                          final String path,
                                          final AssetVersion.Visibility visibility) {
    return new AssetVersionCreator(pathServiceSid, pathAssetSid, path, visibility);
}
python
def ensure_one_opt(opt, parser, opt_list):
    """ Check that one and only one in the opt_list is defined in opt

    Parameters
    ----------
    opt : object
        Result of option parsing
    parser : object
        OptionParser instance.
    opt_list : list of strings
    """

    the_one = None
    for name in opt_list:
        attr = name[2:].replace('-', '_')
        if hasattr(opt, attr) and (getattr(opt, attr) is not None):
            if the_one is None:
                the_one = name
            else:
                parser.error("%s and %s are mutually exclusive"
                             % (the_one, name))

    if the_one is None:
        parser.error("you must supply one of the following %s"
                     % (', '.join(opt_list)))
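A hedged usage sketch, assuming the function above is in scope; argparse's Namespace and parser.error match the interface the function expects (option names are made up):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--foo')
parser.add_argument('--bar')
opt = parser.parse_args(['--foo', 'x'])

# Exactly one of --foo/--bar is set, so this passes silently;
# passing both (or neither) would call parser.error() and exit.
ensure_one_opt(opt, parser, ['--foo', '--bar'])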
python
def str_extractall(arr, pat, flags=0):
    r"""
    For each subject string in the Series, extract groups from all
    matches of regular expression pat. When each subject string in the
    Series has exactly one match, extractall(pat).xs(0, level='match')
    is the same as extract(pat).

    .. versionadded:: 0.18.0

    Parameters
    ----------
    pat : str
        Regular expression pattern with capturing groups.
    flags : int, default 0 (no flags)
        A ``re`` module flag, for example ``re.IGNORECASE``. These allow
        to modify regular expression matching for things like case, spaces,
        etc. Multiple flags can be combined with the bitwise OR operator,
        for example ``re.IGNORECASE | re.MULTILINE``.

    Returns
    -------
    DataFrame
        A ``DataFrame`` with one row for each match, and one column for each
        group. Its rows have a ``MultiIndex`` with first levels that come
        from the subject ``Series``. The last level is named 'match' and
        indexes the matches in each item of the ``Series``. Any capture
        group names in regular expression pat will be used for column names;
        otherwise capture group numbers will be used.

    See Also
    --------
    extract : Returns first match only (not all matches).

    Examples
    --------
    A pattern with one group will return a DataFrame with one column.
    Indices with no matches will not appear in the result.

    >>> s = pd.Series(["a1a2", "b1", "c1"], index=["A", "B", "C"])
    >>> s.str.extractall(r"[ab](\d)")
            0
     match
    A 0     1
      1     2
    B 0     1

    Capture group names are used for column names of the result.

    >>> s.str.extractall(r"[ab](?P<digit>\d)")
           digit
     match
    A 0        1
      1        2
    B 0        1

    A pattern with two groups will return a DataFrame with two columns.

    >>> s.str.extractall(r"(?P<letter>[ab])(?P<digit>\d)")
           letter digit
     match
    A 0         a     1
      1         a     2
    B 0         b     1

    Optional groups that do not match are NaN in the result.

    >>> s.str.extractall(r"(?P<letter>[ab])?(?P<digit>\d)")
           letter digit
     match
    A 0         a     1
      1         a     2
    B 0         b     1
    C 0       NaN     1
    """
    regex = re.compile(pat, flags=flags)
    # the regex must contain capture groups.
    if regex.groups == 0:
        raise ValueError("pattern contains no capture groups")

    if isinstance(arr, ABCIndexClass):
        arr = arr.to_series().reset_index(drop=True)

    names = dict(zip(regex.groupindex.values(), regex.groupindex.keys()))
    columns = [names.get(1 + i, i) for i in range(regex.groups)]
    match_list = []
    index_list = []
    is_mi = arr.index.nlevels > 1

    for subject_key, subject in arr.iteritems():
        if isinstance(subject, str):

            if not is_mi:
                subject_key = (subject_key, )

            for match_i, match_tuple in enumerate(regex.findall(subject)):
                if isinstance(match_tuple, str):
                    match_tuple = (match_tuple,)
                na_tuple = [np.NaN if group == "" else group
                            for group in match_tuple]
                match_list.append(na_tuple)
                result_key = tuple(subject_key + (match_i, ))
                index_list.append(result_key)

    from pandas import MultiIndex
    index = MultiIndex.from_tuples(
        index_list, names=arr.index.names + ["match"])

    result = arr._constructor_expanddim(match_list, index=index,
                                        columns=columns)
    return result
python
def stop(self):
    """Stop serving.  Always call this to clean up after yourself."""
    self._stopped = True

    threads = [self._accept_thread]
    threads.extend(self._server_threads)
    self._listening_sock.close()
    for sock in list(self._server_socks):
        try:
            sock.shutdown(socket.SHUT_RDWR)
        except socket.error:
            pass
        try:
            sock.close()
        except socket.error:
            pass

    with self._unlock():
        for thread in threads:
            thread.join(10)

    if self._uds_path:
        try:
            os.unlink(self._uds_path)
        except OSError:
            pass
java
public void marshall(AdminForgetDeviceRequest adminForgetDeviceRequest, ProtocolMarshaller protocolMarshaller) {
    if (adminForgetDeviceRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }

    try {
        protocolMarshaller.marshall(adminForgetDeviceRequest.getUserPoolId(), USERPOOLID_BINDING);
        protocolMarshaller.marshall(adminForgetDeviceRequest.getUsername(), USERNAME_BINDING);
        protocolMarshaller.marshall(adminForgetDeviceRequest.getDeviceKey(), DEVICEKEY_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
private void addFieldElement(ECFieldElement v) {
    final byte[] p = BigIntegers.asUnsignedByteArray(this.curveLength, v.toBigInteger());
    this.digest.update(p, 0, p.length);
}
python
def _error_to_string(self, error_id):
    """Returns an error string from libiperf

    :param error_id: The error_id produced by libiperf
    :rtype: string
    """
    strerror = self.lib.iperf_strerror
    strerror.restype = c_char_p
    return strerror(error_id).decode('utf-8')
python
def on_before_publish_insert_request_id_header(headers, **kwargs):
    """
    This function is meant to be used as signal processor for
    "before_task_publish".

    :param Dict headers: The headers of the message
    :param kwargs: Any extra keyword arguments
    """
    if _CELERY_X_HEADER not in headers:
        request_id = current_request_id()
        headers[_CELERY_X_HEADER] = request_id
        logger.debug("Forwarding request_id '{}' to the task consumer.".format(request_id))
python
def search(table: LdapObjectClass, query: Optional[Q] = None,
           database: Optional[Database] = None,
           base_dn: Optional[str] = None) -> Iterator[LdapObject]:
    """ Search for an object of given type in the database. """
    fields = table.get_fields()
    db_fields = {
        name: field
        for name, field in fields.items()
        if field.db_field
    }

    database = get_database(database)
    connection = database.connection

    search_options = table.get_search_options(database)

    iterator = tldap.query.search(
        connection=connection,
        query=query,
        fields=db_fields,
        base_dn=base_dn or search_options.base_dn,
        object_classes=search_options.object_class,
        pk=search_options.pk_field,
    )

    for dn, data in iterator:
        python_data = _db_to_python(data, table, dn)
        python_data = table.on_load(python_data, database)
        yield python_data
java
private void rebuildSimpleTab() {
    m_form.removeGroup(CmsPropertyPanel.TAB_SIMPLE);
    CmsPropertyPanel panel = ((CmsPropertyPanel) m_form.getWidget());
    panel.clearTab(CmsPropertyPanel.TAB_SIMPLE);
    if (m_handler.hasEditableName()) {
        m_form.addField(CmsPropertyPanel.TAB_SIMPLE, createUrlNameField());
    }
    internalBuildConfiguredFields();
    m_form.renderGroup(CmsPropertyPanel.TAB_SIMPLE);
}
java
@Override
public int getChildCount(final List<Integer> row) {
    // Should not occur
    if (row == null || row.isEmpty()) {
        return 0;
    }

    // No expandable levels defined, so always 0
    if (levels.size() == 1) {
        return 0;
    }

    // Check iterations
    if (isIterateFirstLevel() && getMaxIterations() > -1 && row.size() > getMaxIterations()) {
        // Reached limit of iterations
        return 0;
    }

    // Index for current level
    int lvlIndex = getLevelIndex(row);

    // Check there is a Next Level
    int nxtIdx = isIterateFirstLevel() ? 1 : lvlIndex + 1;
    if (nxtIdx >= levels.size()) {
        return 0;
    }
    LevelDetails nxtLevel = levels.get(nxtIdx);

    // Get row bean
    Object rowBean = getRowBean(row);
    if (rowBean == null) {
        return 0;
    }

    // Data for next level
    Object lvlData = getBeanPropertyValue(nxtLevel.getLevelBeanProperty(), rowBean);
    if (lvlData == null) {
        return 0;
    }

    // If row per item, then count children
    if (nxtLevel.isRowPerListItem()) {
        return getSize(lvlData);
    } else {
        // One child
        return 1;
    }
}
python
def removed(name, ruby=None, user=None, gem_bin=None):
    '''
    Make sure that a gem is not installed.

    name
        The name of the gem to uninstall

    gem_bin : None
        Full path to ``gem`` binary to use.

    ruby : None
        If RVM or rbenv are installed, the ruby version and gemset to use.
        Ignored if ``gem_bin`` is specified.

    user: None
        The user under which to run the ``gem`` command

        .. versionadded:: 0.17.0
    '''
    ret = {'name': name, 'result': None, 'comment': '', 'changes': {}}

    if name not in __salt__['gem.list'](name, ruby, gem_bin=gem_bin, runas=user):
        ret['result'] = True
        ret['comment'] = 'Gem is not installed.'
        return ret

    if __opts__['test']:
        ret['comment'] = 'The gem {0} would have been removed'.format(name)
        return ret

    if __salt__['gem.uninstall'](name, ruby, gem_bin=gem_bin, runas=user):
        ret['result'] = True
        ret['changes'][name] = 'Removed'
        ret['comment'] = 'Gem was successfully removed.'
    else:
        ret['result'] = False
        ret['comment'] = 'Could not remove gem.'
    return ret
java
@Api
public void setMapType(MapType type) {
    this.type = type;
    if (googleMap != null) {
        setMapType(googleMap, type.toString());
    }
}
java
public static String formatUnknownKey(String keyName) {
    StringBuffer buf = new StringBuffer(64);
    buf.append(UNKNOWN_KEY_EXTENSION);
    buf.append(" ");
    buf.append(keyName);
    buf.append(" ");
    buf.append(UNKNOWN_KEY_EXTENSION);
    return buf.toString();
}
python
def is_raster_y_inverted(layer):
    """Check if the raster is upside down, ie Y inverted.

    See issue : https://github.com/inasafe/inasafe/issues/4026

    :param layer: The layer to test.
    :type layer: QgsRasterLayer

    :return: A boolean to know if the raster is correct or not.
    :rtype: bool
    """
    info = gdal.Info(layer.source(), format='json')
    y_maximum = info['cornerCoordinates']['upperRight'][1]
    y_minimum = info['cornerCoordinates']['lowerRight'][1]

    return y_maximum < y_minimum
python
def load_lsdsng(filename):
    """Load a Project from a ``.lsdsng`` file.

    :param filename: the name of the file from which to load
    :rtype: :py:class:`pylsdj.Project`
    """
    # Load preamble data so that we know the name and version of the song
    with open(filename, 'rb') as fp:
        preamble_data = bread.parse(fp, spec.lsdsng_preamble)

    with open(filename, 'rb') as fp:
        # Skip the preamble this time around
        fp.seek(int(len(preamble_data) / 8))

        # Load compressed data into a block map and use BlockReader to
        # decompress it
        factory = BlockFactory()

        while True:
            block_data = bytearray(fp.read(blockutils.BLOCK_SIZE))

            if len(block_data) == 0:
                break

            block = factory.new_block()
            block.data = block_data

        remapped_blocks = filepack.renumber_block_keys(factory.blocks)

        reader = BlockReader()
        compressed_data = reader.read(remapped_blocks)

        # Now, decompress the raw data and use it and the preamble to
        # construct a Project
        raw_data = filepack.decompress(compressed_data)

        name = preamble_data.name
        version = preamble_data.version
        size_blks = int(math.ceil(
            float(len(compressed_data)) / blockutils.BLOCK_SIZE))

        return Project(name, version, size_blks, raw_data)
python
def _serialize_pages(self):
    """ Return a JSON API compliant pagination links section

    If the paginator has a value for a given link then this method
    will also add the same links to the response objects `link`
    header according to the guidance of RFC 5988.

    Falcon has a native add_link helper for forming the `link`
    header according to RFC 5988.

    :return: dict of links used for pagination
    """
    pages = self.req.pages.to_dict()
    links = {}

    for key, val in pages.items():
        if val:
            params = self.req.params
            params.update(val)

            links[key] = '%s?%s' % (self.req.path, urlencode(params))
            self.resp.add_link(links[key], key)
        else:
            links[key] = val

    return links
python
def smooth(x, y, degree=1, logx=False, logy=False):
    """Smooth y-values and return new x, y pair.

    :param x,y: data values
    :param degree: degree of smoothing

    Smooth data by using a recursive linear interpolation technique.  For
    degree = 0, return the original values.  For degree = 1, generate an
    evenly spaced sequence of x-values, with the length equal to the
    original length.  The y-values are linearly interpolated for these
    x-values.  For degree >= 2, calls itself with degree - 1, then
    calculates new x-values by taking the averages of the returned
    x-values, and calculates new y-values by linear interpolation.  The
    return values are thus reduced in length by one sample.
    """
    if degree == 0:
        return x, y
    else:
        if logx:
            x = np.log10(x)
        if logy:
            y = np.log10(y)

        if degree == 1:
            # generate new linearly spaced x-points
            smoothed_x = np.linspace(min(x), max(x), len(x))
            # generate new y-points using linear interpolation
            smoothed_y = np.interp(smoothed_x, x, y)
        else:
            # smooth data by linear interpolation
            x, y = smooth(x, y, degree - 1)
            smoothed_x = (x[:-1] + x[1:]) / 2
            smoothed_y = np.interp(smoothed_x, x, y)

        if logx:
            smoothed_x = 10 ** smoothed_x
        if logy:
            smoothed_y = 10 ** smoothed_y

        return smoothed_x, smoothed_y
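The degree-1 case reduces to resampling onto an even grid; a numpy-only sketch with made-up sample data:

import numpy as np

# Unevenly spaced sample data (invented for illustration).
x = np.array([0.0, 0.5, 2.0, 3.5, 4.0])
y = np.array([1.0, 2.0, 0.5, 3.0, 2.5])

# Degree-1 smoothing: evenly spaced x of the same length, y interpolated.
smoothed_x = np.linspace(x.min(), x.max(), len(x))
smoothed_y = np.interp(smoothed_x, x, y)
print(smoothed_x)  # [0. 1. 2. 3. 4.]
print(smoothed_y)  # linearly interpolated y-values at those points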
java
public boolean canPing() {
    val uidPsw = getClass().getSimpleName();
    for (val server : this.servers) {
        LOGGER.debug("Attempting to ping RADIUS server [{}] via simulating an authentication request. "
                + "If the server responds successfully, mock authentication will fail correctly.", server);
        try {
            server.authenticate(uidPsw, uidPsw);
        } catch (final TimeoutException | SocketTimeoutException e) {
            LOGGER.debug("Server [{}] is not available", server);
            continue;
        } catch (final Exception e) {
            LOGGER.debug("Pinging RADIUS server was successful. Response [{}]", e.getMessage());
        }
        return true;
    }
    return false;
}
python
def get_witnesses(self, name='*'):
    """Returns a generator supplying `WitnessText` objects for each work
    in the corpus.

    :rtype: `generator` of `WitnessText`
    """
    for filepath in glob.glob(os.path.join(self._path, name, '*.txt')):
        if os.path.isfile(filepath):
            name = os.path.split(os.path.split(filepath)[0])[1]
            siglum = os.path.splitext(os.path.basename(filepath))[0]
            yield self.get_witness(name, siglum)
java
public void send(StDatapoint datapoint) {
    StDatapointValidator.validate(datapoint);
    sender.send(Collections.singletonList(datapoint));
}
java
@CheckReturnValue
public AuditableRestAction<Void> ban(Member member, int delDays) {
    return ban(member, delDays, null);
}
python
def if_then(self, classical_reg, if_program, else_program=None):
    """
    If the classical register at index classical reg is 1, run if_program,
    else run else_program.

    Equivalent to the following construction:

    .. code::

        IF [c]:
            instrA...
        ELSE:
            instrB...
        =>
          JUMP-WHEN @THEN [c]
          instrB...
          JUMP @END
          LABEL @THEN
          instrA...
          LABEL @END

    :param int classical_reg: The classical register to check as the condition
    :param Program if_program: A Quil program to execute if classical_reg is 1
    :param Program else_program: A Quil program to execute if classical_reg
        is 0. This argument is optional and defaults to an empty Program.
    :returns: The Quil Program with the branching instructions added.
    :rtype: Program
    """
    else_program = else_program if else_program is not None else Program()

    label_then = LabelPlaceholder("THEN")
    label_end = LabelPlaceholder("END")
    self.inst(JumpWhen(target=label_then, condition=unpack_classical_reg(classical_reg)))
    self.inst(else_program)
    self.inst(Jump(label_end))
    self.inst(JumpTarget(label_then))
    self.inst(if_program)
    self.inst(JumpTarget(label_end))
    return self
python
def _retransmit(self, transaction, message, future_time, retransmit_count):
    """
    Thread function to retransmit the message in the future

    :param transaction: the transaction that owns the message that needs retransmission
    :param message: the message that needs the retransmission task
    :param future_time: the amount of time to wait before a new attempt
    :param retransmit_count: the number of retransmissions
    """
    with transaction:
        while retransmit_count < defines.MAX_RETRANSMIT \
                and (not message.acknowledged and not message.rejected) \
                and not self.stopped.isSet():
            transaction.retransmit_stop.wait(timeout=future_time)
            if not message.acknowledged and not message.rejected and not self.stopped.isSet():
                retransmit_count += 1
                future_time *= 2
                self.send_datagram(message)

        if message.acknowledged or message.rejected:
            message.timeouted = False
        else:
            logger.warning("Give up on message {message}".format(message=message.line_print))
            message.timeouted = True
            if message.observe is not None:
                self._observeLayer.remove_subscriber(message)

        try:
            self.to_be_stopped.remove(transaction.retransmit_stop)
        except ValueError:
            pass
        transaction.retransmit_stop = None
        transaction.retransmit_thread = None
java
private void processModifiersAndVariablesOfFieldDeclaration(FieldDeclaration fieldDeclaration,
        FieldMetadata arrayTypeFieldMetadata) {
    processModifiersOfFieldDeclaration(fieldDeclaration, arrayTypeFieldMetadata);
    processVariablesOfVariableDeclarationFragment(fieldDeclaration, arrayTypeFieldMetadata);
}
java
public static QueryColumn toQueryColumn(Object o) throws PageException {
    if (o instanceof QueryColumn) return (QueryColumn) o;
    throw new CasterException(o, "querycolumn");
}
java
public static void glBlendColor(float r, float g, float b, float a) {
    checkContextCompatibility();
    nglBlendColor(r, g, b, a);
}
java
@Override
public void shutdown() {
    // Since Aeron's poll isn't blocking, all we need is just special flag
    runner.set(false);
    try {
        threadA.join();

        if (threadB != null)
            threadB.join();
    } catch (Exception e) {
        //
    }
    CloseHelper.quietClose(driver);
    try {
        Thread.sleep(500);
    } catch (Exception e) {
    }
}
java
@Override
public void saveChanges() {
    BatchOperation saveChangeOperation = new BatchOperation(this);

    try (BatchCommand command = saveChangeOperation.createRequest()) {
        if (command == null) {
            return;
        }

        if (noTracking) {
            throw new IllegalStateException("Cannot execute saveChanges when entity tracking is disabled in session.");
        }

        _requestExecutor.execute(command, sessionInfo);
        updateSessionAfterSaveChanges(command.getResult());
        saveChangeOperation.setResult(command.getResult());
    }
}
java
private static boolean matchInf(byte[] str, byte firstchar, int start, int end) {
    final int len = end - start;
    // The wonders of unicode. The infinity symbol \u221E is three bytes:
    if (len == 3 && firstchar == -0x1E && str[start + 1] == -0x78 && str[start + 2] == -0x62) {
        return true;
    }
    if ((len != 3 && len != INFINITY_LENGTH) //
            || (firstchar != 'I' && firstchar != 'i')) {
        return false;
    }
    for (int i = 1, j = INFINITY_LENGTH + 1; i < INFINITY_LENGTH; i++, j++) {
        final byte c = str[start + i];
        if (c != INFINITY_PATTERN[i] && c != INFINITY_PATTERN[j]) {
            return false;
        }
        if (i == 2 && len == 3) {
            return true;
        }
    }
    return true;
}
java
public void resolveType() {
    if (getTypeStyle() == null) {
        setTypeStyle("");
    }
    final String fieldType = getFieldType();
    String generics = "";
    if (fieldType.contains("<")) {
        generics = fieldType.substring(fieldType.indexOf('<'));
    }
    if (getTypeStyle().equals("smart")) {
        setType(fieldType);
    } else if (getTypeStyle().length() > 0) {
        if (getTypeStyle().contains("<>")) {
            setType(getTypeStyle().replace("<>", generics));
        } else if (getTypeStyle().contains("<")) {
            setType(getTypeStyle());
        } else {
            setType(getTypeStyle() + generics);
        }
    } else {
        setType(fieldType);
    }
}
python
def PartialDynamicSystem(self, ieq, variable):
    """
    returns dynamical system blocks associated to output variable
    """
    if ieq == 0:
        # w2=Rw1
        if variable == self.physical_nodes[0].variable:
            # w1=w2/R
            return [Gain(self.physical_nodes[1].variable, variable, 1 / self.ratio)]
        elif variable == self.physical_nodes[1].variable:
            # w2=Rw1
            return [Gain(self.physical_nodes[0].variable, variable, self.ratio)]
    elif ieq == 1:
        # C1=-RC2
        if variable == self.variables[0]:
            # C1=-RC2
            return [Gain(self.variables[1], variable, -self.ratio)]
        elif variable == self.variables[1]:
            # C2=-C1/R
            return [Gain(self.variables[0], variable, -1 / self.ratio)]
python
def _NormalizedVolumeIdentifiers(self, volume_system, volume_identifiers, prefix='v'):
    """Normalizes volume identifiers.

    Args:
        volume_system (VolumeSystem): volume system.
        volume_identifiers (list[int|str]): allowed volume identifiers,
            formatted as an integer or string with prefix.
        prefix (Optional[str]): volume identifier prefix.

    Returns:
        list[str]: volume identifiers with prefix.

    Raises:
        ScannerError: if the volume identifier is not supported or no volume
            could be found that corresponds with the identifier.
    """
    normalized_volume_identifiers = []
    for volume_identifier in volume_identifiers:
        if isinstance(volume_identifier, int):
            volume_identifier = '{0:s}{1:d}'.format(prefix, volume_identifier)

        elif not volume_identifier.startswith(prefix):
            try:
                volume_identifier = int(volume_identifier, 10)
                volume_identifier = '{0:s}{1:d}'.format(prefix, volume_identifier)
            except (TypeError, ValueError):
                pass

        try:
            volume = volume_system.GetVolumeByIdentifier(volume_identifier)
        except KeyError:
            volume = None

        if not volume:
            raise errors.ScannerError(
                'Volume missing for identifier: {0:s}.'.format(volume_identifier))

        normalized_volume_identifiers.append(volume_identifier)

    return normalized_volume_identifiers
java
public void removeChild(int index) {
    N child = children.remove(index);
    child.setParent(null);
}
python
def _extract_text_and_child_element_list(minidom_node):
    """Returns a pair of the "child" content of minidom_node:
        the first element of the pair is a concatenation of the text content
        the second element is a list of non-text nodes.

    The string concatenation strips leading and trailing whitespace from each
    bit of text found and joins the fragments (with no separator between
    them).
    """
    tl = []
    ntl = []
    for c in minidom_node.childNodes:
        if c.nodeType == xml.dom.minidom.Node.TEXT_NODE:
            tl.append(c)
        else:
            ntl.append(c)
    try:
        tl = [i.data.strip() for i in tl]
        text_content = ''.join(tl)
    except:
        text_content = ''
    return text_content, ntl
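A stdlib-only usage sketch, assuming the function above is in scope:

import xml.dom.minidom

doc = xml.dom.minidom.parseString('<root> hello <b>bold</b> world </root>')
text, elements = _extract_text_and_child_element_list(doc.documentElement)
print(repr(text))                     # 'helloworld' (fragments stripped, then joined)
print([e.tagName for e in elements])  # ['b']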
python
def off(self):
    """Send an OFF message to device group."""
    off_command = ExtendedSend(self._address,
                               COMMAND_LIGHT_OFF_0X13_0X00,
                               self._udata)
    off_command.set_checksum()
    self._send_method(off_command, self._off_message_received)
python
def dbmin50years(self, value=None):
    """Corresponds to IDD Field `dbmin50years`
    50-year return period values for minimum extreme dry-bulb temperature

    Args:
        value (float): value for IDD Field `dbmin50years`
            Unit: C
            if `value` is None it will not be checked against the
            specification and is assumed to be a missing value

    Raises:
        ValueError: if `value` is not a valid value
    """
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float '
                             'for field `dbmin50years`'.format(value))

    self._dbmin50years = value
python
def add_stock(self, product_id, sku_info, quantity):
    """
    Increase stock

    :param product_id: product ID
    :param sku_info: SKU information, in the format "id1:vid1;id2:vid2";
        if the product has a single uniform specification, pass an empty
        string here
    :param quantity: the stock quantity to add
    :return: the returned JSON data packet
    """
    return self._post(
        'merchant/stock/add',
        data={
            "product_id": product_id,
            "sku_info": sku_info,
            "quantity": quantity
        }
    )
python
def aq_esc_telemetry_encode(self, time_boot_ms, seq, num_motors, num_in_seq, escid,
                            status_age, data_version, data0, data1):
    '''
    Sends ESC32 telemetry data for up to 4 motors. Multiple messages may
    be sent in sequence when system has > 4 motors. Data is described as
    follows:

        // unsigned int state   :  3;
        // unsigned int vin     : 12;  // x 100
        // unsigned int amps    : 14;  // x 100
        // unsigned int rpm     : 15;
        // unsigned int duty    :  8;  // x (255/100)
        // - Data Version 2 -
        // unsigned int errors  :  9;  // Bad detects error count
        // - Data Version 3 -
        // unsigned int temp    :  9;  // (Deg C + 32) * 4
        // unsigned int errCode :  3;

    time_boot_ms : Timestamp of the component clock since boot time in ms. (uint32_t)
    seq          : Sequence number of message (first set of 4 motors is #1, next 4 is #2, etc). (uint8_t)
    num_motors   : Total number of active ESCs/motors on the system. (uint8_t)
    num_in_seq   : Number of active ESCs in this sequence (1 through this many array members will be populated with data) (uint8_t)
    escid        : ESC/Motor ID (uint8_t)
    status_age   : Age of each ESC telemetry reading in ms compared to boot time. A value of 0xFFFF means timeout/no data. (uint16_t)
    data_version : Version of data structure (determines contents). (uint8_t)
    data0        : Data bits 1-32 for each ESC. (uint32_t)
    data1        : Data bits 33-64 for each ESC. (uint32_t)
    '''
    return MAVLink_aq_esc_telemetry_message(time_boot_ms, seq, num_motors, num_in_seq,
                                            escid, status_age, data_version, data0, data1)
java
private void not() {
    tq.consume(":not");
    String subQuery = tq.chompBalanced('(', ')');
    Validate.notEmpty(subQuery, ":not(selector) subselect must not be empty");

    evals.add(new StructuralEvaluator.Not(parse(subQuery)));
}
python
def multiupload(self, filename, hosts):
    """Upload file to multiple hosts simultaneously

    The upload will be attempted for each host until the optimal file
    redundancy is achieved (a percentage of successful uploads) or the host
    list is depleted.

    :param filename: The filename of the file to upload.
    :type filename: str
    :param hosts: A list of hosts as defined in the master host list.
    :type hosts: list
    :returns: A list of dicts with 'host_name' and 'url' keys for all
        successful uploads or an empty list if all uploads failed.
    :rtype: list
    """
    manager = Manager()
    successful_uploads = manager.list([])

    def f(host):
        if len(successful_uploads) / float(len(hosts)) < \
                settings.MIN_FILE_REDUNDANCY:
            # Optimal redundancy not achieved, keep going
            result = self.upload_to_host(filename, host)
            if 'error' in result:
                self._host_errors[host] += 1
            else:
                successful_uploads.append(result)

    multiprocessing.dummy.Pool(len(hosts)).map(
        f, self._hosts_by_success(hosts))

    return list(successful_uploads)
java
public String uploadSliceFile(UploadSliceFileRequest request) throws AbstractCosException {
        request.check_param();

        String controlRet = uploadSliceControl(request);
        JSONObject controlRetJson = new JSONObject(controlRet);
        // If the slice-control request already failed, return its result
        if (controlRetJson.getInt(ResponseBodyKey.CODE) != 0) {
            return controlRet;
        }
        // Instant-upload hit: the file already exists on the server
        if (controlRetJson.getJSONObject(ResponseBodyKey.DATA).has(ResponseBodyKey.Data.ACCESS_URL)) {
            return controlRet;
        }

        int sliceSize = controlRetJson.getJSONObject(ResponseBodyKey.DATA).getInt(ResponseBodyKey.Data.SLICE_SIZE);
        long offset = controlRetJson.getJSONObject(ResponseBodyKey.DATA).getLong(ResponseBodyKey.Data.OFFSET);
        String session = controlRetJson.getJSONObject(ResponseBodyKey.DATA).getString(ResponseBodyKey.Data.SESSION);

        String localPath = request.getLocalPath();
        InputStream inputStream = null;
        try {
            long fileSize = CommonFileUtils.getFileLength(localPath);
            inputStream = CommonFileUtils.getFileInputStream(localPath);
            inputStream.skip(offset);
            String uploadDataRet = "";
            while (offset < fileSize) {
                String sliceContent = CommonFileUtils.getFileContent(inputStream, 0, sliceSize);
                uploadDataRet = uploadSliceData(request, sliceContent, session, offset);
                JSONObject dataRetJson = new JSONObject(uploadDataRet);
                if (dataRetJson.getInt(ResponseBodyKey.CODE) != 0) {
                    return uploadDataRet;
                } else {
                    if (dataRetJson.getJSONObject(ResponseBodyKey.DATA).has(ResponseBodyKey.Data.ACCESS_URL)) {
                        return uploadDataRet;
                    }
                }
                offset += sliceSize;
            }
            return uploadDataRet;
        } catch (Exception e) {
            throw new UnknownException(e.getMessage());
        } finally {
            CommonFileUtils.closeFileStream(inputStream, localPath);
        }
    }
python
def values(self): """Return the `list representation`_ of the binary tree. .. _list representation: https://en.wikipedia.org/wiki/Binary_tree#Arrays :return: List representation of the binary tree, which is a list of node values in breadth-first order starting from the root (current node). If a node is at index i, its left child is always at 2i + 1, right child at 2i + 2, and parent at index floor((i - 1) / 2). None indicates absence of a node at that index. See example below for an illustration. :rtype: [int | float | None] **Example**: .. doctest:: >>> from binarytree import Node >>> >>> root = Node(1) >>> root.left = Node(2) >>> root.right = Node(3) >>> root.left.right = Node(4) >>> >>> root.values [1, 2, 3, None, 4] """ current_nodes = [self] has_more_nodes = True values = [] while has_more_nodes: has_more_nodes = False next_nodes = [] for node in current_nodes: if node is None: values.append(None) next_nodes.extend((None, None)) continue if node.left is not None or node.right is not None: has_more_nodes = True values.append(node.value) next_nodes.extend((node.left, node.right)) current_nodes = next_nodes # Get rid of trailing None's while values and values[-1] is None: values.pop() return values
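The index arithmetic from the docstring can be checked directly against the returned list; a short sketch using the same tree as the doctest above:

values = [1, 2, 3, None, 4]   # root.values for the doctest tree
i = 1                         # index of the node with value 2
left, right = 2 * i + 1, 2 * i + 2
parent = (i - 1) // 2
print(values[parent], values[left], values[right])  # 1 None 4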
python
def get_instance(self, payload): """ Build an instance of TriggerInstance :param dict payload: Payload response from the API :returns: twilio.rest.api.v2010.account.usage.trigger.TriggerInstance :rtype: twilio.rest.api.v2010.account.usage.trigger.TriggerInstance """ return TriggerInstance(self._version, payload, account_sid=self._solution['account_sid'], )
python
def press(keys, presses=1, interval=0.0, pause=None, _pause=True):
    """Performs a keyboard key press down, followed by a release.

    Args:
      keys (str, list): The key to be pressed. The valid names are listed in
      KEYBOARD_KEYS. Can also be a list of such strings.
      presses (integer, optional): The number of press repetitions.
      1 by default, for just one press.
      interval (float, optional): How many seconds between each press.
      0.0 by default, for no pause between presses.
      pause (float, optional): How many seconds to pause at the end of the
      function. None by default, for no pause at the end.

    Returns:
      None
    """
    if type(keys) == str:
        keys = [keys] # put string in a list
    else:
        lowerKeys = []
        for s in keys:
            if len(s) > 1:
                lowerKeys.append(s.lower())
            else:
                lowerKeys.append(s)
        keys = lowerKeys  # normalize multi-character key names to lowercase
    interval = float(interval)
    for i in range(presses):
        for k in keys:
            _failSafeCheck()
            platformModule._keyDown(k)
            platformModule._keyUp(k)
        time.sleep(interval)

    _autoPause(pause, _pause)
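A few example calls, assuming the function is exposed at module level as in PyAutoGUI-style APIs; note that a list argument presses each key in sequence, it does not hold them down as a chord:

press("enter")                        # one press of a single key
press("x", presses=3, interval=0.25)  # three presses, 0.25 s apart
press(["left", "left", "up"])         # sequential presses, not a hotkey combo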
python
def resize_canvas(self, height=400, width=400): """ Function for the user to resize the internal Canvas widget if desired. :param height: new height in pixels :type height: int :param width: new width in pixels :type width: int """ self._canvas.configure(width=width, height=height)
java
public boolean includesAny(Tech... techs) { if (techs == null || techs.length == 0) { return false; } for (Tech tech : techs) { if (includes(tech)) { return true; } } return false; }
java
public static authenticationradiuspolicy_systemglobal_binding[] get(nitro_service service, String name) throws Exception{ authenticationradiuspolicy_systemglobal_binding obj = new authenticationradiuspolicy_systemglobal_binding(); obj.set_name(name); authenticationradiuspolicy_systemglobal_binding response[] = (authenticationradiuspolicy_systemglobal_binding[]) obj.get_resources(service); return response; }
python
def _offline_fcp_device(self, fcp, target_wwpn, target_lun, multipath):
        """SLES offline zfcp; same as for all RHEL distros."""
        device = '0.0.%s' % fcp
        # disk config
        disk_config = '/sbin/zfcp_disk_configure ' +\
            '%(device)s %(wwpn)s %(lun)s 0' %\
            {'device': device, 'wwpn': target_wwpn, 'lun': target_lun}
        # host config
        host_config = '/sbin/zfcp_host_configure %s 0' % device
        return '\n'.join((disk_config, host_config))
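The method only builds a shell snippet; a sketch of its output for made-up identifiers (`dist` stands in for an instance of the distro class that defines it):

snippet = dist._offline_fcp_device(
    '1fc5', '0x500507680b214ac1', '0x0001000000000000', multipath=False)
print(snippet)
# /sbin/zfcp_disk_configure 0.0.1fc5 0x500507680b214ac1 0x0001000000000000 0
# /sbin/zfcp_host_configure 0.0.1fc5 0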
python
def hdel(self, name, *keys):
        """
        Delete one or more hash fields.

        :param name: str  the name of the redis key
        :param keys: one or more members to remove from the key.
        :return: Future()
        """
        with self.pipe as pipe:
            m_encode = self.memberparse.encode
            keys = [m_encode(m) for m in self._parse_values(keys)]
            return pipe.hdel(self.redis_key(name), *keys)
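A usage sketch, assuming `h` is an instance of the hash wrapper defining hdel() and that the key already holds the named fields:

# queue removal of two fields from the hash stored under redis_key("user:1");
# the returned Future resolves like Redis HDEL, to the number of fields removed
future = h.hdel("user:1", "email", "nickname")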
python
def _str_parser(string):
    """
    Return the appropriate parsing method for the given string, chosen
    by its content and length.

    :param string: string to inspect
    :return: parser method
    """
    if not any(c.isalpha() for c in string):
        _string = string[:19]
        _length = len(_string)
        if _length > 10:
            return BaseParser.parse_datetime
        elif 6 <= _length <= 10:
            if ':' in _string:
                return BaseParser.parse_time
            else:
                return BaseParser.parse_date
        elif _length < 6:
            return BaseParser.parse_time
        else:
            # unreachable: the length checks above are exhaustive
            return BaseParser.parse_special
    else:
        return BaseParser.__parse_not_only_str
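A few concrete inputs and the parser each would dispatch to, following the branches above (assuming _str_parser is importable from its module):

_str_parser("2019-03-04 10:20:30")  # length 19 > 10    -> parse_datetime
_str_parser("10:20:30")             # length 8, has ':' -> parse_time
_str_parser("2019-03-04")           # length 10, no ':' -> parse_date
_str_parser("10:20")                # length 5 < 6      -> parse_time
_str_parser("yesterday")            # contains letters  -> __parse_not_only_str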
python
def overlay(repository, files, version, debug=False): """ Overlay files from the specified repository/version into the given directory and return None. :param repository: A string containing the path to the repository to be extracted. :param files: A list of `FileConfig` objects. :param version: A string containing the branch/tag/sha to be exported. :param debug: An optional bool to toggle debug output. :return: None """ with util.saved_cwd(): os.chdir(repository) _get_version(version, debug) for fc in files: if '*' in fc.src: for filename in glob.glob(fc.src): util.copy(filename, fc.dst) msg = ' - copied ({}) {} to {}'.format( version, filename, fc.dst) util.print_info(msg) else: if os.path.isdir(fc.dst) and os.path.isdir(fc.src): shutil.rmtree(fc.dst) util.copy(fc.src, fc.dst) msg = ' - copied ({}) {} to {}'.format( version, fc.src, fc.dst) util.print_info(msg)
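A usage sketch; `FileConfig` here is a stand-in namedtuple with the `src`/`dst` attributes the function reads, not the real class from the source project, and the paths and tag are made up:

from collections import namedtuple

FileConfig = namedtuple("FileConfig", ["src", "dst"])  # hypothetical stand-in

overlay(
    repository="/tmp/checkout/my-repo",
    files=[
        FileConfig(src="configs/*.yml", dst="/etc/app/"),     # glob -> many files
        FileConfig(src="bin/tool", dst="/usr/local/bin/tool"),
    ],
    version="v1.2.0",
)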
python
def _json_data(x, extraneous):
    """This function calls a json_data method, if the type has one,
    otherwise calls back into to_json().  It also checks whether the
    method takes an 'extraneous' argument and passes that through
    if possible."""
    if type(x) in has_json_data and has_json_data[type(x)]:
        if json_data_takes_extraneous[type(x)]:
            return x.json_data(extraneous=extraneous)
        else:
            return x.json_data()
    else:
        htj = hasattr(x, "json_data") and callable(x.json_data)
        has_json_data[type(x)] = htj
        if htj:
            argspec = inspect.getargspec(x.json_data)
            tjte = 'extraneous' in argspec.args or argspec.keywords
            json_data_takes_extraneous[type(x)] = tjte
            if tjte:
                return x.json_data(extraneous=extraneous)
            else:
                return x.json_data()
        else:
            return to_json(x, extraneous)
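A sketch of a class that cooperates with the dispatcher above; `to_json` and the two module-level caches are assumed to exist as in the surrounding module:

class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

    def json_data(self, extraneous=False):
        # because this signature accepts 'extraneous', _json_data will
        # pass the flag through on every call after the first probe
        data = {"x": self.x, "y": self.y}
        if extraneous:
            data["type"] = "Point"
        return data

print(_json_data(Point(1, 2), extraneous=True))
# {'x': 1, 'y': 2, 'type': 'Point'}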
java
public IRenderingElement generateDiagram(T object) { ElementGroup diagram = new ElementGroup(); for (IGenerator<T> generator : this.generators) { diagram.add(generator.generate(object, this.rendererModel)); } return diagram; }
python
def loaders(self): # pragma: no cover """Return available loaders""" if self.LOADERS_FOR_DYNACONF in (None, 0, "0", "false", False): self.logger.info("No loader defined") return [] if not self._loaders: for loader_module_name in self.LOADERS_FOR_DYNACONF: loader = importlib.import_module(loader_module_name) self._loaders.append(loader) return self._loaders
java
private static LineString linearZInterpolation(LineString lineString) { double startz = lineString.getStartPoint().getCoordinate().z; double endz = lineString.getEndPoint().getCoordinate().z; if (Double.isNaN(startz) || Double.isNaN(endz)) { return lineString; } else { double length = lineString.getLength(); lineString.apply(new LinearZInterpolationFilter(startz, endz, length)); return lineString; } }
java
@Override public ClientBuilder keyStore(final KeyStore keyStore, final String password) { this.clientKeyStore = keyStore; this.clientPrivateKeyPassword = password; return this; }
python
def get_seconds_until_next_day(now=None): """ Returns the number of seconds until the next day (utc midnight). This is the long-term rate limit used by Strava. :param now: A (utc) timestamp :type now: arrow.arrow.Arrow :return: the number of seconds until next day, as int """ if now is None: now = arrow.utcnow() return (now.ceil('day') - now).seconds
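A quick check of the helper above with a pinned timestamp (arrow is the only dependency; note that ceil('day') yields the last microsecond of the current day, and .seconds truncates the fractional remainder):

import arrow

now = arrow.get("2019-06-01T23:59:00+00:00")
print(get_seconds_until_next_day(now))  # 59 -> under a minute to UTC midnight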
python
def convert_data_array(arr, filter_func=None, converter_func=None):
    '''Filter and convert any given data array of any dtype.

    Parameters
    ----------
    arr : numpy.array
        Data array of any dtype.
    filter_func : function
        Function that takes the array and returns a boolean mask selecting
        the items to keep.
    converter_func : function
        Function that takes the array and returns an array or tuple of arrays.

    Returns
    -------
    Array with the content selected by filter_func and the shape produced
    by converter_func.
    '''
    if filter_func:
        arr = arr[filter_func(arr)]  # indexing with a boolean array keeps matching items
    if converter_func:
        arr = converter_func(arr)
    return arr
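A quick sketch exercising both hooks with NumPy, matching the corrected flow above (filter first, then convert):

import numpy as np

arr = np.array([1, 5, 3, 8, 2])
result = convert_data_array(
    arr,
    filter_func=lambda a: a > 2,      # boolean mask: keep values > 2
    converter_func=lambda a: a * 10,  # scale the surviving values
)
print(result)  # [50 30 80]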
java
public void offerStatement(int stmtHash, int offset, ByteBuffer psetBuffer) { m_inputCRC.update(stmtHash); m_inputCRC.updateFromPosition(offset, psetBuffer); if (m_hashCount < MAX_HASHES_COUNT) { m_hashes[m_hashCount] = stmtHash; m_hashes[m_hashCount + 1] = (int) m_inputCRC.getValue(); } m_hashCount += 2; }