language: stringclasses, 2 values
func_code_string: stringlengths, 63 to 466k
java
public LambdaDslJsonArray or(Object value, MatchingRule... rules) { pactArray.or(value, rules); return this; }
python
def _generate_config(self): """Generate a configuration that can be sent to the Hottop roaster. Configuration settings need to be represented inside of a byte array that is then written to the serial interface. Much of the configuration is static, but control settings are also included and pulled from the shared dictionary. :returns: Byte array of the prepared configuration. """ config = bytearray([0x00] * 36) config[0] = 0xA5 config[1] = 0x96 config[2] = 0xB0 config[3] = 0xA0 config[4] = 0x01 config[5] = 0x01 config[6] = 0x24 config[10] = self._config.get('heater', 0) config[11] = self._config.get('fan', 0) config[12] = self._config.get('main_fan', 0) config[16] = self._config.get('solenoid', 0) config[17] = self._config.get('drum_motor', 0) if self._config.get('heater', 0) > 0: # Override the user here since the drum MUST be on for heat config[17] = 1 config[18] = self._config.get('cooling_motor', 0) config[35] = sum([b for b in config[:35]]) & 0xFF return bytes(config)
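A self-contained sketch of the checksum rule used above (the last byte holds the low byte of the sum of the first 35 bytes); the heater value is a made-up example, the header bytes are copied from the function:

config = bytearray([0x00] * 36)
config[0:7] = bytes([0xA5, 0x96, 0xB0, 0xA0, 0x01, 0x01, 0x24])
config[10] = 100  # hypothetical heater setting
config[17] = 1    # drum motor is forced on whenever the heater is on
config[35] = sum(config[:35]) & 0xFF  # low byte of the running sum
assert config[35] == sum(config[:35]) & 0xFF
print(bytes(config).hex())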
java
public void setGlobalSecondaryIndexes(java.util.Collection<GlobalSecondaryIndexDescription> globalSecondaryIndexes) { if (globalSecondaryIndexes == null) { this.globalSecondaryIndexes = null; return; } this.globalSecondaryIndexes = new java.util.ArrayList<GlobalSecondaryIndexDescription>(globalSecondaryIndexes); }
python
def build_versioned(self, id, **kwargs): """ Builds the configurations for the Specified Set with an option to specify exact revision of a BC This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.build_versioned(id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param int id: Build Configuration Set id (required) :param str callback_url: Optional Callback URL :param bool temporary_build: Is it a temporary build or a standard build? :param bool force_rebuild: DEPRECATED: Use RebuildMode. :param bool timestamp_alignment: Should we add a timestamp during the alignment? Valid only for temporary builds. :param BuildConfigurationSetWithAuditedBCsRest body: :param str rebuild_mode: Rebuild Modes: FORCE: always rebuild all the configurations in the set; EXPLICIT_DEPENDENCY_CHECK: check if any of user defined dependencies has been update; IMPLICIT_DEPENDENCY_CHECK: check if any captured dependency has been updated; :return: BuildConfigSetRecordSingleton If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.build_versioned_with_http_info(id, **kwargs) else: (data) = self.build_versioned_with_http_info(id, **kwargs) return data
python
def _transform(self, X): """Assume X contains only categorical features. Parameters ---------- X : array-like or sparse matrix, shape=(n_samples, n_features) Dense array or sparse matrix. """ X = self._matrix_adjust(X) X = check_array(X, accept_sparse='csc', force_all_finite=False, dtype=int) if X.min() < 0: raise ValueError("X needs to contain only non-negative integers.") n_samples, n_features = X.shape indices = self.feature_indices_ if n_features != indices.shape[0] - 1: raise ValueError("X has different shape than during fitting." " Expected %d, got %d." % (indices.shape[0] - 1, n_features)) # Replace all indicators which were below `minimum_fraction` in the # training set by 'other' if self.minimum_fraction is not None: for column in range(X.shape[1]): if sparse.issparse(X): indptr_start = X.indptr[column] indptr_end = X.indptr[column + 1] unique = np.unique(X.data[indptr_start:indptr_end]) else: unique = np.unique(X[:, column]) for unique_value in unique: if unique_value not in self.do_not_replace_by_other_[column]: if sparse.issparse(X): indptr_start = X.indptr[column] indptr_end = X.indptr[column + 1] X.data[indptr_start:indptr_end][ X.data[indptr_start:indptr_end] == unique_value] = SPARSE_ENCODINGS['OTHER'] else: X[:, column][X[:, column] == unique_value] = SPARSE_ENCODINGS['OTHER'] if sparse.issparse(X): n_values_check = X.max(axis=0).toarray().flatten() + 1 else: n_values_check = np.max(X, axis=0) + 1 # Replace all indicators which are out of bounds by 'other' (index 0) if (n_values_check > self.n_values_).any(): # raise ValueError("Feature out of bounds. Try setting n_values.") for i, n_value_check in enumerate(n_values_check): if (n_value_check - 1) >= self.n_values_[i]: if sparse.issparse(X): indptr_start = X.indptr[i] indptr_end = X.indptr[i+1] X.data[indptr_start:indptr_end][X.data[indptr_start:indptr_end] >= self.n_values_[i]] = 0 else: X[:, i][X[:, i] >= self.n_values_[i]] = 0 if sparse.issparse(X): row_indices = X.indices column_indices = [] for i in range(len(X.indptr) - 1): nbr = X.indptr[i + 1] - X.indptr[i] column_indices_ = [indices[i]] * nbr column_indices_ += X.data[X.indptr[i]:X.indptr[i + 1]] column_indices.extend(column_indices_) data = np.ones(X.data.size) else: column_indices = (X + indices[:-1]).ravel() row_indices = np.repeat(np.arange(n_samples, dtype=np.int32), n_features) data = np.ones(n_samples * n_features) out = sparse.coo_matrix((data, (row_indices, column_indices)), shape=(n_samples, indices[-1]), dtype=self.dtype).tocsc() out = out[:, self.active_features_] return out.tocsr() if self.sparse else out.toarray()
java
public static NodeImpl createContainerElementNode(final String name, final NodeImpl parent) { return new NodeImpl( // name, // parent, // false, // null, // null, // ElementKind.CONTAINER_ELEMENT, // EMPTY_CLASS_ARRAY, // null, // null, // null, // null // ); }
java
boolean removeObserver(ApptentiveNotificationObserver observer) { int index = indexOf(observer); if (index != -1) { observers.remove(index); return true; } return false; }
python
def open_required(func): """Decorator to specify that the J-Link DLL must be opened, and a J-Link connection must be established. Args: func (function): function being decorated Returns: The wrapper function. """ @functools.wraps(func) def wrapper(self, *args, **kwargs): """Wrapper function to check that the given ``JLink`` has been opened. Args: self (JLink): the ``JLink`` instance args: list of arguments to pass to the wrapped function kwargs: key-word arguments dict to pass to the wrapped function Returns: The return value of the wrapped function. Raises: JLinkException: if the J-Link DLL is not open or the J-Link is disconnected. """ if not self.opened(): raise errors.JLinkException('J-Link DLL is not open.') elif not self.connected(): raise errors.JLinkException('J-Link connection has been lost.') return func(self, *args, **kwargs) return wrapper
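A minimal sketch of the same guard-decorator pattern; the Device class and NotOpenError here are stand-ins, not pylink's actual API:

import functools

class NotOpenError(Exception):
    pass

def open_required(func):
    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
        if not self.opened:  # stand-in for JLink's opened()/connected() checks
            raise NotOpenError('device is not open')
        return func(self, *args, **kwargs)
    return wrapper

class Device:
    opened = False

    @open_required
    def read(self):
        return b'\x00'

d = Device()
try:
    d.read()
except NotOpenError as e:
    print(e)  # device is not open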
java
@SuppressWarnings("rawtypes") protected String[] parseSequenceParameter(Map params, String paramName) throws TemplateModelException { Object paramModel = params.get(paramName); if (paramModel == null) { return null; } if (!(paramModel instanceof SimpleSequence)) { throw new IllegalArgumentException(paramName + " must be sequence"); } List<String> list = transformSimpleSequenceAsStringList((SimpleSequence)paramModel, paramName); return list.toArray(new String[0]); }
python
def dumps(obj, preserve=False): """Stringifies a dict as toml :param obj: the object to be dumped into toml :param preserve: optional flag to preserve the inline table in result """ f = StringIO() dump(obj, f, preserve) return f.getvalue()
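The dump-to-StringIO pattern in miniature, with a stand-in dump() since the real toml writer isn't shown here:

from io import StringIO

def dump(obj, f):  # stand-in for the real toml writer
    for k, v in obj.items():
        f.write('{} = {!r}\n'.format(k, v))

def dumps(obj):
    f = StringIO()
    dump(obj, f)
    return f.getvalue()

print(dumps({'a': 1}))  # a = 1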
python
async def expand(self, request: Request, layer: BaseLayer): """ Expand a layer into a list of layers including the pauses. """ if isinstance(layer, lyr.RawText): t = self.reading_time(layer.text) yield layer yield lyr.Sleep(t) elif isinstance(layer, lyr.MultiText): texts = await render(layer.text, request, True) for text in texts: t = self.reading_time(text) yield lyr.RawText(text) yield lyr.Sleep(t) elif isinstance(layer, lyr.Text): text = await render(layer.text, request) t = self.reading_time(text) yield lyr.RawText(text) yield lyr.Sleep(t) else: yield layer
python
def line_is_continuation(line: str) -> bool: """ Args: line Returns: True iff line is a continuation line, else False. """ llstr = line.lstrip() return len(llstr) > 0 and llstr[0] == "&"
java
public void marshall(Cluster cluster, ProtocolMarshaller protocolMarshaller) { if (cluster == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(cluster.getBackupPolicy(), BACKUPPOLICY_BINDING); protocolMarshaller.marshall(cluster.getClusterId(), CLUSTERID_BINDING); protocolMarshaller.marshall(cluster.getCreateTimestamp(), CREATETIMESTAMP_BINDING); protocolMarshaller.marshall(cluster.getHsms(), HSMS_BINDING); protocolMarshaller.marshall(cluster.getHsmType(), HSMTYPE_BINDING); protocolMarshaller.marshall(cluster.getPreCoPassword(), PRECOPASSWORD_BINDING); protocolMarshaller.marshall(cluster.getSecurityGroup(), SECURITYGROUP_BINDING); protocolMarshaller.marshall(cluster.getSourceBackupId(), SOURCEBACKUPID_BINDING); protocolMarshaller.marshall(cluster.getState(), STATE_BINDING); protocolMarshaller.marshall(cluster.getStateMessage(), STATEMESSAGE_BINDING); protocolMarshaller.marshall(cluster.getSubnetMapping(), SUBNETMAPPING_BINDING); protocolMarshaller.marshall(cluster.getVpcId(), VPCID_BINDING); protocolMarshaller.marshall(cluster.getCertificates(), CERTIFICATES_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } }
python
def unstack(self, unstacker_func, fill_value): """Return a blockmanager with all blocks unstacked. Parameters ---------- unstacker_func : callable A (partially-applied) ``pd.core.reshape._Unstacker`` class. fill_value : Any fill_value for newly introduced missing values. Returns ------- unstacked : BlockManager """ n_rows = self.shape[-1] dummy = unstacker_func(np.empty((0, 0)), value_columns=self.items) new_columns = dummy.get_new_columns() new_index = dummy.get_new_index() new_blocks = [] columns_mask = [] for blk in self.blocks: blocks, mask = blk._unstack( partial(unstacker_func, value_columns=self.items[blk.mgr_locs.indexer]), new_columns, n_rows, fill_value ) new_blocks.extend(blocks) columns_mask.extend(mask) new_columns = new_columns[columns_mask] bm = BlockManager(new_blocks, [new_columns, new_index]) return bm
python
def _process_state(self): """Process the application state configuration. Google Alerts manages the account information and alert data through some custom state configuration. Not all values have been completely enumerated. """ self._log.debug("Capturing state from the request") response = self._session.get(url=self.ALERTS_URL, headers=self.HEADERS) soup = BeautifulSoup(response.content, "html.parser") for i in soup.findAll('script'): if i.text.find('window.STATE') == -1: continue state = json.loads(i.text[15:-1]) if state != "": self._state = state self._log.debug("State value set: %s" % self._state) return self._state
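The i.text[15:-1] slice above strips the 15-character 'window.STATE = ' prefix and the trailing semicolon before parsing. A standalone sketch of that extraction on a made-up script body:

import json

script_text = 'window.STATE = [null, [1, 2, 3]];'
prefix = 'window.STATE = '
assert len(prefix) == 15
state = json.loads(script_text[len(prefix):-1])  # drop prefix and ';'
print(state)  # [None, [1, 2, 3]]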
python
def duration_outside_nwh( self, starttime: datetime.time = datetime.time(NORMAL_DAY_START_H), endtime: datetime.time = datetime.time(NORMAL_DAY_END_H), weekdays_only: bool = False, weekends_only: bool = False) -> datetime.timedelta: """ Returns a duration (a ``datetime.timedelta`` object) representing the number of hours outside normal working hours. This is not simply a subset of :meth:`day_night_duration`, because weekends are treated differently (they are always out of hours). The options allow the calculation of components on weekdays or weekends only. """ if weekdays_only and weekends_only: raise ValueError("Can't have weekdays_only and weekends_only") ooh = datetime.timedelta() # ooh = out of (normal) hours startdate = self.start.date() enddate = self.end.date() ndays = (enddate - startdate).days + 1 for i in range(ndays): date = startdate + datetime.timedelta(days=i) component = self.component_on_date(date) # ... an interval on a single day if not is_normal_working_day(date): if weekdays_only: continue ooh += component.duration() # all is out-of-normal-hours else: if weekends_only: continue normalday = Interval.daytime(date, starttime, endtime) normalpart = component.intersection(normalday) if normalpart is not None: ooh += component.duration() - normalpart.duration() else: ooh += component.duration() return ooh
python
def ls_demux(sel, ls_di, lsls_do): """ Demultiplexes an input signal structure to list of output structures. A structure is represented by a list of signals: [signal_1, signal_2, ..., signal_n] lsls_do[sel][0] = ls_di[0] lsls_do[sel][1] = ls_di[1] ... lsls_do[sel][n] = ls_di[n] sel - select index ls_di - input signal structure: [sig, sig, ..., sig] lsls_do - list of output signal structures: [[sig, sig, ..., sig], [sig, sig, ..., sig], ..., [sig, sig, ..., sig]] """ N = len(ls_di) lsls_out = [list(x) for x in zip(*lsls_do)] return [demux(sel, ls_di[i], lsls_out[i]) for i in range(N)]
java
public static String decode(String encoded) { String raw = encoded.replace("%2F", "/"); raw = raw.replace("%2A", "%"); return raw; }
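Note the mapping is not standard percent-encoding (where %2A decodes to '*' and %25 to '%'), so this reads as a custom two-token scheme. A hedged sketch of an encoder that would round-trip with the decoder above:

def encode(raw):
    # escape '%' first so the '%2F' tokens produced next survive decoding
    return raw.replace('%', '%2A').replace('/', '%2F')

def decode(encoded):
    return encoded.replace('%2F', '/').replace('%2A', '%')

s = 'a/b%c'
assert decode(encode(s)) == s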
java
public final void elementValuePair() throws RecognitionException { int elementValuePair_StartIndex = input.index(); try { if ( state.backtracking>0 && alreadyParsedRule(input, 68) ) { return; } // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:625:5: ( ( Identifier '=' )? elementValue ) // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:625:7: ( Identifier '=' )? elementValue { // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:625:7: ( Identifier '=' )? int alt90=2; int LA90_0 = input.LA(1); if ( (LA90_0==Identifier) ) { int LA90_1 = input.LA(2); if ( (LA90_1==54) ) { alt90=1; } } switch (alt90) { case 1 : // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:625:8: Identifier '=' { match(input,Identifier,FOLLOW_Identifier_in_elementValuePair2383); if (state.failed) return; match(input,54,FOLLOW_54_in_elementValuePair2385); if (state.failed) return; } break; } pushFollow(FOLLOW_elementValue_in_elementValuePair2389); elementValue(); state._fsp--; if (state.failed) return; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { // do for sure before leaving if ( state.backtracking>0 ) { memoize(input, 68, elementValuePair_StartIndex); } } }
python
def _extract_split_and_discordants(in_bam, work_dir, data): """Retrieve split-read alignments from input BAM file. """ sr_file = os.path.join(work_dir, "%s-sr.bam" % os.path.splitext(os.path.basename(in_bam))[0]) disc_file = os.path.join(work_dir, "%s-disc.bam" % os.path.splitext(os.path.basename(in_bam))[0]) if not utils.file_exists(sr_file) or not utils.file_exists(disc_file): with file_transaction(data, sr_file) as tx_sr_file: with file_transaction(data, disc_file) as tx_disc_file: cores = dd.get_num_cores(data) ref_file = dd.get_ref_file(data) cmd = ("extract-sv-reads -e --threads {cores} -T {ref_file} " "-i {in_bam} -s {tx_sr_file} -d {tx_disc_file}") do.run(cmd.format(**locals()), "extract split and discordant reads", data) for fname in [sr_file, disc_file]: bam.index(fname, data["config"]) return sr_file, disc_file
python
def has_active_condition(self, condition, instances): """ Given a list of instances, and the condition active for this switch, returns True if any instance meets the condition, False if a condition explicitly fails, or None if no condition applies (the list is extended with a non-instance default). """ return_value = None for instance in instances + [None]: if not self.can_execute(instance): continue result = self.is_active(instance, condition) if result is False: return False elif result is True: return_value = True return return_value
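The return contract above is three-valued: an explicit False short-circuits, True is remembered, and None means no condition ever applied. A toy rendering of that aggregation:

def aggregate(results):
    # results: one True/False/None verdict per instance
    value = None
    for r in results:
        if r is False:
            return False
        if r is True:
            value = True
    return value

print(aggregate([None, True]))   # True
print(aggregate([True, False]))  # False
print(aggregate([None, None]))   # None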
python
def p_default_option(self, p): """default_option : EQ primitive | EQ tag_ref | empty""" if p[1]: if isinstance(p[2], AstTagRef): p[0] = p[2] else: p[0] = p[2]
java
public void marshall(ThingGroupIndexingConfiguration thingGroupIndexingConfiguration, ProtocolMarshaller protocolMarshaller) { if (thingGroupIndexingConfiguration == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(thingGroupIndexingConfiguration.getThingGroupIndexingMode(), THINGGROUPINDEXINGMODE_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } }
python
def _get_web_auth_token(self): """ Retrieves a token from the network for web authentication. The token then has to be authorized from getAuthURL before creating session. """ request = _Request(self.network, "auth.getToken") # default action is that a request is signed only when # a session key is provided. request.sign_it() doc = request.execute() e = doc.getElementsByTagName("token")[0] return e.firstChild.data
python
def output_domain(gandi, domain, output_keys, justify=12): """ Helper to output a domain information.""" if 'nameservers' in domain: domain['nameservers'] = format_list(domain['nameservers']) if 'services' in domain: domain['services'] = format_list(domain['services']) if 'tags' in domain: domain['tags'] = format_list(domain['tags']) output_generic(gandi, domain, output_keys, justify) if 'created' in output_keys: output_line(gandi, 'created', domain['date_created'], justify) if 'expires' in output_keys: date_end = domain.get('date_registry_end') if date_end: days_left = (date_end - datetime.now()).days output_line(gandi, 'expires', '%s (in %d days)' % (date_end, days_left), justify) if 'updated' in output_keys: output_line(gandi, 'updated', domain['date_updated'], justify)
java
public static AssertionError expectFailure(StandardSubjectBuilderCallback assertionCallback) { ExpectFailure expectFailure = new ExpectFailure(); expectFailure.enterRuleContext(); // safe since this instance doesn't leave this method assertionCallback.invokeAssertion(expectFailure.whenTesting()); return expectFailure.getFailure(); }
java
public static String escape(String str) { int i, max; StringBuilder result; char c; max = str.length(); for (i = 0; i < max; i++) { if (str.charAt(i) < 32) { break; } } if (i == max) { return str; } result = new StringBuilder(max + 10); for (i = 0; i < max; i++) { c = str.charAt(i); switch (c) { case '\n': result.append("\\n"); break; case '\r': result.append("\\r"); break; case '\t': result.append("\\t"); break; case '\\': result.append("\\\\"); break; default: if (c < 32) { result.append("\\u").append(Strings.padLeft(Integer.toHexString(c), 4, '0')); } else { result.append(c); } } } return result.toString(); }
python
def eeg_psd(raw, sensors_include="all", sensors_exclude=None, fmin=0.016, fmax=60, method="multitaper", proj=False): """ Compute Power-Spectral Density (PSD). Parameters ---------- raw : mne.io.Raw Raw EEG data. sensors_include : str Sensor area to include. See :func:`neurokit.eeg_select_sensors()`. sensors_exclude : str Sensor area to exclude. See :func:`neurokit.eeg_select_sensors()`. fmin : float Min frequency of interest. fmax: float Max frequency of interest. method : str "multitaper" or "welch". proj : bool add projectors. Returns ---------- mean_psd : pandas.DataFrame Averaged PSDs. Example ---------- >>> import neurokit as nk Notes ---------- *Details* - Delta: 1-3Hz - Theta: 4-7Hz - Alpha1: 8-9Hz - Alpha2: 10-12Hz - Beta1: 13-17Hz - Beta2: 18-30Hz - Gamma1: 31-40Hz - Gamma2: 41-50Hz - Mu: 8-13Hz *Authors* - Dominique Makowski (https://github.com/DominiqueMakowski) References ------------ - None """ picks = mne.pick_types(raw.info, include=eeg_select_electrodes(include=sensors_include, exclude=sensors_exclude), exclude="bads") if method == "multitaper": psds, freqs = mne.time_frequency.psd_multitaper(raw, fmin=fmin, fmax=fmax, low_bias=True, proj=proj, picks=picks) else: psds, freqs = mne.time_frequency.psd_welch(raw, fmin=fmin, fmax=fmax, proj=proj, picks=picks) tf = pd.DataFrame(psds) tf.columns = eeg_name_frequencies(freqs) tf = tf.mean(axis=0) mean_psd = {} for freq in ["UltraLow", "Delta", "Theta", "Alpha", "Alpha1", "Alpha2", "Mu", "Beta", "Beta1", "Beta2", "Gamma", "Gamma1", "Gamma2", "UltraHigh"]: mean_psd[freq] = tf[[freq in s for s in tf.index]].mean() mean_psd = pd.DataFrame.from_dict(mean_psd, orient="index").T return(mean_psd)
python
def refresh(self, using=None, **kwargs): """ Performs a refresh operation on the index. Any additional keyword arguments will be passed to ``Elasticsearch.indices.refresh`` unchanged. """ return self._get_connection(using).indices.refresh(index=self._name, **kwargs)
python
def _add_dep(self, dep): """ Increment the reference count for *dep*. If this is a new dependency, then connect to its *changed* event. """ if dep in self._deps: self._deps[dep] += 1 else: self._deps[dep] = 1 dep._dependents[self] = None
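A matching decrement usually mirrors this pattern: drop the count and disconnect only when it reaches zero. A sketch of both halves (names are illustrative, not the library's):

class Node:
    def __init__(self):
        self._deps = {}  # dep -> reference count

    def add_dep(self, dep):
        self._deps[dep] = self._deps.get(dep, 0) + 1  # connect on first ref

    def remove_dep(self, dep):
        self._deps[dep] -= 1
        if self._deps[dep] == 0:
            del self._deps[dep]  # last reference gone: disconnect here

n, dep = Node(), object()
n.add_dep(dep)
n.add_dep(dep)
n.remove_dep(dep)
print(n._deps[dep])  # 1 -> still referenced once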
java
void localGoto(String name, float llx, float lly, float urx, float ury) { PdfAction action = getLocalGotoAction(name); annotationsImp.addPlainAnnotation(new PdfAnnotation(writer, llx, lly, urx, ury, action)); }
python
def from_path(path): """ Selects and returns a build class based on project structure/config from a given path. :param path(str): required path argument to be used """ for item in ref: build = ref[item] valid_ = build['is_valid'] if valid_(path) is True: return build['builder'](path) raise errors.InvalidProjectStructure()
python
def pca(df, n_components=2, mean_center=False, fcol=None, ecol=None, marker='o', markersize=40, threshold=None, label_threshold=None, label_weights=None, label_scores=None, return_df=False, show_covariance_ellipse=False, *args, **kwargs): """ Perform Principal Component Analysis (PCA) from input DataFrame and generate scores and weights plots. Principal Component Analysis is a technique for identifying the largest source of variation in a dataset. This function uses the implementation available in scikit-learn. The PCA is calculated via `analysis.pca` and will therefore give identical results. Resulting scores and weights plots are generated showing the distribution of samples within the resulting PCA space. Sample color and marker size can be controlled by label, lookup and calculation (lambda) to generate complex plots highlighting sample separation. For further information see the examples included in the documentation. :param df: Pandas `DataFrame` :param n_components: `int` number of Principal components to return :param mean_center: `bool` mean center the data before performing PCA :param fcol: `dict` of indexers:colors, where colors are hex colors or matplotlib color names :param ecol: `dict` of indexers:colors, where colors are hex colors or matplotlib color names :param marker: `str` matplotlib marker name (default "o") :param markersize: `int` or `callable` which returns an `int` for a given indexer :param threshold: `float` weight threshold for plot (horizontal line) :param label_threshold: `float` weight threshold over which to draw labels :param label_weights: `list` of `str` :param label_scores: `list` of `str` :param return_df: `bool` return the resulting scores, weights as pandas DataFrames :param show_covariance_ellipse: `bool` show the covariance ellipse around each group :param args: additional arguments passed to analysis.pca :param kwargs: additional arguments passed to analysis.pca :return: """ scores, weights = analysis.pca(df, n_components=n_components, *args, **kwargs) scores_ax = _pca_scores(scores, fcol=fcol, ecol=ecol, marker=marker, markersize=markersize, label_scores=label_scores, show_covariance_ellipse=show_covariance_ellipse) weights_ax = [] for pc in range(0, weights.shape[1]): weights_ax.append( _pca_weights(weights, pc, threshold=threshold, label_threshold=label_threshold, label_weights=label_weights) ) if return_df: return scores, weights else: return scores_ax, weights_ax
java
@Override public void handleRequest(final Request request) { if (!isDisabled()) { final SelectToggleModel model = getComponentModel(); String requestParam = request.getParameter(getId()); final State newValue; if ("all".equals(requestParam)) { newValue = State.ALL; } else if ("none".equals(requestParam)) { newValue = State.NONE; } else if ("some".equals(requestParam)) { newValue = State.SOME; } else { newValue = model.state; } if (!newValue.equals(model.state)) { setState(newValue); } if (!model.clientSide && model.target != null && !State.SOME.equals(newValue)) { // We need to change the selections *after* all components // Have updated themselves from the request, as they may change // their values when their handleRequest methods are called. invokeLater(new Runnable() { @Override public void run() { setSelections(model.target, State.ALL.equals(newValue)); } }); } } }
python
def findAllExceptions(pathToCheck): """ Find patterns of exceptions in a file or folder. @param pathToCheck: path of the file or folder to check @return: patterns of special functions and classes """ finder = PatternFinder() if os.path.isfile(pathToCheck): with open(pathToCheck) as f: findPatternsInFile(f.read(), finder) else: for path, dirs, files in os.walk(pathToCheck): for file in files: _, extname = os.path.splitext(file) if extname == ".py": pathFile = os.path.join(path, file) with open(pathFile) as f: findPatternsInFile(f.read(), finder) return finder.patternsFunc, finder.patternsClass
python
def range_intersect(a, b, extend=0): """ Returns the intersection between two ranges. >>> range_intersect((30, 45), (55, 65)) >>> range_intersect((48, 65), (45, 55)) [48, 55] """ a_min, a_max = a if a_min > a_max: a_min, a_max = a_max, a_min b_min, b_max = b if b_min > b_max: b_min, b_max = b_max, b_min if a_max + extend < b_min or b_max + extend < a_min: return None i_min = max(a_min, b_min) i_max = min(a_max, b_max) if i_min > i_max + extend: return None return [i_min, i_max]
java
public static XContentBuilder marshall(GatewayBean bean) throws StorageException { try (XContentBuilder builder = XContentFactory.jsonBuilder()) { preMarshall(bean); builder .startObject() .field("id", bean.getId()) .field("name", bean.getName()) .field("description", bean.getDescription()) .field("type", bean.getType()) .field("configuration", bean.getConfiguration()) .field("createdBy", bean.getCreatedBy()) .field("createdOn", bean.getCreatedOn().getTime()) .field("modifiedBy", bean.getModifiedBy()) .field("modifiedOn", bean.getModifiedOn().getTime()) .endObject(); postMarshall(bean); return builder; } catch (IOException e) { throw new StorageException(e); } }
java
public void offlineRegion(String resourceGroupName, String accountName, String region) { offlineRegionWithServiceResponseAsync(resourceGroupName, accountName, region).toBlocking().last().body(); }
java
public static WxOutMsg respVideo(String to, String mediaId, String title, String description) { WxOutMsg out = new WxOutMsg("video"); out.setVideo(new WxVideo(mediaId, title, description)); if (to != null) out.setToUserName(to); return out; }
python
def printTPRegionParams(tpregion): """ Note: assumes we are using TemporalMemory/TPShim in the TPRegion """ tm = tpregion.getSelf()._tfdr print "------------PY TemporalMemory Parameters ------------------" print "numberOfCols =", tm.columnDimensions print "cellsPerColumn =", tm.cellsPerColumn print "minThreshold =", tm.minThreshold print "activationThreshold =", tm.activationThreshold print "newSynapseCount =", tm.maxNewSynapseCount print "initialPerm =", tm.initialPermanence print "connectedPerm =", tm.connectedPermanence print "permanenceInc =", tm.permanenceIncrement print "permanenceDec =", tm.permanenceDecrement print "predictedSegmentDecrement=", tm.predictedSegmentDecrement print
java
public static double nextDouble(Random random, final double min, final double max) { Validate.isTrue(max >= min, "Start value must be smaller or equal to end value."); MoreValidate.nonNegative("min", min); if (Double.compare(min, max) == 0) { return min; } return min + ((max - min) * random.nextDouble()); }
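The same min + (max - min) * u transform in Python, where u is uniform on [0, 1):

import random

def next_double(rng, lo, hi):
    if lo > hi:
        raise ValueError('lo must be <= hi')
    if lo == hi:
        return lo
    return lo + (hi - lo) * rng.random()

print(next_double(random.Random(42), 2.0, 5.0))  # a value in [2.0, 5.0)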
java
public static nssimpleacl6_stats get(nitro_service service) throws Exception{ nssimpleacl6_stats obj = new nssimpleacl6_stats(); nssimpleacl6_stats[] response = (nssimpleacl6_stats[])obj.stat_resources(service); return response[0]; }
java
public OpenIdUserInfo getInfo(String authorization) throws ApiException { ApiResponse<OpenIdUserInfo> resp = getInfoWithHttpInfo(authorization); return resp.getData(); }
python
def _set_learning_mode(self, v, load=False): """ Setter method for learning_mode, mapped from YANG variable /mac_address_table/learning_mode (enumeration) If this variable is read-only (config: false) in the source YANG file, then _set_learning_mode is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_learning_mode() directly. YANG Description: Conversational Learning Mode """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'conversational': {'value': 1}},), is_leaf=True, yang_name="learning-mode", rest_name="learning-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Conversational Learning Mode', u'cli-full-command': None, u'callpoint': u'learning-mode-callpoint', u'display-when': u'/vcsmode/vcs-mode = "true"'}}, namespace='urn:brocade.com:mgmt:brocade-mac-address-table', defining_module='brocade-mac-address-table', yang_type='enumeration', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """learning_mode must be of a type compatible with enumeration""", 'defined-type': "brocade-mac-address-table:enumeration", 'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'conversational': {'value': 1}},), is_leaf=True, yang_name="learning-mode", rest_name="learning-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Conversational Learning Mode', u'cli-full-command': None, u'callpoint': u'learning-mode-callpoint', u'display-when': u'/vcsmode/vcs-mode = "true"'}}, namespace='urn:brocade.com:mgmt:brocade-mac-address-table', defining_module='brocade-mac-address-table', yang_type='enumeration', is_config=True)""", }) self.__learning_mode = t if hasattr(self, '_set'): self._set()
python
def cholesky(A, ordering_method='default', return_type=RETURN_P_L, use_long=False): ''' P A P' = L L' ''' logger.debug('Calculating cholesky decomposition for matrix {!r} with ordering method {}, return type {} and use_long {}.'.format(A, ordering_method, return_type, use_long)) ## check input return_types = (RETURN_L, RETURN_L_D, RETURN_P_L, RETURN_P_L_D) if ordering_method not in CHOLMOD_ORDERING_METHODS: raise ValueError('Unknown ordering method {}. Only values in {} are supported.'.format(ordering_method, CHOLMOD_ORDERING_METHODS)) if return_type not in return_types: raise ValueError('Unknown return type {}. Only values in {} are supported.'.format(return_type, return_types)) if ordering_method != 'natural' and return_type in (RETURN_L, RETURN_L_D): raise ValueError('Return type {} is only supported for "natural" ordering method.'.format(return_type)) #TODO symmetry check A = util.math.sparse.check.sorted_squared_csc(A) ## calculate cholesky decomposition try: try: f = sksparse.cholmod.cholesky(A, ordering_method=ordering_method, use_long=use_long) except sksparse.cholmod.CholmodTooLargeError as e: if not use_long: warnings.warn('Problem too large for int, switching to long.') return cholesky(A, ordering_method=ordering_method, return_type=return_type, use_long=True) else: raise except sksparse.cholmod.CholmodNotPositiveDefiniteError as e: raise util.math.matrix.NoPositiveDefiniteMatrixError(A, 'Row/column {} makes matrix not positive definite.'.format(e.column)) del A ## calculate permutation matrix p = f.P() n = len(p) if return_type in (RETURN_P_L, RETURN_P_L_D): P = scipy.sparse.dok_matrix((n,n), dtype=np.int8) for i in range(n): P[i,p[i]] = 1 P = P.tocsr() P = P.astype(np.int8) ## return P, L if return_type in (RETURN_L, RETURN_P_L): L = f.L().tocsr() if return_type == RETURN_L: assert np.all(p == np.arange(n)) logger.debug('Returning lower triangular matrix {!r}.'.format(L)) return (L,) else: logger.debug('Returning permutation matrix {!r} and lower triangular matrix {!r}.'.format(P, L)) return (P, L) ## return P, L, D if return_type in (RETURN_L_D, RETURN_P_L_D): L, D = f.L_D() # Do not use f.LD() -> higher memory consumption # LD = f.LD() if return_type == RETURN_L_D: logger.debug('Returning lower triangular matrix {!r} and diagonal matrix {!r}.'.format(L, D)) return (L, D) else: logger.debug('Returning permutation matrix {!r}, lower triangular matrix {!r} and diagonal matrix {!r}.'.format(P, L, D)) return (P, L, D)
python
def gen_submodule_names(package): """Walk package and yield names of all submodules :type package: package :param package: The package to get submodule names of :returns: Iterator that yields names of all submodules of ``package`` :rtype: Iterator that yields ``str`` """ for importer, modname, ispkg in pkgutil.walk_packages( path=package.__path__, prefix=package.__name__ + '.', onerror=lambda x: None): yield modname
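A usage sketch against a real stdlib package:

import pkgutil
import email  # any package with submodules works

for _, modname, _ in pkgutil.walk_packages(path=email.__path__,
                                           prefix=email.__name__ + '.',
                                           onerror=lambda x: None):
    print(modname)  # email.charset, email.encoders, ...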
java
public void writeValue(@NotNull Object entity, @Nullable Object value) { try { getWriteMethod().invoke(entity, value); } catch (InvocationTargetException | IllegalAccessException e) { LOGGER.warn("Can't invoke write method", e); } }
python
def fieldnames(self, keyword=''): """Get the names of the fields in a table keyword value. The value of a keyword can be a struct (python dict). This method returns the names of the fields in that struct. Each field in a struct can be a struct in itself. Names of fields in a sub-struct can be obtained by giving a keyword name consisting of multiple parts separated by dots (e.g. 'key1.sub1.sub2'). If an empty keyword name is given (which is the default), all table keyword names are shown and its behaviour is the same as :func:`keywordnames`. Instead of a keyword name an index can be given which returns the names of the struct value of the i-th keyword. """ if isinstance(keyword, str): return self._getfieldnames('', keyword, -1) else: return self._getfieldnames('', '', keyword)
java
public ServiceFuture<WorkerPoolResourceInner> getWorkerPoolAsync(String resourceGroupName, String name, String workerPoolName, final ServiceCallback<WorkerPoolResourceInner> serviceCallback) { return ServiceFuture.fromResponse(getWorkerPoolWithServiceResponseAsync(resourceGroupName, name, workerPoolName), serviceCallback); }
java
public void setAttribute(String name, String value, String facet) throws JspException { // validate the name attribute, in the case of an error simply return. if (name == null || name.length() <= 0) { String s = Bundle.getString("Tags_AttributeNameNotSet"); registerTagError(s, null); return; } // it's not legal to set the id or name attributes this way if (name != null && (name.equals("netui:treeName"))) { String s = Bundle.getString("Tags_AttributeMayNotBeSet", new Object[]{name}); registerTagError(s, null); return; } _divState.registerAttribute(AbstractHtmlState.ATTR_GENERAL, name, value); }
python
def dbmax_stddev(self, value=None): """ Corresponds to IDD Field `dbmax_stddev` Standard deviation of extreme annual maximum dry-bulb temperature Args: value (float): value for IDD Field `dbmax_stddev` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """ if value is not None: try: value = float(value) except ValueError: raise ValueError('value {} need to be of type float ' 'for field `dbmax_stddev`'.format(value)) self._dbmax_stddev = value
java
private CellRendererPane createCustomCellRendererPane() { return new CellRendererPane() { @Override public void paintComponent(Graphics graphics, Component component, Container container, int x, int y, int w, int h, boolean shouldValidate) { int rowAtPoint = table.rowAtPoint(new Point(x, y)); boolean isSelected = table.isRowSelected(rowAtPoint); if (component instanceof JComponent && component instanceof UIResource) { JComponent jComponent = (JComponent) component; jComponent.setOpaque(true); jComponent.setBorder(isSelected ? getSelectedRowBorder() : getRowBorder()); jComponent.setBackground(isSelected ? jComponent.getBackground() : transparentColor); if (isSelected) { jComponent.setForeground(unwrap(table.getSelectionForeground())); jComponent.setBackground(unwrap(table.getSelectionBackground())); } else { jComponent.setForeground(unwrap(table.getForeground())); jComponent.setBackground(transparentColor); } } super.paintComponent(graphics, component, container, x, y, w, h, shouldValidate); } /** * DOCUMENT ME! * * @param c DOCUMENT ME! * * @return DOCUMENT ME! */ private Color unwrap(Color c) { if (c instanceof UIResource) { return new Color(c.getRGB()); } return c; } /** * @see javax.swing.JComponent#isOpaque() */ @SuppressWarnings("unused") public boolean isOpaque(int x, int y) { int rowAtPoint = table.rowAtPoint(new Point(x, y)); return table.isRowSelected(rowAtPoint) ? true : super.isOpaque(); } }; }
python
def _scan_pth_files(dir_paths): """Given an iterable of directory paths, yield paths to all .pth files within.""" for dir_path in dir_paths: if not os.path.exists(dir_path): continue pth_filenames = (f for f in os.listdir(dir_path) if f.endswith('.pth')) for pth_filename in pth_filenames: yield os.path.join(dir_path, pth_filename)
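A usage sketch scanning the interpreter's own site-packages directories:

import os
import site

def scan_pth_files(dir_paths):
    for dir_path in dir_paths:
        if not os.path.exists(dir_path):
            continue
        for f in os.listdir(dir_path):
            if f.endswith('.pth'):
                yield os.path.join(dir_path, f)

for pth in scan_pth_files(site.getsitepackages()):
    print(pth)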
python
def upload_path(instance, filename): ''' Return the upload path for a file. The path is made unique (current date plus a random suffix) to avoid collisions with other uploads. ''' path_separator = "/" date_separator = "-" ext_separator = "." empty_string = "" # get the model name model_name = model_inspect(instance)['modelname'] # get the string date date = datetime.now().strftime("%Y-%m-%d").split(date_separator) curr_day = date[2] curr_month = date[1] curr_year = date[0] split_filename = filename.split(ext_separator) filename = empty_string.join(split_filename[:-1]) file_ext = split_filename[-1] new_filename = empty_string.join([filename, str(random.random()).split(ext_separator)[1]]) new_filename = ext_separator.join([new_filename, file_ext]) string_path = path_separator.join([model_name, curr_year, curr_month, curr_day, new_filename]) # the path is built using the current date and the modelname return string_path
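Under those rules, photo.jpg uploaded to a model named profile on 2024-05-17 lands at something like profile/2024/05/17/photo<random digits>.jpg. A stripped-down sketch of the same assembly (model_inspect is project-specific, so the model name is passed in directly):

import random
from datetime import datetime

def upload_path_sketch(model_name, filename):
    now = datetime.now()
    stem, _, ext = filename.rpartition('.')
    rand = str(random.random()).split('.')[1]  # digits after the decimal point
    return '/'.join([model_name, now.strftime('%Y'), now.strftime('%m'),
                     now.strftime('%d'), '{}{}.{}'.format(stem, rand, ext)])

print(upload_path_sketch('profile', 'photo.jpg'))
# e.g. profile/2024/05/17/photo8474337369372327.jpg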
java
protected int getNumOfCols() { if (!columns.isEmpty()) return getHeadRegularColCnt() + columns.size(); if (loaded && !colTitleWidgets.isEmpty()) return getHeadRegularColCnt() + colTitleWidgets.size(); return getHeadRegularColCnt(); }
python
def bend_miter_Miller(Di, angle, Re, roughness=0.0, L_unimpeded=None): r'''Calculates the loss coefficient for a single miter bend according to Miller [1]_. This is a sophisticated model which uses corrections for pipe roughness, the length of the pipe downstream before another interruption, and a correction for Reynolds number. It interpolates several times using several correction graphs in [1]_. Parameters ---------- Di : float Inside diameter of pipe, [m] angle : float Angle of miter bend, [degrees] Re : float Reynolds number of the pipe (no specification if inlet or outlet properties should be used), [-] roughness : float, optional Roughness of bend wall, [m] L_unimpeded : float, optional The length of unimpeded pipe without any fittings, instrumentation, or flow disturbances downstream (assumed 20 diameters if not specified), [m] Returns ------- K : float Loss coefficient [-] Notes ----- Note the loss coefficient includes the surface friction of the pipe as if it was straight. Examples -------- >>> bend_miter_Miller(Di=.6, angle=90, Re=2e6, roughness=2e-5, ... L_unimpeded=30*.6) 1.1921574594947668 References ---------- .. [1] Miller, Donald S. Internal Flow Systems: Design and Performance Prediction. Gulf Publishing Company, 1990. ''' if L_unimpeded is None: L_unimpeded = 20.0*Di if angle > 120: angle = 120.0 Kb = horner(bend_miter_Miller_coeffs, 1.0/60.0*(angle-60.0)) C_o = Miller_bend_unimpeded_correction(Kb=Kb, Di=Di, L_unimpeded=L_unimpeded) C_roughness = Miller_bend_roughness_correction(Re=Re, Di=Di, roughness=roughness) Re_C_Re = min(max(Re, 1E4), 1E8) C_Re_1 = bend_rounded_Miller_C_Re(Re_C_Re, 1.0) if Re_C_Re < 207956.58904584477 else 1.0 C_Re = Kb/(Kb - 0.2*C_Re_1 + 0.2) if C_Re > 2.2 or C_Re < 0: C_Re = 2.2 return Kb*C_Re*C_roughness*C_o
python
def host_report_msg(hostname, module_name, result, oneline): ''' summarize the JSON results for a particular host ''' failed = utils.is_failed(result) msg = '' if module_name in [ 'command', 'shell', 'raw' ] and 'ansible_job_id' not in result and result.get('parsed',True) != False: if not failed: msg = command_generic_msg(hostname, result, oneline, 'success') else: msg = command_generic_msg(hostname, result, oneline, 'FAILED') else: if not failed: msg = regular_generic_msg(hostname, result, oneline, 'success') else: msg = regular_generic_msg(hostname, result, oneline, 'FAILED') return msg
python
def hotstart(self): """ Prepare simulation hotstart info """ if self.write_hotstart: hotstart_time_str = self.event_manager.simulation_end.strftime("%Y%m%d_%H%M") try: os.mkdir('hotstart') except OSError: pass ov_hotstart_path = os.path.join('..', 'hotstart', '{0}_ov_hotstart_{1}.ovh'.format(self.project_manager.name, hotstart_time_str)) self._update_card("WRITE_OV_HOTSTART", ov_hotstart_path, True) chan_hotstart_path = os.path.join('..', 'hotstart', '{0}_chan_hotstart_{1}'.format(self.project_manager.name, hotstart_time_str)) self._update_card("WRITE_CHAN_HOTSTART", chan_hotstart_path, True) sm_hotstart_path = os.path.join('..', 'hotstart', '{0}_sm_hotstart_{1}.smh'.format(self.project_manager.name, hotstart_time_str)) self._update_card("WRITE_SM_HOTSTART", sm_hotstart_path, True) else: self._delete_card("WRITE_OV_HOTSTART") self._delete_card("WRITE_CHAN_HOTSTART") self._delete_card("WRITE_SM_HOTSTART") if self.read_hotstart: hotstart_time_str = self.event_manager.simulation_start.strftime("%Y%m%d_%H%M") # OVERLAND expected_ov_hotstart = os.path.join('hotstart', '{0}_ov_hotstart_{1}.ovh'.format(self.project_manager.name, hotstart_time_str)) if os.path.exists(expected_ov_hotstart): self._update_card("READ_OV_HOTSTART", os.path.join("..", expected_ov_hotstart), True) else: self._delete_card("READ_OV_HOTSTART") log.warning("READ_OV_HOTSTART not included as " "{0} does not exist ...".format(expected_ov_hotstart)) # CHANNEL expected_chan_hotstart = os.path.join('hotstart', '{0}_chan_hotstart_{1}'.format(self.project_manager.name, hotstart_time_str)) if os.path.exists("{0}.qht".format(expected_chan_hotstart)) \ and os.path.exists("{0}.dht".format(expected_chan_hotstart)): self._update_card("READ_CHAN_HOTSTART", os.path.join("..", expected_chan_hotstart), True) else: self._delete_card("READ_CHAN_HOTSTART") log.warning("READ_CHAN_HOTSTART not included as " "{0}.qht and/or {0}.dht does not exist ...".format(expected_chan_hotstart)) # INFILTRATION expected_sm_hotstart = os.path.join('hotstart', '{0}_sm_hotstart_{1}.smh'.format(self.project_manager.name, hotstart_time_str)) if os.path.exists(expected_sm_hotstart): self._update_card("READ_SM_HOTSTART", os.path.join("..", expected_sm_hotstart), True) else: self._delete_card("READ_SM_HOTSTART") log.warning("READ_SM_HOTSTART not included as" " {0} does not exist ...".format(expected_sm_hotstart))
java
public void setSubObj(Integer newSubObj) { Integer oldSubObj = subObj; subObj = newSubObj; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.OBJECT_COUNT__SUB_OBJ, oldSubObj, subObj)); }
python
def get_interactive_console(thread_id, frame_id, frame, console_message): """returns the global interactive console. interactive console should have been initialized by this time :rtype: DebugConsole """ if InteractiveConsoleCache.thread_id == thread_id and InteractiveConsoleCache.frame_id == frame_id: return InteractiveConsoleCache.interactive_console_instance InteractiveConsoleCache.interactive_console_instance = DebugConsole() InteractiveConsoleCache.thread_id = thread_id InteractiveConsoleCache.frame_id = frame_id console_stacktrace = traceback.extract_stack(frame, limit=1) if console_stacktrace: current_context = console_stacktrace[0] # top entry from stacktrace context_message = 'File "%s", line %s, in %s' % (current_context[0], current_context[1], current_context[2]) console_message.add_console_message(CONSOLE_OUTPUT, "[Current context]: %s" % (context_message,)) return InteractiveConsoleCache.interactive_console_instance
python
def fit(self, X): """Compute the Robust Shared Response Model Parameters ---------- X : list of 2D arrays, element i has shape=[voxels_i, timepoints] Each element in the list contains the fMRI data of one subject. """ logger.info('Starting RSRM') # Check that the regularizer value is positive if 0.0 >= self.lam: raise ValueError("Gamma parameter should be positive.") # Check the number of subjects if len(X) <= 1: raise ValueError("There are not enough subjects in the input " "data to train the model.") # Check for input data sizes if X[0].shape[1] < self.features: raise ValueError( "There are not enough timepoints to train the model with " "{0:d} features.".format(self.features)) # Check if all subjects have same number of TRs for alignment number_trs = X[0].shape[1] number_subjects = len(X) for subject in range(number_subjects): assert_all_finite(X[subject]) if X[subject].shape[1] != number_trs: raise ValueError("Different number of alignment timepoints " "between subjects.") # Create a new random state self.random_state_ = np.random.RandomState(self.rand_seed) # Run RSRM self.w_, self.r_, self.s_ = self._rsrm(X) return self
python
def _run_init_queries(self): ''' Initialization queries ''' for obj in (Package, PackageCfgFile, PayloadFile, IgnoredDir, AllowedDir): self._db.create_table_from_object(obj())
java
public static void openEmbeddedDialog( final CmsEmbeddedDialogContext dialogContext, Map<String, String[]> params, boolean includeWebOus) { String[] param = params.get(PARAM_OU); String ou; if ((param != null) && (param.length >= 1)) { ou = param[0]; } else { ou = dialogContext.getCms().getRequestContext().getCurrentUser().getOuFqn(); } boolean realOnly; param = params.get(PARAM_REAL_ONLY); if ((param != null) && (param.length >= 1)) { realOnly = Boolean.parseBoolean(param[0]); } else { realOnly = true; } WidgetType type = WidgetType.groupwidget; param = params.get(PARAM_TYPE); if ((param != null) && (param.length >= 1)) { try { type = WidgetType.valueOf(param[0]); } catch (Exception e) { // ignore } } WidgetType startType = null; param = params.get(PARAM_START_TYPE); if ((param != null) && (param.length >= 1)) { try { startType = WidgetType.valueOf(param[0]); } catch (Exception e) { // ignore } } if (startType == null) { startType = type != WidgetType.principalwidget ? type : WidgetType.groupwidget; } Window window = CmsBasicDialog.prepareWindow(DialogWidth.max); dialogContext.setWindow(window); CmsPrincipalSelectDialog dialog = new CmsPrincipalSelectDialog( null, ou, window, type, realOnly, startType, includeWebOus); dialog.setSelectHandler(new I_PrincipalSelectHandler() { public void onPrincipalSelect(String principalType, String principalName) { dialogContext.setPrincipal(principalName); } }); window.setCaption( CmsVaadinUtils.getMessageText( org.opencms.workplace.commons.Messages.GUI_PRINCIPALSELECTION_LIST_ACTION_SELECT_NAME_0)); window.setContent(dialog); A_CmsUI.get().addWindow(window); }
java
public ServiceFuture<List<FaceList>> listAsync(final ServiceCallback<List<FaceList>> serviceCallback) { return ServiceFuture.fromResponse(listWithServiceResponseAsync(), serviceCallback); }
python
def marvcli_comment_list(datasets): """Lists comments for datasets. Output: setid comment_id date time author message """ app = create_app() ids = parse_setids(datasets, dbids=True) comments = db.session.query(Comment)\ .options(db.joinedload(Comment.dataset))\ .filter(Comment.dataset_id.in_(ids)) for comment in sorted(comments, key=lambda x: (x.dataset._setid, x.id)): print(comment.dataset.setid, comment.id, datetime.datetime.fromtimestamp(int(comment.time_added / 1000)), comment.author, repr(comment.text))
python
def get_document_list(self): """ Retrieves all documents included in this project. """ try: return self.__dict__['document_list'] except KeyError: obj_list = DocumentSet([ self._connection.documents.get(i) for i in self.document_ids ]) self.__dict__['document_list'] = obj_list return obj_list
python
def kallisto_general_stats_table(self): """ Take the parsed stats from the Kallisto report and add it to the basic stats table at the top of the report """ headers = OrderedDict() headers['fragment_length'] = { 'title': 'Frag Length', 'description': 'Estimated average fragment length', 'min': 0, 'suffix': 'bp', 'scale': 'RdYlGn' } headers['percent_aligned'] = { 'title': '% Aligned', 'description': '% processed reads that were pseudoaligned', 'max': 100, 'min': 0, 'suffix': '%', 'scale': 'YlGn' } headers['pseudoaligned_reads'] = { 'title': '{} Aligned'.format(config.read_count_prefix), 'description': 'Pseudoaligned reads ({})'.format(config.read_count_desc), 'min': 0, 'scale': 'PuRd', 'modify': lambda x: x * config.read_count_multiplier, 'shared_key': 'read_count' } self.general_stats_addcols(self.kallisto_data, headers)
java
private boolean authenticate(byte method, InputStream in, BufferedOutputStream out) throws IOException { return authenticate(method, in, out, 0L); }
python
def repr_part(self): """Return a string usable in a space's ``__repr__`` method.""" optargs = [('norm', self.norm, ''), ('exponent', self.exponent, 2.0)] return signature_string([], optargs, mod=[[], ['!r', ':.4']])
python
def _LinearMapByteStream( self, byte_stream, byte_offset=0, context=None, **unused_kwargs): """Maps a data type sequence on a byte stream. Args: byte_stream (bytes): byte stream. byte_offset (Optional[int]): offset into the byte stream where to start. context (Optional[DataTypeMapContext]): data type map context. Returns: object: mapped value. Raises: MappingError: if the data type definition cannot be mapped on the byte stream. """ members_data_size = self._data_type_definition.GetByteSize() self._CheckByteStreamSize(byte_stream, byte_offset, members_data_size) try: struct_tuple = self._operation.ReadFrom(byte_stream[byte_offset:]) struct_values = [] for attribute_index, value in enumerate(struct_tuple): data_type_map = self._data_type_maps[attribute_index] member_definition = self._data_type_definition.members[attribute_index] value = data_type_map.MapValue(value) supported_values = getattr(member_definition, 'values', None) if supported_values and value not in supported_values: raise errors.MappingError( 'Value: {0!s} not in supported values: {1:s}'.format( value, ', '.join([ '{0!s}'.format(value) for value in supported_values]))) struct_values.append(value) mapped_value = self._structure_values_class(*struct_values) except Exception as exception: error_string = ( 'Unable to read: {0:s} from byte stream at offset: {1:d} ' 'with error: {2!s}').format( self._data_type_definition.name, byte_offset, exception) raise errors.MappingError(error_string) if context: context.byte_size = members_data_size return mapped_value
python
def delete_dcnm_out_nwk(self, tenant_id, fw_dict, is_fw_virt=False): """Delete the DCNM OUT network and update the result. """ tenant_name = fw_dict.get('tenant_name') ret = self._delete_service_nwk(tenant_id, tenant_name, 'out') if ret: res = fw_const.DCNM_OUT_NETWORK_DEL_SUCCESS LOG.info("out Service network deleted for tenant %s", tenant_id) else: res = fw_const.DCNM_OUT_NETWORK_DEL_FAIL LOG.info("out Service network deletion failed for tenant %s", tenant_id) self.update_fw_db_result(tenant_id, dcnm_status=res) return ret
java
public SVGPath relativeQuadTo(double c1x, double c1y, double x, double y) { return append(PATH_QUAD_TO_RELATIVE).append(c1x).append(c1y).append(x).append(y); }
java
public static void isTrue(Boolean condition, Supplier<String> message) { if (isNotTrue(condition)) { throw new IllegalArgumentException(message.get()); } }
java
private String getBshPrompt() { if ( null != prompt ) return prompt; try { prompt = (String) eval("getBshPrompt()"); } catch ( Exception e ) { prompt = "bsh % "; } return prompt; }
java
@Override public DescribeSeverityLevelsResult describeSeverityLevels(DescribeSeverityLevelsRequest request) { request = beforeClientExecution(request); return executeDescribeSeverityLevels(request); }
python
def from_composition_and_entries(comp, entries_in_chemsys, working_ion_symbol="Li"): """ Convenience constructor to make a ConversionElectrode from a composition and all entries in a chemical system. Args: comp: Starting composition for ConversionElectrode, e.g., Composition("FeF3") entries_in_chemsys: Sequence containing all entries in a chemical system. E.g., all Li-Fe-F containing entries. working_ion_symbol: Element symbol of working ion. Defaults to Li. """ pd = PhaseDiagram(entries_in_chemsys) return ConversionElectrode.from_composition_and_pd(comp, pd, working_ion_symbol)
java
public DatabaseConnectionRequest<T, U> setConnection(String connection) { request.addParameter(ParameterBuilder.CONNECTION_KEY, connection); return this; }
java
public ClassificationModel attachLink(ClassificationModel classificationModel, LinkModel linkModel) { for (LinkModel existing : classificationModel.getLinks()) { if (StringUtils.equals(existing.getLink(), linkModel.getLink())) { return classificationModel; } } classificationModel.addLink(linkModel); return classificationModel; }
java
public synchronized void registerPrimarySsId(String address, Long ssid) throws IOException { String node = getSsIdNode(address); zkCreateRecursively(node, SerializableUtils.toBytes(ssid), true, ssid.toString()); }
python
def _max_args(self, f): """ Returns maximum number of arguments accepted by given function. """ if f.func_defaults is None: return f.func_code.co_argcount return f.func_code.co_argcount + len(f.func_defaults)
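func_defaults and func_code are Python 2 spellings; on Python 3 the same attributes are __defaults__ and __code__. A direct translation preserving the original arithmetic:

def max_args(f):
    # Python 3 spelling of the same attribute access
    if f.__defaults__ is None:
        return f.__code__.co_argcount
    return f.__code__.co_argcount + len(f.__defaults__)

def g(a, b, c=1):
    pass

print(max_args(g))  # 4, per the original counting rule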
java
@Override public RequestCtx handleRequest(HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException { if (logger.isDebugEnabled()) { logger.debug("{}/handleRequest!", this.getClass().getName()); } String[] parts = getPathParts(request); // -/objects/{pid}/datastreams/{dsID} if (parts.length < 4) { logger.error("Not enough path components on the URI: {}", request.getRequestURI()); throw new ServletException("Not enough path components on the URI: " + request.getRequestURI()); } String mimeType = request.getParameter("mimeType"); String formatURI = request.getParameter("formatURI"); String dsLocation = request.getParameter("dsLocation"); String controlGroup = request.getParameter("controlGroup"); String dsState = request.getParameter("dsState"); String checksumType = request.getParameter("checksumType"); String checksum = request.getParameter("checksum"); // String logMessage = null; RequestCtx req = null; Map<URI, AttributeValue> actions = new HashMap<URI, AttributeValue>(); Map<URI, AttributeValue> resAttr; try { resAttr = ResourceAttributes.getResources(parts); if (mimeType != null && !mimeType.isEmpty()) { resAttr.put(Constants.DATASTREAM.NEW_MIME_TYPE.getURI(), new StringAttribute(mimeType)); } if (formatURI != null && !formatURI.isEmpty()) { resAttr.put(Constants.DATASTREAM.NEW_FORMAT_URI.getURI(), new AnyURIAttribute(new URI(formatURI))); } if (dsLocation != null && !dsLocation.isEmpty()) { resAttr.put(Constants.DATASTREAM.NEW_LOCATION.getURI(), new AnyURIAttribute(new URI(dsLocation))); } if (controlGroup != null && !controlGroup.isEmpty()) { resAttr.put(Constants.DATASTREAM.NEW_CONTROL_GROUP.getURI(), new StringAttribute(controlGroup)); } if (dsState != null && !dsState.isEmpty()) { resAttr.put(Constants.DATASTREAM.NEW_STATE.getURI(), new StringAttribute(dsState)); } if (checksumType != null && !checksumType.isEmpty()) { resAttr.put(Constants.DATASTREAM.NEW_CHECKSUM_TYPE.getURI(), new StringAttribute(checksumType)); } if (checksum != null && !checksum.isEmpty()) { resAttr.put(Constants.DATASTREAM.NEW_CHECKSUM.getURI(), new StringAttribute(checksum)); } actions.put(Constants.ACTION.ID.getURI(), Constants.ACTION.ADD_DATASTREAM .getStringAttribute()); actions.put(Constants.ACTION.API.getURI(), Constants.ACTION.APIM.getStringAttribute()); String dsID = parts[3]; // modifying the FeSL policy datastream requires policy management permissions if (dsID != null && dsID.equals(FedoraPolicyStore.FESL_POLICY_DATASTREAM)) { actions.put(Constants.ACTION.ID.getURI(), Constants.ACTION.MANAGE_POLICIES.getStringAttribute()); } req = getContextHandler().buildRequest(getSubjects(request), actions, resAttr, getEnvironment(request)); LogUtil.statLog(request.getRemoteUser(), Constants.ACTION.ADD_DATASTREAM.uri, parts[1], dsID); } catch (Exception e) { logger.error(e.getMessage(), e); throw new ServletException(e.getMessage(), e); } return req; }
python
def update(self, id=None, new_data=None, **kwargs):
    """Update an object on the server.

    Args:
        id: ID of the object to update (can be None if not required)
        new_data: the update data for the object
        **kwargs: Extra options to send to the server (e.g. sudo)

    Raises:
        GitlabAuthenticationError: If authentication is not correct
        GitlabUpdateError: If the server cannot perform the request
    """
    new_data = new_data or {}
    super(ProjectServiceManager, self).update(id, new_data, **kwargs)
    self.id = id
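For reference, a minimal sketch (independent of the gitlab API) of why a shared mutable default like `new_data={}` is risky and `new_data=None` is used instead:

def bad(data={}):
    # the same dict object is reused by every call that omits `data`
    data['calls'] = data.get('calls', 0) + 1
    return data

print(bad())  # {'calls': 1}
print(bad())  # {'calls': 2} -- state leaked from the first call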
java
public static <V> ExecutorCommand<V> executor(String circuitName) { CommandConfig commandConfig = new CommandConfig(); commandConfig.setCommandName(circuitName); return new ExecutorCommand<>(commandConfig); }
python
def get_parents(self, id_): """Gets the parents of the given ``id``. arg: id (osid.id.Id): the ``Id`` to query return: (osid.id.IdList) - the parents of the ``id`` raise: NotFound - ``id`` is not found raise: NullArgument - ``id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.* """ id_list = [] for r in self._rls.get_relationships_by_genus_type_for_destination(id_, self._relationship_type): ident = r.get_source_id() if ident != self._phantom_root_id: id_list.append(ident) return IdList(id_list)
java
/**
 * Returns a new AssociativeArray whose entries are sorted by their values,
 * interpreted as doubles, in ascending order.
 */
public static AssociativeArray sortAssociativeArrayByValueAscending(AssociativeArray associativeArray) {
    ArrayList<Map.Entry<Object, Object>> entries = new ArrayList<>(associativeArray.entrySet());

    Collections.sort(entries, (Map.Entry<Object, Object> a, Map.Entry<Object, Object> b) -> {
        Double va = TypeInference.toDouble(a.getValue());
        Double vb = TypeInference.toDouble(b.getValue());
        return va.compareTo(vb);
    });

    AssociativeArray sortedAssociativeArray = new AssociativeArray();
    for (Map.Entry<Object, Object> entry : entries) {
        sortedAssociativeArray.put(entry.getKey(), entry.getValue());
    }

    return sortedAssociativeArray;
}
python
def _scan_line(self, line):
    """ Reviews a line of the email message and adds it to the fragment it
        belongs to, starting a new fragment when the line's character
        (quoted, header, signature) no longer matches the current one.

        line - a row of text from the email message
    """
    is_quote_header = self.QUOTE_HDR_REGEX.match(line) is not None
    is_quoted = self.QUOTED_REGEX.match(line) is not None
    is_header = is_quote_header or self.HEADER_REGEX.match(line) is not None

    if self.fragment and len(line.strip()) == 0:
        if self.SIG_REGEX.match(self.fragment.lines[-1].strip()):
            self.fragment.signature = True
            self._finish_fragment()

    if self.fragment \
            and ((self.fragment.headers == is_header and self.fragment.quoted == is_quoted) or
                 (self.fragment.quoted and (is_quote_header or len(line.strip()) == 0))):
        self.fragment.lines.append(line)
    else:
        self._finish_fragment()
        self.fragment = Fragment(is_quoted, line, headers=is_header)
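A sketch of the kind of lines the class-level regexes above distinguish; the stand-in patterns here are assumptions for illustration, not the parser's real ones:

import re

# Hypothetical stand-ins for QUOTE_HDR_REGEX and QUOTED_REGEX.
QUOTE_HDR = re.compile(r'On\s.*wrote:$')
QUOTED = re.compile(r'>+')

for line in ['On Mon, Bob wrote:', '> quoted reply', 'fresh text']:
    print(repr(line), bool(QUOTE_HDR.match(line)), bool(QUOTED.match(line)))
# 'On Mon, Bob wrote:' True False
# '> quoted reply' False True
# 'fresh text' False False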
java
/**
 * Commits a temporary configuration file. This is a no-op until boot-up has
 * completed; afterwards the temp file replaces the main file, or the last
 * file when the interaction policy is read-only.
 */
void commitTempFile(File temp) throws ConfigurationPersistenceException {
    if (!doneBootup.get()) {
        return;
    }
    if (!interactionPolicy.isReadOnly()) {
        FilePersistenceUtils.moveTempFileToMain(temp, mainFile);
    } else {
        FilePersistenceUtils.moveTempFileToMain(temp, lastFile);
    }
}
python
def column(environment, book, sheet_name, sheet_source, column_source, column_key):
    """
    Returns an array of values from a column of a different sheet, ordered to
    match the key column of the current sheet.
    """
    source = book.sheets[sheet_source]
    current = book.sheets[sheet_name]
    return environment.copy([
        source.get(**{column_key: row[column_key]})[column_source]
        for row in current.all()
    ])
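The lookup semantics can be mimicked with plain dicts (hypothetical data; the real book/sheet API is not shown):

source = [{'id': 1, 'price': 10}, {'id': 2, 'price': 20}]
current = [{'id': 2}, {'id': 1}]

# index the source rows by key, then read them back in the current sheet's order
by_key = {row['id']: row for row in source}
print([by_key[row['id']]['price'] for row in current])  # [20, 10]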
java
public static <T> Collector<T, List<T>> toList() { return new Collector<T, List<T>>() { @Override public List<T> collect(Stream<? extends T> stream) { return Lists.newArrayList(stream.iterator()); } }; }
java
/**
 * Looks up the value associated with the given key on the top-level scope,
 * walking up the prototype chain until a match is found. Returns null when
 * no scope in the chain carries an associated value for the key.
 */
public static Object getTopScopeValue(Scriptable scope, Object key) {
    scope = ScriptableObject.getTopLevelScope(scope);
    for (;;) {
        if (scope instanceof ScriptableObject) {
            ScriptableObject so = (ScriptableObject) scope;
            Object value = so.getAssociatedValue(key);
            if (value != null) {
                return value;
            }
        }
        scope = scope.getPrototype();
        if (scope == null) {
            return null;
        }
    }
}
java
/**
 * Returns the value of the named property as a long, or the given default
 * when the property is unset. Values recognized by getHexDigits as
 * hexadecimal are parsed base-16.
 */
public long getLong(String name, long defaultValue) {
    String valueString = getTrimmed(name);
    if (valueString == null)
        return defaultValue;
    String hexString = getHexDigits(valueString);
    if (hexString != null) {
        return Long.parseLong(hexString, 16);
    }
    return Long.parseLong(valueString);
}
java
/**
 * Returns the index of the subregion to which the solution at position
 * {@code idx} is assigned, or -1 if it belongs to no subregion.
 */
public int findRegion(int idx) {
    for (int i = 0; i < populationSize; i++) {
        if (subregionIdx[i][idx] == 1) {
            return i;
        }
    }
    return -1;
}
java
public String getModality() {
    if (Event_Type.featOkTst && ((Event_Type) jcasType).casFeat_modality == null)
        jcasType.jcas.throwFeatMissing("modality", "de.unihd.dbs.uima.types.heideltime.Event");
    return jcasType.ll_cas.ll_getStringValue(addr, ((Event_Type) jcasType).casFeatCode_modality);
}
python
def resp_set_light(self, resp, color=None):
    """Default callback for set_color."""
    if color:
        self.color = color
    elif resp:
        self.power_level = resp.power_level
        self.color = resp.color
        self.label = resp.label.decode().replace("\x00", "")
java
public OkRequest<T> part(final String name, final String filename, final String contentType, final InputStream part) throws IOException { try { startPart(); writePartHeader(name, filename, contentType); copy(part, mOutput); } catch (IOException ex) { throw ex; } catch (Exception ex) { VolleyLog.e(ex, "error on part"); } return this; }
python
def get_line_rules(declarations):
    """ Given a list of declarations, return a list of output.Rule objects.

        This function is wise to line-<foo>, inline-<foo>, and outline-<foo> properties,
        and will generate multiple LineSymbolizers if necessary.
    """
    property_map = {'line-color': 'stroke', 'line-width': 'stroke-width',
                    'line-opacity': 'stroke-opacity', 'line-join': 'stroke-linejoin',
                    'line-cap': 'stroke-linecap', 'line-dasharray': 'stroke-dasharray',
                    'line-meta-output': 'meta-output', 'line-meta-writer': 'meta-writer'}

    # also accept the inline-<foo> and outline-<foo> variant of each property
    property_names = list(property_map.keys())
    for name in list(property_names):
        property_names.append('in' + name)
        property_names.append('out' + name)

    # a place to put rules
    rules = []

    for (filter, values) in filtered_property_declarations(declarations, property_names):
        width = values['line-width'].value if 'line-width' in values else None
        color = values['line-color'].value if 'line-color' in values else None
        opacity = values['line-opacity'].value if 'line-opacity' in values else None
        join = values['line-join'].value if 'line-join' in values else None
        cap = values['line-cap'].value if 'line-cap' in values else None
        dashes = values['line-dasharray'].value if 'line-dasharray' in values else None

        line_symbolizer = output.LineSymbolizer(color, width, opacity, join, cap, dashes) if color and width else False

        width = values['inline-width'].value if 'inline-width' in values else None
        color = values['inline-color'].value if 'inline-color' in values else None
        opacity = values['inline-opacity'].value if 'inline-opacity' in values else None
        join = values['inline-join'].value if 'inline-join' in values else None
        cap = values['inline-cap'].value if 'inline-cap' in values else None
        dashes = values['inline-dasharray'].value if 'inline-dasharray' in values else None

        inline_symbolizer = output.LineSymbolizer(color, width, opacity, join, cap, dashes) if color and width else False

        # outline requires the regular line to have a meaningful width
        width = values['line-width'].value + values['outline-width'].value * 2 \
            if 'outline-width' in values and 'line-width' in values else None
        color = values['outline-color'].value if 'outline-color' in values else None
        opacity = values['outline-opacity'].value if 'outline-opacity' in values else None
        join = values['outline-join'].value if 'outline-join' in values else None
        cap = values['outline-cap'].value if 'outline-cap' in values else None
        dashes = values['outline-dasharray'].value if 'outline-dasharray' in values else None

        outline_symbolizer = output.LineSymbolizer(color, width, opacity, join, cap, dashes) if color and width else False

        if outline_symbolizer or line_symbolizer or inline_symbolizer:
            rules.append(make_rule(filter, outline_symbolizer, line_symbolizer, inline_symbolizer))

    return rules
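The prefixing loop above relies on plain string concatenation reproducing the related CSS-style property names:

assert 'in' + 'line-color' == 'inline-color'
assert 'out' + 'line-color' == 'outline-color'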
java
public List<I_CmsFormatterBean> getDisplayFormatters() { if (m_displayFormatters == null) { List<I_CmsFormatterBean> formatters = new ArrayList<I_CmsFormatterBean>( Collections2.filter(m_allFormatters, new IsDisplay())); if (formatters.size() > 1) { Collections.sort(formatters, new Comparator<I_CmsFormatterBean>() { public int compare(I_CmsFormatterBean o1, I_CmsFormatterBean o2) { return o1.getRank() == o2.getRank() ? 0 : (o1.getRank() < o2.getRank() ? -1 : 1); } }); } m_displayFormatters = Collections.unmodifiableList(formatters); } return m_displayFormatters; }
java
@Override public void eSet(int featureID, Object newValue) { switch (featureID) { case BpsimPackage.PRIORITY_PARAMETERS__INTERRUPTIBLE: setInterruptible((Parameter)newValue); return; case BpsimPackage.PRIORITY_PARAMETERS__PRIORITY: setPriority((Parameter)newValue); return; } super.eSet(featureID, newValue); }
java
@SuppressWarnings("deprecation") public boolean isInheritedContainer(CmsObject cms) throws CmsException { if (m_resource == null) { initResource(cms); } return OpenCms.getResourceManager().getResourceType( CmsResourceTypeXmlContainerPage.INHERIT_CONTAINER_TYPE_NAME).getTypeId() == m_resource.getTypeId(); }
java
/**
 * Renders the opening &lt;ul&gt; tag of the drop menu, writing its CSS
 * classes, optional inline style and the ARIA attributes that tie the menu
 * to its label.
 *
 * @param c  the drop menu component being rendered
 * @param rw the response writer
 * @param l  the id of the labelling element, emitted as aria-labelledby
 * @throws IOException if writing to the response fails
 */
public static void encodeDropMenuStart(DropMenu c, ResponseWriter rw, String l) throws IOException {
    rw.startElement("ul", c);
    if (c.getContentClass() != null)
        rw.writeAttribute("class", "dropdown-menu " + c.getContentClass(), "class");
    else
        rw.writeAttribute("class", "dropdown-menu", "class");
    if (null != c.getContentStyle())
        rw.writeAttribute("style", c.getContentStyle(), "style");
    rw.writeAttribute("role", "menu", null);
    rw.writeAttribute("aria-labelledby", l, null);
}