language
stringclasses
2 values
func_code_string
stringlengths
63
466k
python
def implied_timescales(sequences, lag_times, n_timescales=10, msm=None, n_jobs=1, verbose=0):
    """Calculate implied timescales of MSMs fit at a series of lag times.

    Parameters
    ----------
    sequences : list of array-like
        One or more 1D sequences of state labels. Labels can be integers,
        strings, or other orderable objects.
    lag_times : array-like
        Lag times at which to calculate the implied timescales.
    n_timescales : int, optional
        Number of timescales to keep per model.
    msm : msmbuilder.msm.MarkovStateModel, optional
        Template MSM supplying all parameters except the lag time. When
        None, a default-constructed MarkovStateModel is used.
    n_jobs : int, optional
        Number of jobs to run in parallel.

    Returns
    -------
    timescales : np.ndarray, shape = [n_models, n_timescales]
        The slowest timescales (in units of lag times) for each model.
    """
    template = MarkovStateModel() if msm is None else msm
    fitted_models = param_sweep(template, sequences, {'lag_time': lag_times},
                                n_jobs=n_jobs, verbose=verbose)
    per_model = [model.timescales_ for model in fitted_models]
    # Truncate every model to the shortest available timescale list.
    keep = min(n_timescales, min(len(ts) for ts in per_model))
    return np.array([ts[:keep] for ts in per_model])
java
/**
 * Appends {@code value} to the string stored at {@code key}.
 *
 * @return the length of the string after the append, obtained by blocking
 *         on the result of the deferred {@code d_append} operation
 */
@Override
public Long append(final String key, final String value) {
    return this.d_append(key, value).getResult();
}
java
/**
 * Evaluates the XPath expression against the given node and returns the
 * result as a ByteBuffer, delegating to the overload with the default
 * {@code xpath()} instance.
 *
 * @throws XPathExpressionException if the expression cannot be evaluated
 */
public static ByteBuffer asByteBuffer(String expression, Node node) throws XPathExpressionException {
    return asByteBuffer(expression, node, xpath());
}
java
/**
 * Progress callback invoked for each generated file; logs the file name
 * and its position in the batch, but only when debug logging is enabled.
 */
@Override
public void generatedFile(final String fileName, final int current, final int total) {
    if (log.isDebugEnabled()) {
        log.debug("Processing file [" + current + "/" + total + "]: " + fileName);
    }
}
java
/**
 * Appends a single point to the running coordinate list and returns the
 * same list (mutates {@code coords} in place).
 */
@NonNull
private static List<Point> coordAll(@NonNull List<Point> coords, @NonNull Point point) {
    coords.add(point);
    return coords;
}
java
/**
 * Returns the most recent valid snapshot file, or null when no valid
 * snapshot exists.
 *
 * @throws IOException if the snapshot files cannot be read
 */
@Override
public File findMostRecentSnapshot() throws IOException {
    List<File> files = findNValidSnapshots(1);
    if (files.size() == 0) {
        return null;
    }
    return files.get(0);
}
java
/**
 * Null-safe variant of {@code Bundle.getString(String, String)}: returns
 * {@code fallback} when the bundle itself is null.
 */
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public static String optString(@Nullable Bundle bundle, @Nullable String key, @Nullable String fallback) {
    if (bundle == null) {
        return fallback;
    }
    return bundle.getString(key, fallback);
}
java
/**
 * Generates vertex positions, normals and triangle indices for a cylinder
 * mesh centered on the origin, appending them to the supplied lists.
 * Rim vertices are placed every 15 degrees around the y axis.
 *
 * @param radius the cylinder radius
 * @param height the cylinder height (the mesh spans -height/2 .. height/2)
 */
public static void generateCylinder(TFloatList positions, TFloatList normals, TIntList indices, float radius, float height) {
    // 0,0,0 will be halfway up the cylinder in the middle
    final float halfHeight = height / 2;
    // The positions at the rims of the cylinders
    final List<Vector3f> rims = new ArrayList<>();
    for (int angle = 0; angle < 360; angle += 15) {
        final double angleRads = Math.toRadians(angle);
        rims.add(new Vector3f(
                radius * TrigMath.cos(angleRads),
                halfHeight,
                radius * -TrigMath.sin(angleRads)));
    }
    // The normals for the triangles of the top and bottom faces
    final Vector3f topNormal = new Vector3f(0, 1, 0);
    final Vector3f bottomNormal = new Vector3f(0, -1, 0);
    // Add the top and bottom face center vertices
    addVector(positions, new Vector3f(0, halfHeight, 0));// 0
    addVector(normals, topNormal);
    addVector(positions, new Vector3f(0, -halfHeight, 0));// 1
    addVector(normals, bottomNormal);
    // Add all the faces section by section, turning around the y axis
    final int rimsSize = rims.size();
    for (int i = 0; i < rimsSize; i++) {
        // Get the top and bottom vertex positions and the side normal
        final Vector3f t = rims.get(i);
        final Vector3f b = new Vector3f(t.getX(), -t.getY(), t.getZ());
        final Vector3f n = new Vector3f(t.getX(), 0, t.getZ()).normalize();
        // Top face vertex
        addVector(positions, t);// index
        addVector(normals, topNormal);
        // Bottom face vertex
        addVector(positions, b);// index + 1
        addVector(normals, bottomNormal);
        // Side top vertex
        addVector(positions, t);// index + 2
        addVector(normals, n);
        // Side bottom vertex
        addVector(positions, b);// index + 3
        addVector(normals, n);
        // Get the current index for our vertices
        final int currentIndex = i * 4 + 2;
        // Get the index for the next iteration, wrapping around at the end
        final int nextIndex = (i == rimsSize - 1 ? 0 : i + 1) * 4 + 2;
        // Add the 4 triangles (1 top, 1 bottom, 2 for the side)
        addAll(indices, 0, currentIndex, nextIndex);
        addAll(indices, 1, nextIndex + 1, currentIndex + 1);
        addAll(indices, currentIndex + 2, currentIndex + 3, nextIndex + 2);
        addAll(indices, currentIndex + 3, nextIndex + 3, nextIndex + 2);
    }
}
python
def get_current_shutit_pexpect_session(self, note=None):
    """Return the currently-set default pexpect child.

    @param note: optional note passed through to the note handlers
    @return: default shutit pexpect child object
    """
    self.handle_note(note)
    session = self.current_shutit_pexpect_session
    self.handle_note_after(note)
    return session
java
/**
 * Returns a copy of this interval with the start instant replaced by the
 * given instant (converted to millis via DateTimeUtils).
 */
public Interval withStart(ReadableInstant start) {
    long startMillis = DateTimeUtils.getInstantMillis(start);
    return withStartMillis(startMillis);
}
java
/**
 * Per-frame callback: enables only the child of the owner object whose
 * position equals {@code mSwitchIndex}, disabling all other children.
 */
public void onDrawFrame(float frameTime) {
    GVRSceneObject owner = getOwnerObject();
    if (owner == null) {
        return;
    }
    // NOTE(review): this guard accepts mSwitchIndex == size(), in which case
    // no child matches and every child is disabled — confirm whether '>='
    // was intended here.
    if (mSwitchIndex < 0 || mSwitchIndex > owner.rawGetChildren().size()) {
        return;
    }
    int i = 0;
    List<GVRSceneObject> children = owner.rawGetChildren();
    for (GVRSceneObject child : children) {
        // enable exactly the child at the switch index
        child.setEnable(i++ == mSwitchIndex);
    }
}
python
def _initialize(self, **resource_attributes):
    """Initialize the collection, replacing each raw data dict with an
    instance of the expected API resource class.

    :param resource_attributes: API resource parameters
    """
    super(APIResourceCollection, self)._initialize(**resource_attributes)
    raw_items = self.data
    self.data = [self._expected_api_resource(**item) for item in raw_items]
python
def _find_from_file(full_doc, from_file_keyword): """ Finds a line in <full_doc> like <from_file_keyword> <colon> <path> and return path """ path = None for line in full_doc.splitlines(): if from_file_keyword in line: parts = line.strip().split(':') if len(parts) == 2 and parts[0].strip() == from_file_keyword: path = parts[1].strip() break return path
java
/**
 * Registers the given stringifier under the supplied name for both the
 * boxed {@code Long} class and the primitive {@code long} type.
 *
 * @return the stringifier that was registered, for chaining
 */
protected final Stringifier<Long> registerLongStringifier(String name, Stringifier<Long> longStringifier) {
    compositeStringifier.add(Long.class, name, longStringifier);
    compositeStringifier.add(Long.TYPE, name, longStringifier);
    return longStringifier;
}
python
def backend_fields(self, fields):
    '''Return a two elements tuple containing a list of fields names and
    a list of field attribute names.'''
    dfields = self.dfields
    seen = set()
    names = []
    atts = []
    pkname = self.pkname()
    for name in fields:
        # skip the primary key and anything already handled
        if name == pkname or name in seen:
            continue
        if name in dfields:
            seen.add(name)
            field = dfields[name]
            names.append(field.name)
            atts.append(field.attname)
        else:
            # dotted path: resolve against the base field name
            bname = name.split(JSPLITTER)[0]
            if bname in dfields:
                field = dfields[bname]
                if field.type in ('json object', 'related object'):
                    seen.add(name)
                    names.append(name)
                    atts.append(name)
    return names, atts
python
def graph(self):
    """Return a MultiDiGraph built from the KIHs: nodes are helices and
    edges are KIHs."""
    g = networkx.MultiDiGraph()
    g.add_edges_from(
        (kih.knob_helix, kih.hole_helix, kih.id, {'kih': kih})
        for kih in self.get_monomers())
    return g
python
def to_syllables_with_trailing_spaces(line: str, syllables: List[str]) -> List[str]:
    """Reattach trailing whitespace from *line* onto the given syllables.

    :param line: the original line of syllables and spaces
    :param syllables: the syllables extracted from that line
    :return: the syllables with trailing spaces attached as appropriate

    >>> to_syllables_with_trailing_spaces(' arma virumque cano ',
    ... ['ar', 'ma', 'vi', 'rum', 'que', 'ca', 'no' ])
    [' ar', 'ma ', 'vi', 'rum', 'que ', 'ca', 'no ']
    """
    result = []
    cursor = 0
    line_len = len(line)
    for pos, syllable in enumerate(syllables):
        begin = line.index(syllable, cursor)
        cursor = begin + len(syllable)
        if pos == 0 and begin > 0:
            # line starts with punctuation, substituted w/ spaces
            syllable = (begin * " ") + syllable
        if cursor + 1 > line_len:
            result.append(syllable)
            return result
        upcoming = line[cursor]
        if upcoming != " ":
            result.append(syllable)
            continue
        # absorb the run of spaces that follows this syllable
        scan = cursor
        while scan < line_len and upcoming == " ":
            syllable += " "
            scan += 1
            if scan == line_len:
                result.append(syllable)
                return result
            upcoming = line[scan]
        cursor = scan - 1
        result.append(syllable)
    return result
python
def from_dict(cls, d):
    """Reconstruct a StructureEnvironments object from the dict
    representation produced by its ``as_dict`` method.

    :param d: dict representation of the StructureEnvironments object
    :return: StructureEnvironments object
    """
    # Chemical environments: entries may be serialized as None or 'None'.
    ce_list = [None if (ce_dict == 'None' or ce_dict is None) else
               {int(cn): [None if (ced is None or ced == 'None') else
                          ChemicalEnvironments.from_dict(ced)
                          for ced in ce_dict[cn]]
                for cn in ce_dict}
               for ce_dict in d['ce_list']]
    voronoi = DetailedVoronoiContainer.from_dict(d['voronoi'])
    structure = Structure.from_dict(d['structure'])
    # Neighbors sets are keyed by coordination number per site.
    neighbors_sets = [{int(cn): [cls.NeighborsSet.from_dict(dd=nb_set_dict,
                                                            structure=structure,
                                                            detailed_voronoi=voronoi)
                                 for nb_set_dict in nb_sets]
                       for cn, nb_sets in site_nbs_sets_dict.items()}
                      if site_nbs_sets_dict is not None else None
                      for site_nbs_sets_dict in d['neighbors_sets']]
    info = {key: val for key, val in d['info'].items() if key not in ['sites_info']}
    if 'sites_info' in d['info']:
        # Re-key the per-site info dicts with integer coordination numbers.
        info['sites_info'] = [{'nb_sets_info': {int(cn): {int(inb_set): nb_set_info
                                                          for inb_set, nb_set_info in cn_sets.items()}
                                                for cn, cn_sets in site_info['nb_sets_info'].items()},
                               'time': site_info['time']}
                              if 'nb_sets_info' in site_info else {}
                              for site_info in d['info']['sites_info']]
    return cls(voronoi=voronoi, valences=d['valences'],
               sites_map=d['sites_map'],
               equivalent_sites=[[PeriodicSite.from_dict(psd) for psd in psl]
                                 for psl in d['equivalent_sites']],
               ce_list=ce_list, structure=structure,
               neighbors_sets=neighbors_sets, info=info)
java
/**
 * Displays the admin-tool dialog: forwards explorer-view dialogs and real
 * tool pages to their JSPs, and renders a plain grouping page otherwise.
 *
 * @throws Exception if forwarding or writing the dialog fails
 */
public void displayDialog() throws Exception {
    Map<String, String[]> params = initAdminTool();
    // explorer view dialogs
    if (CmsExplorerDialog.EXPLORER_TOOLS.contains(getCurrentToolPath())) {
        if (getAction() == CmsDialog.ACTION_CANCEL) {
            actionCloseDialog();
            return;
        }
        getToolManager().jspForwardPage(this, CmsToolManager.ADMINVIEW_ROOT_LOCATION + "/tool-fs.jsp", params);
        return;
    }
    // real tool
    if (!getAdminTool().getHandler().getLink().equals(getCms().getRequestContext().getUri())) {
        getToolManager().jspForwardPage(this, getAdminTool().getHandler().getLink(), params);
        return;
    }
    // just grouping
    if (getAction() == CmsDialog.ACTION_CANCEL) {
        actionCloseDialog();
        return;
    }
    JspWriter out = getJsp().getJspContext().getOut();
    out.print(htmlStart());
    out.print(bodyStart(null));
    out.print(dialogStart());
    out.print(dialogContentStart(getParamTitle()));
    out.print(dialogContentEnd());
    out.print(dialogEnd());
    out.print(bodyEnd());
    out.print(htmlEnd());
}
java
/**
 * Logs in with the given credentials, delegating to the overload with the
 * internally configured AVUser subclass.
 */
public static Observable<? extends AVUser> logIn(String username, String password) {
    return logIn(username, password, internalUserClazz());
}
python
def get_table_name(self, ind):
    """Return both the table_name (i.e., 'specimens') and the col_name
    (i.e., 'specimen') for a given index in self.ancestry; a pair of
    empty strings is returned for an out-of-range index.
    """
    if ind >= len(self.ancestry):
        return "", ""
    if ind > -1:
        table_name = self.ancestry[ind]
        # column name is the singular form: table name minus the trailing 's'
        return table_name, table_name[:-1]
    return "", ""
python
def cal_continue(self, list_data):
    """Count how many consecutive steps the series has moved in the same
    direction, scanning backwards from the most recent value.

    :rtype: int
    :returns: signed run length — positive for an upward run, negative
        for a downward run.
    """
    directions = []
    for offset in range(1, len(list_data)):
        if list_data[-offset] > list_data[-offset - 1]:
            directions.append(1)
        else:
            directions.append(-1)
    streak = 0
    for direction in directions:
        if direction != directions[0]:
            break
        streak += 1
    return streak * directions[0]
python
def flatten(data):
    """Return a flattened version of a nested list/tuple.

    Courtesy of https://stackoverflow.com/a/12472564

    Args:
        data (`tuple` or `list`): Input data
    Returns:
        `list`
    """
    if not data:
        return data
    head, rest = data[0], data[1:]
    # exact type check (not isinstance) preserved from the original
    if type(head) in (list, tuple):
        return list(flatten(head)) + list(flatten(rest))
    return list(data[:1]) + list(flatten(rest))
python
def _get_group_from_state(self, sid): """ Args: sid (int): The state identifier Return: int: The group identifier that the state belongs """ for index, selectgroup in enumerate(self.groups): if sid in selectgroup: return index
java
/**
 * Initializes with the trimmed input built up to at least {@code minsize}
 * characters before delegating to the superclass.
 */
public void init(String st, int minsize, int maxsize) {
    super.init(buildString(st.trim(), minsize), minsize, maxsize);
}
java
/**
 * Merges pedigree information into the variant metadata of the given
 * study: for each pedigree member found among the study's individuals,
 * updates family/parent/sex/phenotype fields and annotates (creating if
 * necessary) the sample whose id matches the individual's id.
 *
 * @param pedigree the pedigree to load
 * @param studyId  the id of the study to update
 * @return the (possibly updated) variant metadata
 */
public VariantMetadata loadPedigree(Pedigree pedigree, String studyId) {
    VariantStudyMetadata variantStudyMetadata = getVariantStudyMetadata(studyId);
    if (variantStudyMetadata != null) {
        boolean found;
        org.opencb.biodata.models.metadata.Individual dest = null;
        for (Member src: pedigree.getMembers()) {
            // locate the metadata individual matching this pedigree member
            found = false;
            for (int i = 0; i < variantStudyMetadata.getIndividuals().size(); i++) {
                dest = variantStudyMetadata.getIndividuals().get(i);
                if (dest.getId().equals(src.getName())) {
                    found = true;
                    break;
                }
            }
            if (found) {
                dest.setFamily(pedigree.getName());
                dest.setFather(src.getFather() != null ? src.getFather().getName() : null);
                dest.setMother(src.getMother() != null ? src.getMother().getName() : null);
                dest.setSex(src.getSex().toString());
                dest.setPhenotype(src.getAffectionStatus().toString());
                Sample sample = null;
                // sanity check
                if (dest.getSamples() == null) {
                    logger.warn("Loading pedigree, individual {} without samples: it will be added.", dest.getId());
                    dest.setSamples(new ArrayList<>());
                }
                // find the sample whose id matches the individual's id
                for (Sample s: dest.getSamples()) {
                    if (s.getId().equals(dest.getId())) {
                        sample = s;
                        break;
                    }
                }
                // sample not found, add as a new one
                if (sample == null) {
                    sample = new Sample();
                    sample.setId(dest.getId());
                    sample.setAnnotations(new HashMap<>());
                    dest.getSamples().add(sample);
                }
                if (sample.getAnnotations() == null) {
                    sample.setAnnotations(new HashMap<>());
                }
                // Default annotation (attributes from Individual)
                sample.getAnnotations().put(INDIVIDUAL_ID, src.getName());
                sample.getAnnotations().put(INDIVIDUAL_FAMILY, pedigree.getName());
                if (src.getFather() != null) {
                    sample.getAnnotations().put(INDIVIDUAL_FATHER, src.getFather().getName());
                }
                if (src.getMother() != null) {
                    sample.getAnnotations().put(INDIVIDUAL_MOTHER, src.getMother().getName());
                }
                if (src.getSex() != null) {
                    sample.getAnnotations().put(INDIVIDUAL_SEX, src.getSex().toString());
                }
                if (src.getAffectionStatus() != null) {
                    sample.getAnnotations().put(INDIVIDUAL_PHENOTYPE, src.getAffectionStatus().toString());
                }
                if (src.getAttributes() != null && src.getAttributes().size() > 0) {
                    // Custom annotation
                    for (String key : src.getAttributes().keySet()) {
                        // only copy attributes declared at the pedigree level
                        if (pedigree.getAttributes().get(key) != null) {
                            sample.getAnnotations().put(key, src.getAttributes().get(key).toString());
                        }
                    }
                }
            } else {
                logger.warn("Loading pedigree, individual {} not found in metadata file, it will not be added.", src.getName());
            }
        }
    } else {
        logger.warn("Loading pedigree, nothing to do because study ID '{}' does not exist.", studyId);
    }
    return variantMetadata;
}
python
def descendants(self, node):
    """Return a :class:`QuerySet` with all descendants of the given
    :class:`CTENode` `node`.

    :param node: the :class:`CTENode` whose descendants are required.
    :returns: A :class:`QuerySet` with all descendants of the given `node`.
    """
    # The _cte_node_* attributes are read below, so make sure they exist.
    self._ensure_parameters()
    # The offset CTENode is handled by the CTE WHERE logic in the custom
    # QuerySet; since the compiler includes the offset node itself in the
    # result, it must be excluded here.
    return CTEQuerySet(self.model, using=self._db, offset=node).exclude(pk=node.pk)
java
/**
 * Creates the radio-button group that selects how referenced libraries are
 * handled in the fat JAR (extract, package, or copy), defaulting to the
 * extract option.
 */
protected void createLibraryHandlingGroup(Composite parent) {
    fLibraryHandlingGroup= new Composite(parent, SWT.NONE);
    GridLayout layout= new GridLayout();
    fLibraryHandlingGroup.setLayout(layout);
    fLibraryHandlingGroup.setLayoutData(new GridData(GridData.HORIZONTAL_ALIGN_FILL | GridData.VERTICAL_ALIGN_FILL | GridData.GRAB_HORIZONTAL));
    createLabel(fLibraryHandlingGroup, FatJarPackagerMessages.FatJarPackageWizardPage_libraryHandlingGroupTitle, false);
    // option 1: extract library classes into the JAR
    fExtractJarsRadioButton= new Button(fLibraryHandlingGroup, SWT.RADIO | SWT.LEFT);
    fExtractJarsRadioButton.setText(FatJarPackagerMessages.FatJarPackageWizardPage_extractJars_text);
    fExtractJarsRadioButton.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
    fExtractJarsRadioButton.addListener(SWT.Selection, new Listener() {
        @Override
        public void handleEvent(Event event) {
            if (((Button)event.widget).getSelection())
                fLibraryHandler= new ExtractLibraryHandler();
        }
    });
    // option 2: package libraries as nested JARs
    fPackageJarsRadioButton= new Button(fLibraryHandlingGroup, SWT.RADIO | SWT.LEFT);
    fPackageJarsRadioButton.setText(FatJarPackagerMessages.FatJarPackageWizardPage_packageJars_text);
    fPackageJarsRadioButton.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
    fPackageJarsRadioButton.addListener(SWT.Selection, new Listener() {
        @Override
        public void handleEvent(Event event) {
            if (((Button)event.widget).getSelection())
                fLibraryHandler= new PackageLibraryHandler();
        }
    });
    // option 3: copy library JARs next to the output JAR
    fCopyJarFilesRadioButton= new Button(fLibraryHandlingGroup, SWT.RADIO | SWT.LEFT);
    fCopyJarFilesRadioButton.setText(FatJarPackagerMessages.FatJarPackageWizardPage_copyJarFiles_text);
    fCopyJarFilesRadioButton.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
    fCopyJarFilesRadioButton.addListener(SWT.Selection, new Listener() {
        @Override
        public void handleEvent(Event event) {
            if (((Button)event.widget).getSelection())
                fLibraryHandler= new CopyLibraryHandler();
        }
    });
    // set default for first selection (no previous widget settings to restore)
    setLibraryHandler(new ExtractLibraryHandler());
}
python
def conditional_jit(function=None, **kwargs):  # noqa: D202
    """Use numba's jit decorator if numba is installed.

    Notes
    -----
    If called without arguments then return wrapped function.

    @conditional_jit
    def my_func():
        return

    else called with arguments

    @conditional_jit(nopython=True)
    def my_func():
        return
    """

    def decorate(func):
        # fall back to the plain function when numba is unavailable
        try:
            numba = importlib.import_module("numba")
            return numba.jit(**kwargs)(func)
        except ImportError:
            return func

    return decorate(function) if function else decorate
python
def postActivate_(self):
    """Whatever you need to do after creating the shmem client: build the
    YOLO analyzer if enough GPU memory is available, otherwise record a
    warning and disable analysis.
    """
    if not self.requiredGPU_MB(self.required_mb):
        self.warning_message = "WARNING: not enough GPU memory!"
        self.analyzer = None
    else:
        self.analyzer = YoloV3TinyAnalyzer(verbose=self.verbose)
python
def _to_dict(objects):
    '''
    Potentially interprets a string as JSON for usage with mongo
    '''
    if isinstance(objects, six.string_types):
        try:
            objects = salt.utils.json.loads(objects)
        except ValueError as err:
            # surface the parse failure in the log, then re-raise
            log.error("Could not parse objects: %s", err)
            raise err
    return objects
python
def private_config_file(self):
    """Return the private-config file for this IOU VM.

    :returns: path to config file. None if the file doesn't exist
    """
    config_path = os.path.join(self.working_dir, 'private-config.cfg')
    return config_path if os.path.exists(config_path) else None
java
/**
 * Asynchronously creates or updates the management policy of the given
 * storage account, unwrapping the service response body.
 *
 * @param resourceGroupName the resource group name
 * @param accountName the storage account name
 * @param policy the management policy to set
 * @return an Observable emitting the resulting ManagementPolicyInner
 */
public Observable<ManagementPolicyInner> createOrUpdateAsync(String resourceGroupName, String accountName, ManagementPolicySchema policy) {
    return createOrUpdateWithServiceResponseAsync(resourceGroupName, accountName, policy).map(new Func1<ServiceResponse<ManagementPolicyInner>, ManagementPolicyInner>() {
        @Override
        public ManagementPolicyInner call(ServiceResponse<ManagementPolicyInner> response) {
            return response.body();
        }
    });
}
python
def explained_variance(returns, values):
    """Return the fraction of the variance in *returns* that is explained
    by *values* (1.0 means a perfect fit, 0.0 none).
    """
    residual_variance = torch.var(returns - values)
    ratio = 1 - residual_variance / torch.var(returns)
    return ratio.item()
python
def query(self, sql):
    """Execute an SQL query on the server and fetch the resulting XML
    document.

    :param sql: the SQL query string to execute
    :returns: message received (may be empty) from LDBD Server as a string
    :raises LDBDClientException: if the server reports a non-zero status
    """
    # protocol frames are NUL-delimited
    msg = "QUERY\0" + sql + "\0"
    self.sfile.write(msg)
    ret, output = self.__response__()
    reply = str(output[0])
    if ret:
        msg = "Error executing query on server %d:%s" % (ret, reply)
        # bug fix: the original used Python 2 `raise Exc, msg` syntax,
        # which is a SyntaxError in Python 3
        raise LDBDClientException(msg)
    return reply
python
def user_data(self, access_token, *args, **kwargs):
    """Loads user data from service, requiring a public GitHub email."""
    try:
        data = super().user_data(access_token, *args, **kwargs)
        if not data.get('email'):
            raise AuthFailed(self, _('You must have a public email configured in GitHub. '
                                     'Goto Settings/Profile and choose your public email'))
    except AuthFailed:
        raise AuthFailed(self, _('Sorry, you do not seem to be a public member of %s') % self.setting('NAME'))
    return data
java
/**
 * Builds the JSON service root document, listing every entity set that is
 * included in the service document plus all singletons.
 *
 * @return the service document as a UTF-8 JSON string
 * @throws ODataRenderException if the document cannot be rendered
 */
public String buildJson() throws ODataRenderException {
    LOG.debug("Start building Json service root document");
    try (ByteArrayOutputStream stream = new ByteArrayOutputStream()) {
        JsonGenerator jsonGenerator = JSON_FACTORY.createGenerator(stream, JsonEncoding.UTF8);
        jsonGenerator.writeStartObject();
        jsonGenerator.writeStringField(CONTEXT, getContextURL(uri, entityDataModel));
        jsonGenerator.writeArrayFieldStart(VALUE);
        // entity sets flagged for the service document
        List<EntitySet> entities = entityDataModel.getEntityContainer().getEntitySets();
        for (EntitySet entity : entities) {
            if (entity.isIncludedInServiceDocument()) {
                writeObject(jsonGenerator, entity);
            }
        }
        // singletons are always included
        List<Singleton> singletons = entityDataModel.getEntityContainer().getSingletons();
        for (Singleton singleton : singletons) {
            writeObject(jsonGenerator, singleton);
        }
        jsonGenerator.writeEndArray();
        jsonGenerator.writeEndObject();
        jsonGenerator.close();
        return stream.toString(StandardCharsets.UTF_8.name());
    } catch (IOException e) {
        throw new ODataRenderException("It is unable to render service document", e);
    }
}
java
/**
 * Builds a GetDatabaseOperation for the given database path, key and name
 * using this instance's operation factory.
 */
public GetDatabaseOperation buildGetDatabaseOperation(String databasePath, String key, String name) {
    return new GetDatabaseOperation(getOperationFactory(), databasePath, key, name);
}
java
/**
 * Builds a component prefix of the form "tokenName.SimpleClassName.",
 * reducing a fully-qualified class name to its simple name first.
 */
protected static String makeComponentPrefix(String tokenName, String className) {
    String simpleClassName = className;
    if (null != className) {
        int lastDotIdx = className.lastIndexOf(".");
        if (lastDotIdx > -1) {
            // strip the package: keep everything after the last dot
            simpleClassName = className.substring(lastDotIdx + 1, className.length());
        }
    }
    return tokenName + "." + simpleClassName + ".";
}
python
def compute_correlation(matrix1, matrix2, return_nans=False):
    """Compute the Pearson correlation between the rows of two matrices.

    Correlates each row of *matrix1* with each row of *matrix2*; when the
    two inputs are the same this is an auto-correlation and the result is
    symmetric. The column counts MUST agree. The computation is reduced
    to a matrix multiplication via the BLAS GEMM wrapper, which is about
    an order of magnitude faster than numpy.corrcoef.

    By default (return_nans=False), rows containing NaNs yield zeros;
    with return_nans=True those zeros become np.nan in the output.

    Parameters
    ----------
    matrix1: 2D array in shape [r1, c]
        MUST be continuous and row-major
    matrix2: 2D array in shape [r2, c]
        MUST be continuous and row-major
    return_nans: bool, default:False
        If False, return zeros for NaNs; if True, return NaNs

    Returns
    -------
    corr_data: 2D array in shape [r1, r2]
        continuous and row-major in np.float32
    """
    m1 = matrix1.astype(np.float32)
    m2 = matrix2.astype(np.float32)
    n_rows1, n_cols1 = m1.shape
    n_rows2, n_cols2 = m2.shape
    if n_cols1 != n_cols2:
        raise ValueError('Dimension discrepancy')
    # normalize both operands so the dot product equals the correlation
    m1 = _normalize_for_correlation(m1, 1, return_nans=return_nans)
    m2 = _normalize_for_correlation(m2, 1, return_nans=return_nans)
    corr_data = np.empty((n_rows1, n_rows2), dtype=np.float32, order='C')
    # blas routine is column-major
    blas.compute_single_matrix_multiplication('T', 'N',
                                              n_rows2, n_rows1, n_cols1,
                                              1.0, m2, n_cols2,
                                              m1, n_cols1,
                                              0.0, corr_data, n_rows2)
    return corr_data
java
/**
 * Configures JFinal constants: loads the property file, sets the JSP view
 * type, dev mode, default encoding and error views, records the app name,
 * then delegates any extra constant configuration to subclasses.
 */
public void configConstant(Constants me) {
    //load properties
    this.loadPropertyFile();
    me.setViewType(ViewType.JSP);
    me.setDevMode(this.getAppDevMode());
    me.setEncoding(Const.DEFAULT_ENCODING);
    me.setError404View(PageViewKit.get404PageView());
    me.setError500View(PageViewKit.get500PageView());
    me.setError403View(PageViewKit.get403PageView());
    JFinalConfigExt.APP_NAME = this.getAppName();
    // config others
    configMoreConstants(me);
}
java
/**
 * Adds a place id to the list of excluded place ids, lazily creating the
 * backing list on first use.
 *
 * @param placeId the place id to exclude
 */
public void addExcludedlaceId(final String placeId) {
    // bug fix: the null check previously tested countryCodes, so a null
    // excludePlaceIds list caused a NullPointerException whenever
    // countryCodes was already initialized.
    if (null == this.excludePlaceIds) {
        this.excludePlaceIds = new ArrayList<String>();
    }
    this.excludePlaceIds.add(placeId);
}
java
/**
 * Parses the given XML string (encoded as UTF-8 bytes) into a DOM
 * Document.
 */
@Override
public Document parseXML(String string) {
    try {
        return loadXML(new ByteArrayInputStream(string.getBytes("UTF-8")));
    } catch (UnsupportedEncodingException e) {
        // UTF-8 support is mandated by the JVM spec, so this is unreachable
        throw new BugError("JVM with missing support for UTF-8.");
    }
}
python
def parse_numeric(self):
    """Tokenize a Fortran numerical value: an optional leading minus,
    digits with at most one decimal point, and an optional e/E/d/D
    exponent with an optional sign.
    """
    token = ''
    seen_point = False
    if self.char == '-':
        token += self.char
        self.update_chars()
    while self.char.isdigit() or (self.char == '.' and not seen_point):
        # Only allow one decimal point
        if self.char == '.':
            seen_point = True
        token += self.char
        self.update_chars()
    # Check for float exponent (Fortran accepts d/D as well as e/E)
    if self.char in 'eEdD':
        token += self.char
        self.update_chars()
        if self.char in '+-':
            token += self.char
            self.update_chars()
        while self.char.isdigit():
            token += self.char
            self.update_chars()
    return token
java
/**
 * Updates a gateway group: applies the pre-execution hook to the request
 * and then executes it.
 */
@Override
public UpdateGatewayGroupResult updateGatewayGroup(UpdateGatewayGroupRequest request) {
    request = beforeClientExecution(request);
    return executeUpdateGatewayGroup(request);
}
java
/**
 * Resolves the distinguished name for the given caller.
 *
 * When no caller search base is configured, the DN is composed directly
 * from the caller name attribute and base DN; otherwise an LDAP search is
 * performed. Returns null when the search matches multiple entries.
 *
 * @throws IllegalStateException when the bind or the search fails
 */
private String getUserDn(String callerName, String filter, SearchControls controls) {
    String userDn = null;
    String searchBase = idStoreDefinition.getCallerSearchBase();
    if (searchBase == null || searchBase.isEmpty()) {
        // no search base: compose the DN directly
        userDn = idStoreDefinition.getCallerNameAttribute() + "=" + callerName + "," + idStoreDefinition.getCallerBaseDn();
    } else {
        DirContext ctx = null;
        try {
            ctx = bind();
        } catch (NamingException e) {
            Tr.error(tc, "JAVAEESEC_ERROR_EXCEPTION_ON_BIND", new Object[] { this.idStoreDefinition.getBindDn(), e });
            throw new IllegalStateException(e);
        }
        try {
            if (tc.isDebugEnabled()) {
                Tr.debug(tc, "JNDI_CALL search", new Object[] { searchBase, filter, printControls(controls) });
            }
            NamingEnumeration<SearchResult> ne = ctx.search(new LdapName(searchBase), filter, controls);
            if (ne.hasMoreElements()) {
                userDn = ne.nextElement().getNameInNamespace();
                if (ne.hasMoreElements()) {
                    // ambiguous match: refuse to pick one
                    Tr.warning(tc, "JAVAEESEC_WARNING_MULTI_CALLER_LDAP", new Object[] { callerName, filter, searchBase });
                    return null;
                }
            }
        } catch (NamingException e) {
            Tr.error(tc, "JAVAEESEC_ERROR_EXCEPTION_ON_SEARCH", new Object[] { callerName, filter, searchBase, e });
            throw new IllegalStateException(e);
        } finally {
            // bug fix: the bound context was previously never closed,
            // leaking an LDAP connection on every call
            try {
                ctx.close();
            } catch (NamingException e) {
                // best-effort close; nothing useful to do here
            }
        }
    }
    return userDn;
}
java
/**
 * Creates a TimerJobEntity for the given timer event definition, resolving
 * the due date through the configured business calendar (timeDate,
 * timeCycle or timeDuration, optionally with a custom calendar name) and
 * wiring the timer to the execution when one is supplied.
 *
 * @throws ActivitiException when none of timeDate/timeCycle/timeDuration
 *         is configured, or the resolved due-date value has an
 *         unsupported type
 */
public static TimerJobEntity createTimerEntityForTimerEventDefinition(TimerEventDefinition timerEventDefinition, boolean isInterruptingTimer,
        ExecutionEntity executionEntity, String jobHandlerType, String jobHandlerConfig) {
    ProcessEngineConfigurationImpl processEngineConfiguration = Context.getProcessEngineConfiguration();
    String businessCalendarRef = null;
    Expression expression = null;
    ExpressionManager expressionManager = processEngineConfiguration.getExpressionManager();
    // ACT-1415: timer-declaration on start-event may contain expressions NOT
    // evaluating variables but other context, evaluating should happen nevertheless
    VariableScope scopeForExpression = executionEntity;
    if (scopeForExpression == null) {
        scopeForExpression = NoExecutionVariableScope.getSharedInstance();
    }
    // pick the calendar and expression from whichever time attribute is set
    if (StringUtils.isNotEmpty(timerEventDefinition.getTimeDate())) {
        businessCalendarRef = DueDateBusinessCalendar.NAME;
        expression = expressionManager.createExpression(timerEventDefinition.getTimeDate());
    } else if (StringUtils.isNotEmpty(timerEventDefinition.getTimeCycle())) {
        businessCalendarRef = CycleBusinessCalendar.NAME;
        expression = expressionManager.createExpression(timerEventDefinition.getTimeCycle());
    } else if (StringUtils.isNotEmpty(timerEventDefinition.getTimeDuration())) {
        businessCalendarRef = DurationBusinessCalendar.NAME;
        expression = expressionManager.createExpression(timerEventDefinition.getTimeDuration());
    }
    // a custom calendar name overrides the default calendar choice
    if (StringUtils.isNotEmpty(timerEventDefinition.getCalendarName())) {
        businessCalendarRef = timerEventDefinition.getCalendarName();
        Expression businessCalendarExpression = expressionManager.createExpression(businessCalendarRef);
        businessCalendarRef = businessCalendarExpression.getValue(scopeForExpression).toString();
    }
    if (expression == null) {
        throw new ActivitiException("Timer needs configuration (either timeDate, timeCycle or timeDuration is needed) (" + timerEventDefinition.getId() + ")");
    }
    BusinessCalendar businessCalendar = processEngineConfiguration.getBusinessCalendarManager().getBusinessCalendar(businessCalendarRef);
    String dueDateString = null;
    Date duedate = null;
    // the expression may yield a String, a Date or a Joda DateTime
    Object dueDateValue = expression.getValue(scopeForExpression);
    if (dueDateValue instanceof String) {
        dueDateString = (String) dueDateValue;
    } else if (dueDateValue instanceof Date) {
        duedate = (Date) dueDateValue;
    } else if (dueDateValue instanceof DateTime) {
        //JodaTime support
        duedate = ((DateTime) dueDateValue).toDate();
    } else if (dueDateValue != null) {
        throw new ActivitiException("Timer '" + executionEntity.getActivityId() + "' was not configured with a valid duration/time, either hand in a java.util.Date or a String in format 'yyyy-MM-dd'T'hh:mm:ss'");
    }
    if (duedate == null && dueDateString != null) {
        duedate = businessCalendar.resolveDuedate(dueDateString);
    }
    TimerJobEntity timer = null;
    if (duedate != null) {
        timer = Context.getCommandContext().getTimerJobEntityManager().create();
        timer.setJobType(JobEntity.JOB_TYPE_TIMER);
        timer.setRevision(1);
        timer.setJobHandlerType(jobHandlerType);
        timer.setJobHandlerConfiguration(jobHandlerConfig);
        timer.setExclusive(true);
        timer.setRetries(processEngineConfiguration.getAsyncExecutorNumberOfRetries());
        timer.setDuedate(duedate);
        if (executionEntity != null) {
            timer.setExecution(executionEntity);
            timer.setProcessDefinitionId(executionEntity.getProcessDefinitionId());
            timer.setProcessInstanceId(executionEntity.getProcessInstanceId());
            // Inherit tenant identifier (if applicable)
            if (executionEntity.getTenantId() != null) {
                timer.setTenantId(executionEntity.getTenantId());
            }
        }
    }
    // NOTE(review): if duedate resolved to null while timeCycle is set,
    // `timer` is still null here and timer.setRepeat below would throw a
    // NullPointerException — confirm duedate is always non-null for cycles.
    if (StringUtils.isNotEmpty(timerEventDefinition.getTimeCycle())) {
        // See ACT-1427: A boundary timer with a cancelActivity='true', doesn't need to repeat itself
        boolean repeat = !isInterruptingTimer;
        // ACT-1951: intermediate catching timer events shouldn't repeat according to spec
        if (executionEntity != null) {
            FlowElement currentElement = executionEntity.getCurrentFlowElement();
            if (currentElement instanceof IntermediateCatchEvent) {
                repeat = false;
            }
        }
        if (repeat) {
            String prepared = prepareRepeat(dueDateString);
            timer.setRepeat(prepared);
        }
    }
    if (timer != null && executionEntity != null) {
        timer.setExecution(executionEntity);
        timer.setProcessDefinitionId(executionEntity.getProcessDefinitionId());
        // Inherit tenant identifier (if applicable)
        if (executionEntity.getTenantId() != null) {
            timer.setTenantId(executionEntity.getTenantId());
        }
    }
    return timer;
}
java
/**
 * Returns the generic type parameter declaration (name plus extends
 * clause) for up to three type parameters.
 *
 * @param includeBrackets whether to wrap a non-empty result in '&lt;' and '&gt;'
 * @return the generics string, or "" when the type is not generic
 */
public String getTypeGeneric(boolean includeBrackets) {
    if (isTypeGeneric() == false) {
        return "";
    }
    String result = typeGenericName[0] + typeGenericExtends[0];
    if (typeGenericExtends.length > 1) {
        result += ", " + typeGenericName[1] + typeGenericExtends[1];
        if (typeGenericExtends.length > 2) {
            result += ", " + typeGenericName[2] + typeGenericExtends[2];
        }
    }
    return includeBrackets && result.length() > 0 ? '<' + result + '>' : result;
}
java
/**
 * Registers a model element type in both lookup maps: by its qualified XML
 * name and by its instance class.
 *
 * @param modelElementType  the type descriptor to register
 * @param instanceType      the concrete instance class for that type
 */
public void registerType(ModelElementType modelElementType, Class<? extends ModelElementInstance> instanceType) {
    QName typeQName = ModelUtil.getQName(
        modelElementType.getTypeNamespace(), modelElementType.getTypeName());
    typesByName.put(typeQName, modelElementType);
    typesByClass.put(instanceType, modelElementType);
}
python
def unflatten(flat_dict, separator='_'):
    """Build a nested (hierarchical) dictionary from a flattened one.

    Assumes no lists are present in the values.

    :param flat_dict: a dictionary with no hierarchy
    :param separator: a string that separates the parts of a compound key
    :return: a dictionary with hierarchy
    """
    _unflatten_asserts(flat_dict, separator)

    nested = {}
    for compound_key, value in flat_dict.items():
        parts = compound_key.split(separator)
        # Walk/create intermediate dicts, then assign the leaf value.
        cursor = nested
        for part in parts[:-1]:
            cursor = cursor.setdefault(part, {})
        cursor[parts[-1]] = value
    return nested
java
/**
 * Retrieves a live-stream security policy by its name.
 *
 * @param request  request carrying the policy name; must not be null and the
 *                 name must be non-empty
 * @return the security policy details returned by the service
 */
public GetSecurityPolicyResponse getSecurityPolicy(GetSecurityPolicyRequest request) {
    checkNotNull(request, "The parameter request should NOT be null.");
    checkStringNotEmpty(request.getName(),
            "The parameter name should NOT be null or empty string.");

    InternalRequest httpRequest =
            createRequest(HttpMethodName.GET, request, LIVE_SECURITY_POLICY, request.getName());
    return invokeHttpClient(httpRequest, GetSecurityPolicyResponse.class);
}
java
/**
 * Applies the navigation light or dark theme to the given context, based on
 * whether night mode is enabled.  Theme resource ids come either from saved
 * preferences (when present) or from the view's XML attributes, falling back
 * to the built-in defaults when unset (resource id 0).
 *
 * @param context  context whose theme is updated
 * @param attrs    attribute set carrying optional navigationLightTheme /
 *                 navigationDarkTheme overrides
 */
static void setTheme(Context context, AttributeSet attrs) {
    boolean nightModeEnabled = isNightModeEnabled(context);

    if (shouldSetThemeFromPreferences(context)) {
        int prefLightTheme = retrieveThemeResIdFromPreferences(context, NavigationConstants.NAVIGATION_VIEW_LIGHT_THEME);
        int prefDarkTheme = retrieveThemeResIdFromPreferences(context, NavigationConstants.NAVIGATION_VIEW_DARK_THEME);
        prefLightTheme = prefLightTheme == 0 ? R.style.NavigationViewLight : prefLightTheme;
        // BUGFIX: previously tested prefLightTheme here, so a dark theme saved in
        // preferences was silently replaced by the default whenever a light theme was set.
        prefDarkTheme = prefDarkTheme == 0 ? R.style.NavigationViewDark : prefDarkTheme;
        context.setTheme(nightModeEnabled ? prefDarkTheme : prefLightTheme);
        return;
    }

    TypedArray styledAttributes = context.obtainStyledAttributes(attrs, R.styleable.NavigationView);
    int lightTheme = styledAttributes.getResourceId(R.styleable.NavigationView_navigationLightTheme,
        R.style.NavigationViewLight);
    int darkTheme = styledAttributes.getResourceId(R.styleable.NavigationView_navigationDarkTheme,
        R.style.NavigationViewDark);
    styledAttributes.recycle();
    context.setTheme(nightModeEnabled ? darkTheme : lightTheme);
}
java
/**
 * Dispatches a per-residue annotation to the matching
 * {@code StockholmStructure} setter based on the feature name.
 * Unknown feature names are logged and otherwise ignored.
 *
 * @param seqName     name of the sequence the annotation belongs to
 * @param featureName the feature tag identifying the annotation kind
 * @param value       the annotation string for the residues
 */
private void handleResidueAnnotation(String seqName, String featureName, String value) {
    if (featureName.equals(GR_SURFACE_ACCESSIBILITY)) {
        stockholmStructure.addSurfaceAccessibility(seqName, value);
    } else if (featureName.equals(GR_TRANS_MEMBRANE)) {
        stockholmStructure.addTransMembrane(seqName, value);
    } else if (featureName.equals(GR_POSTERIOR_PROBABILITY)) {
        stockholmStructure.addPosteriorProbability(seqName, value);
    } else if (featureName.equals(GR_LIGAND_BINDING)) {
        stockholmStructure.addLigandBinding(seqName, value);
    } else if (featureName.equals(GR_ACTIVE_SITE)) {
        stockholmStructure.addActiveSite(seqName, value);
    } else if (featureName.equals(GR_AS_PFAM_PREDICTED)) {
        stockholmStructure.addASPFamPredicted(seqName, value);
    } else if (featureName.equals(GR_AS_SWISSPROT)) {
        stockholmStructure.addASSwissProt(seqName, value);
    } else if (featureName.equals(GR_INTRON)) {
        stockholmStructure.addIntron(seqName, value);
    } else if (featureName.equals(GR_SECONDARY_STRUCTURE)) {
        stockholmStructure.addSecondaryStructure(seqName, value);
    } else {
        // unknown feature
        logger.warn("Unknown Residue Feature [{}].\nPlease contact the Biojava team.", featureName);
    }
}
python
def read(self, filename):
    '''
    Read a file's content from the storage backend.

    :param string filename: The storage root-relative filename
    :raises FileNotFound: If the file does not exists
    '''
    # Guard first: the backend is only asked for content when the file exists.
    if self.backend.exists(filename):
        return self.backend.read(filename)
    raise FileNotFound(filename)
python
def _table_arg_to_table_ref(value, default_project=None):
    """Coerce a string or Table-like object to a TableReference.

    TableReference instances and any other kinds of objects pass through
    unchanged.
    """
    if isinstance(value, six.string_types):
        value = TableReference.from_string(
            value, default_project=default_project)
    if isinstance(value, (Table, TableListItem)):
        return value.reference
    return value
python
def hash(self):
    """(property) Returns a unique hash value for the result."""
    # Serialize the scalar fields, then append the raw index bytes, and
    # digest the whole thing with MD5.
    fields = [self.N, self.K, self.X, self.L, self.stat, self.cutoff,
              self.pval, self.pval_thresh, self.escore_pval_thresh]
    data_str = ';'.join(str(repr(f)) for f in fields) + ';'
    payload = data_str.encode('UTF-8') + self.indices.tobytes()
    return str(hashlib.md5(payload).hexdigest())
python
def post(self, ddata, url=SETUP_ENDPOINT, referer=SETUP_ENDPOINT):
    """Method to update some attributes on namespace."""
    headers = HEADERS.copy()
    # A None referer means "send no Referer header at all".
    if referer is None:
        headers.pop('Referer')
    else:
        headers['Referer'] = referer

    # append csrftoken
    if 'csrfmiddlewaretoken' not in ddata:
        ddata['csrfmiddlewaretoken'] = self._parent.csrftoken

    response = self._parent.client.post(url, headers=headers, data=ddata)
    if response.status_code == 200:
        self.update()
java
/**
 * Drains queued write tasks, running at most WRITE_BUFFER_DRAIN_THRESHOLD
 * tasks per call; returns early once the buffer is empty.
 */
@GuardedBy("evictionLock")
void drainWriteBuffer() {
    for (int drained = 0; drained < WRITE_BUFFER_DRAIN_THRESHOLD; drained++) {
        Runnable task = writeBuffer.poll();
        if (task == null) {
            return;
        }
        task.run();
    }
}
java
/**
 * Returns the live list of include filters, lazily creating the backing
 * list on first access so callers always receive a mutable, non-null list.
 */
@Generated(value = "com.ibm.jtc.jax.tools.xjc.Driver", date = "2014-06-11T05:49:00-04:00", comments = "JAXB RI v2.2.3-11/28/2011 06:21 AM(foreman)-")
public List<ExceptionClassFilter.Include> getIncludeList() {
    if (this.includeList == null) {
        this.includeList = new ArrayList<ExceptionClassFilter.Include>();
    }
    return this.includeList;
}
java
/**
 * Registers a compiled Kotlin script in the singleton instance's script map
 * under the given name, replacing any script previously stored there.
 *
 * @param name    key the script is stored under
 * @param script  the compiled script to register
 */
public static void putScript(String name, KotlinCompiledScript script) {
    getInstance().scripts.put(name, script);
}
python
def ra(self,*args,**kwargs):
    """
    NAME:
       ra
    PURPOSE:
       return the right ascension
    INPUT:
       t - (optional) time at which to get ra
       obs=[X,Y,Z] - (optional) position of observer (in kpc)
                     (default=Object-wide default)
                     OR Orbit object that corresponds to the orbit
                     of the observer; Y is ignored and always assumed
                     to be zero
       ro= distance in kpc corresponding to R=1. (default=Object-wide default)
    OUTPUT:
       ra(t)
    HISTORY:
       2011-02-23 - Written - Bovy (NYU)
    """
    _check_roSet(self, kwargs, 'ra')
    # First column of the (ra, dec) array is the right ascension.
    return self._radec(*args, **kwargs)[:, 0]
java
/**
 * Consumes and discards any buffered HTTP header bytes so that subsequent
 * reads start at the payload.  No-op when no header stream is pending.
 * NOTE(review): read() appears to null out {@code httpHeaderStream} once the
 * header is fully consumed — hence the null re-check in the loop condition;
 * confirm against the read() implementation.
 *
 * @throws IOException if draining the underlying stream fails
 */
public void skipHttpHeader() throws IOException {
    if (this.httpHeaderStream != null) {
        // Empty the httpHeaderStream
        for (int available = this.httpHeaderStream.available();
                this.httpHeaderStream != null
                    && (available = this.httpHeaderStream.available()) > 0;) {
            // We should be in this loop once only we should only do this
            // buffer allocation once.
            byte [] buffer = new byte[available];
            // The read nulls out httpHeaderStream when done with it so
            // need check for null in the loop control line.
            read(buffer, 0, available);
        }
    }
}
java
/**
 * Removes every commerce price list / user segment relation belonging to the
 * given commerce price list.
 *
 * @param commercePriceListId  the primary key of the commerce price list
 */
@Override
public void removeByCommercePriceListId(long commercePriceListId) {
    List<CommercePriceListUserSegmentEntryRel> rels = findByCommercePriceListId(
        commercePriceListId, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null);

    for (CommercePriceListUserSegmentEntryRel rel : rels) {
        remove(rel);
    }
}
python
def sort_versions(self):
    """Sort entries by version.

    The order is typically descending, but package order functions can
    change this.
    """
    if self.sorted:
        return

    # Give each configured orderer a chance; the first one that returns a
    # non-None reordering wins.
    for orderer in (self.solver.package_orderers or []):
        reordered = orderer.reorder(self.entries, key=lambda entry: entry.package)
        if reordered is None:
            continue
        self.entries = reordered
        self.sorted = True
        if self.pr:
            self.pr("sorted: %s packages: %s", self.package_name, repr(orderer))
        return

    # default ordering is version descending
    self.entries = sorted(self.entries, key=lambda entry: entry.version, reverse=True)
    self.sorted = True
    if self.pr:
        self.pr("sorted: %s packages: version descending", self.package_name)
java
/**
 * Returns the EClass for RMI, resolving it lazily from the registered
 * AFPLIB package on first access and caching the result.
 */
public EClass getRMI() {
    if (rmiEClass != null) {
        return rmiEClass;
    }
    EPackage afplib = EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI);
    rmiEClass = (EClass) afplib.getEClassifiers().get(332);
    return rmiEClass;
}
java
/**
 * Rebuilds the path + query + fragment portion of a URI as a string,
 * omitting blank query and fragment parts.
 *
 * @param uri  the URI to render
 * @return path, optionally followed by {@code ?query} and {@code #fragment}
 */
public static String getURL(URI uri) {
    StringBuilder url = new StringBuilder();
    // append() keeps the original null-tolerant behavior for the path.
    url.append(uri.getPath());

    final String query = uri.getQuery();
    if (StringUtils.isNotBlank(query)) {
        url.append('?').append(query);
    }
    final String fragment = uri.getFragment();
    if (StringUtils.isNotBlank(fragment)) {
        url.append('#').append(fragment);
    }
    return url.toString();
}
java
/**
 * Returns the number of written attribute values, computed from the write
 * dimensions (x times DIM_MINI(y)).
 * NOTE(review): DIM_MINI is defined elsewhere — presumably it clamps dim_y
 * to at least 1 for scalar/spectrum attributes; confirm.
 *
 * @return the written-value count
 * @throws DevFailed propagated by manageExceptions when the attribute value
 *         is in an error state (exact trigger defined elsewhere)
 */
public int getNbWritten() throws DevFailed {
    manageExceptions("getNbWritten");
    return attrval.w_dim.dim_x * DIM_MINI(attrval.w_dim.dim_y);
}
python
def otsu(fpath):
    """ Returns value of otsu threshold for an image """
    grey_image = imread(fpath, as_grey=True)
    return skimage.filter.threshold_otsu(grey_image)
java
/**
 * SAX end-of-element callback.  Maps the local element name to an
 * {@code XDB.Element} and updates the parse state accordingly; the shared
 * text buffer is always cleared afterwards.
 *
 * @param uri element namespace URI (unused here)
 * @param l   local element name, matched case-insensitively
 * @param q   qualified element name (unused here)
 */
@Override
public void endElement(String uri, String l, String q) {
    /*
     * 1. If current element is a String, update its value from the string buffer.
     * 2. Add the element to parent.
     */
    XDB.Element element = Enum.valueOf(XDB.Element.class, l.toUpperCase());
    switch (element) {
    case M:
        break;
    case P:
        // Property: bind the accumulated text under the current name.
        _binder.put(_name, _text.toString());
        break;
    case T:
        break;
    case H:
        // Heading row complete: freeze collected column names.
        _heading = _data.toArray(new String[_data.size()]);
        break;
    case R:
        // Row complete: advance the row index.
        ++_rindex;
        break;
    case C:
        // Cell: before a heading exists, cells are heading names; afterwards
        // they are data values bound as name[row].column.
        if (_heading == null) {
            _data.add(_text.toString());
        } else {
            _binder.put(_name + '[' + _rindex + "]." + _heading[_cindex], _text.toString());
            ++_cindex;
        }
        break;
    }
    _text.setLength(0);
}
java
/**
 * Builds a queue reference from the given parameters.
 * Delegates to the three-argument overload, passing {@code null} and
 * {@code false} for the remaining arguments — NOTE(review): their semantics
 * are defined by the overload, which is not visible here.
 *
 * @param params name/value parameters describing the queue
 * @return the constructed queue reference
 * @throws IOException if the delegated build fails
 */
public QueueReference build(Map<String, String> params) throws IOException {
    return build(params, null,false);
}
java
/**
 * Creates or updates an ExpressRoute gateway asynchronously, unwrapping the
 * {@code ServiceResponse} envelope so subscribers receive only the body.
 *
 * @param resourceGroupName                  the resource group name
 * @param expressRouteGatewayName            the gateway name
 * @param putExpressRouteGatewayParameters   the gateway definition to apply
 * @return an observable emitting the created/updated gateway
 */
public Observable<ExpressRouteGatewayInner> createOrUpdateAsync(String resourceGroupName, String expressRouteGatewayName, ExpressRouteGatewayInner putExpressRouteGatewayParameters) {
    return createOrUpdateWithServiceResponseAsync(resourceGroupName, expressRouteGatewayName, putExpressRouteGatewayParameters).map(new Func1<ServiceResponse<ExpressRouteGatewayInner>, ExpressRouteGatewayInner>() {
        @Override
        public ExpressRouteGatewayInner call(ServiceResponse<ExpressRouteGatewayInner> response) {
            return response.body();
        }
    });
}
python
def time_stops(self):
    """
    Valid time steps for this service as a list of datetime objects.

    Only the 'standard' calendar is implemented; any other calendar raises
    NotImplementedError.  Steps start at self.time_start and are generated
    until self.time_end (steps that would overshoot the end are dropped).
    """
    if not self.supports_time:
        return []

    if self.service.calendar == 'standard':
        units = self.service.time_interval_units
        interval = self.service.time_interval
        steps = [self.time_start]

        if units in ('years', 'decades', 'centuries'):
            # Calendar-based stepping: convert the unit into whole years.
            if units == 'years':
                years = interval
            elif units == 'decades':
                years = 10 * interval
            else:
                years = 100 * interval
            # NOTE(review): replace(year=...) raises ValueError for Feb 29 in
            # a non-leap target year — confirm inputs avoid that case.
            next_value = lambda x: x.replace(year=x.year + years)
        elif units == 'months':
            def _fn(x):
                # Advance by `interval` months, clamping the day-of-month to
                # the last valid day of the target month.
                year = x.year + (x.month+interval-1) // 12
                month = (x.month+interval) % 12 or 12
                day = min(x.day, calendar.monthrange(year, month)[1])
                return x.replace(year=year, month=month, day=day)
            next_value = _fn
        else:
            # Fixed-duration stepping via timedelta.
            if units == 'milliseconds':
                delta = timedelta(milliseconds=interval)
            elif units == 'seconds':
                delta = timedelta(seconds=interval)
            elif units == 'minutes':
                delta = timedelta(minutes=interval)
            elif units == 'hours':
                delta = timedelta(hours=interval)
            elif units == 'days':
                delta = timedelta(days=interval)
            elif units == 'weeks':
                delta = timedelta(weeks=interval)
            else:
                raise ValidationError(
                    "Service has an invalid time_interval_units: {}".format(self.service.time_interval_units)
                )
            next_value = lambda x: x + delta

        # Accumulate steps; stop once the next step would pass time_end.
        while steps[-1] < self.time_end:
            value = next_value(steps[-1])
            if value > self.time_end:
                break
            steps.append(value)
        return steps
    else:
        # TODO
        raise NotImplementedError
java
/**
 * Deletes a long-term-retention backup asynchronously, unwrapping the
 * {@code ServiceResponse} envelope so subscribers observe only completion.
 *
 * @param locationName                 the location of the backup
 * @param longTermRetentionServerName  the server the backup belongs to
 * @param longTermRetentionDatabaseName the database the backup belongs to
 * @param backupName                   the name of the backup to delete
 * @return an observable completing when the deletion finishes
 */
public Observable<Void> deleteAsync(String locationName, String longTermRetentionServerName, String longTermRetentionDatabaseName, String backupName) {
    return deleteWithServiceResponseAsync(locationName, longTermRetentionServerName, longTermRetentionDatabaseName, backupName).map(new Func1<ServiceResponse<Void>, Void>() {
        @Override
        public Void call(ServiceResponse<Void> response) {
            return response.body();
        }
    });
}
python
def _set_group_best(self, v, load=False):
    """
    Setter method for group_best, mapped from YANG variable /routing_system/route_map/content/match/additional_paths/advertise_set/group_best (empty)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_group_best is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_group_best() directly.

    YANG Description: BGP Add-Path advertise group-best path
    """
    # Generated pyangbind setter: coerce the input into the YANG-typed
    # wrapper class, rejecting values incompatible with the 'empty' type.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="group-best", rest_name="group-best", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'BGP Add-Path advertise group-best path', u'code-name': u'additional-paths-advertise-set-group-best', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='empty', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with the structured error payload expected by callers.
        raise ValueError({
            'error-string': """group_best must be of a type compatible with empty""",
            'defined-type': "empty",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="group-best", rest_name="group-best", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'BGP Add-Path advertise group-best path', u'code-name': u'additional-paths-advertise-set-group-best', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='empty', is_config=True)""",
        })

    self.__group_best = t
    if hasattr(self, '_set'):
        self._set()
java
/**
 * Defines the key areas (indexes) for this record: a unique ID key and a
 * unique compound key on filename + number.
 */
public void setupKeys() {
    KeyAreaInfo idKey = new KeyAreaInfo(this, Constants.UNIQUE, ID_KEY);
    idKey.addKeyField(ID, Constants.ASCENDING);

    KeyAreaInfo filenameKey = new KeyAreaInfo(this, Constants.UNIQUE, KEY_FILENAME_KEY);
    filenameKey.addKeyField(KEY_FILENAME, Constants.ASCENDING);
    filenameKey.addKeyField(KEY_NUMBER, Constants.ASCENDING);
}
python
def change_time(filename, newtime):
    """Change the time of a process or group of processes by writing a new
    time to the time file."""
    # libfaketime expects an absolute timestamp prefixed with '@'.
    stamp = "@" + newtime.strftime("%Y-%m-%d %H:%M:%S")
    with open(filename, "w") as time_file:
        time_file.write(stamp)
python
def tail(self, rows: int=5):
    """
    Returns the main dataframe's tail

    :param rows: number of rows to print, defaults to 5
    :param rows: int, optional
    :return: a pandas dataframe
    :rtype: pd.DataFrame

    :example: ``ds.tail()``
    """
    if self.df is not None:
        return self.df.tail(rows)
    # No dataframe loaded: warn and implicitly return None.
    self.warning("Dataframe is empty: no tail available")
python
def time_series_rdd_from_observations(dt_index, df, ts_col, key_col, val_col):
    """
    Instantiates a TimeSeriesRDD from a DataFrame of observations.

    An observation is a row containing a timestamp, a string key, and float value.

    Parameters
    ----------
    dt_index : DateTimeIndex
        The index of the RDD to create. Observations not contained in this
        index will be ignored.
    df : DataFrame
    ts_col : string
        The name of the column in the DataFrame containing the timestamps.
    key_col : string
        The name of the column in the DataFrame containing the keys.
    val_col : string
        The name of the column in the DataFrame containing the values.
    """
    spark_context = df._sc
    factory = spark_context._jvm.com.cloudera.sparkts.api.java.JavaTimeSeriesRDDFactory
    jtsrdd = factory.timeSeriesRDDFromObservations(
        dt_index._jdt_index, df._jdf, ts_col, key_col, val_col)
    return TimeSeriesRDD(None, None, jtsrdd, spark_context)
java
/**
 * Creates the default cell formatter for the given format name, optionally
 * layering locale-specific formats on top via a LocaleSwitchFormatter.
 * Returns {@code null} when no default format message is defined.
 *
 * @param name    logical format name, looked up as {@code format.<name>}
 * @param locales locales to check for locale-specific format overrides
 * @return the formatter, or {@code null} if no default format exists
 */
protected CellFormatter createDefaultFormatter(final String name, final Locale... locales) {

    final String key = String.format("format.%s", name);
    final String defaultFormat = messageResolver.getMessage(key);
    if(defaultFormat == null) {
        return null;
    }

    CellFormatter formatter = createFormatter(defaultFormat);

    // Look up locale-specific formats.
    for(Locale locale : locales) {

        final String localeFormat = messageResolver.getMessage(locale, key, null);
        if(localeFormat == null) {
            continue;
        }

        final LocaleSwitchFormatter switchFormatter;
        if(formatter instanceof LocaleSwitchFormatter) {
            switchFormatter = (LocaleSwitchFormatter) formatter;
        } else {
            // Swap in a LocaleSwitchFormatter wrapping the default.
            switchFormatter = new LocaleSwitchFormatter(formatter);
            formatter = switchFormatter;
        }

        // Register the per-locale format; Japanese gets the full set of
        // Japanese locale variants.
        if(locale.equals(Locale.JAPANESE)) {
            switchFormatter.register(createFormatter(localeFormat), JAPANESE_LOCALES);
        } else {
            switchFormatter.register(createFormatter(localeFormat), locale);
        }
    }

    return formatter;
}
java
/**
 * Reads one byte from the underlying stream, tracking EOF state and the
 * running byte count.  Exceptions are routed through onException.
 *
 * @return the byte read, or -1 at end of stream
 */
@Override
public int read() throws IOException {
    try {
        final int value = super.read();
        eofReached = (value == -1);
        if (!eofReached) {
            bytesRead++;
        }
        return value;
    }
    catch (Exception e) {
        return onException(e);
    }
}
python
def controller(self):
    """
    Check if multiple controllers are connected.

    :returns: Return the controller_id of the active controller.
    :rtype: string
    """
    # No controller_id attribute means nothing is assigned to this account.
    if not hasattr(self, 'controller_id'):
        raise AttributeError('No controllers assigned to this account.')
    if len(self.controller_info['controllers']) > 1:
        raise TypeError(
            'Only one controller per account is supported.'
        )
    return self.controller_id
java
public void setMailTimeout(Integer timeout) throws SecurityException { checkWriteAccess(); boolean hasAccess = ConfigWebUtil.hasAccess(config, SecurityManager.TYPE_MAIL); if (!hasAccess) throw new SecurityException("no access to update mail server settings"); Element mail = _getRootElement("mail"); mail.setAttribute("timeout", Caster.toString(timeout, "")); // config.setMailTimeout(timeout); }
python
def start(self):
    """Overrides default start behaviour by raising ConnectionError instead
    of custom requests_mock.exceptions.NoMockAddress.

    Raises RuntimeError if called on an already-started mock.
    """
    # A non-None _http_last_send means start() already ran once.
    if self._http_last_send is not None:
        raise RuntimeError('HttpMock has already been started')

    # 1) save request.Session.send in self._last_send
    # 2) replace request.Session.send with MockerCore send function
    super(HttpMock, self).start()

    # 3) save MockerCore send function in self._http_last_send
    # 4) replace request.Session.send with HttpMock send function
    self._patch_last_send()
python
def create(backbone: ModelFactory, input_block: typing.Optional[ModelFactory]=None):
    """ Vel factory function """
    # Default to a pass-through input block when none is supplied.
    effective_input = IdentityFactory() if input_block is None else input_block
    return QModelFactory(input_block=effective_input, backbone=backbone)
python
def write(self, script):
    # type: (str) -> None
    """Send a script to FORM.

    Write the given script to the communication channel to FORM. It
    could be buffered and so FORM may not execute the sent script until
    :meth:`flush` or :meth:`read` is called.
    """
    if self._closed:
        raise IOError('tried to write to closed connection')
    stripped = script.strip()
    if not stripped:
        # Nothing but whitespace: send nothing.
        return
    assert self._parentout is not None
    self._parentout.write(stripped)
    self._parentout.write('\n')
python
def version_add(package, version, pkghash, force=False):
    """
    Add a new version for a given package hash.

    Version format needs to follow PEP 440.
    Versions are permanent - once created, they cannot be modified or deleted.
    """
    team, owner, pkg = parse_package(package)
    session = _get_session(team)

    # Validate the version string up front (PEP 440 compliance).
    try:
        Version(version)
    except ValueError:
        url = "https://www.python.org/dev/peps/pep-0440/#examples-of-compliant-version-schemes"
        raise CommandException(
            "Invalid version format; see %s" % url
        )

    # Versions are immutable server-side, so require interactive confirmation
    # unless the caller explicitly passes force=True.
    if not force:
        answer = input("Versions cannot be modified or deleted; are you sure? (y/n) ")
        if answer.lower() != 'y':
            return

    session.put(
        "{url}/api/version/{owner}/{pkg}/{version}".format(
            url=get_registry_url(team),
            owner=owner,
            pkg=pkg,
            version=version
        ),
        data=json.dumps(dict(
            hash=_match_hash(package, pkghash)
        ))
    )
python
def from_api_response(cls, reddit_session, json_dict):
    """Return an instance of the appropriate class from the json dict."""
    # The Multireddit response carries subreddits as [{'name': ...}, ...];
    # convert each entry into a Subreddit object before construction.
    json_dict['subreddits'] = [
        Subreddit(reddit_session, entry['name'])
        for entry in json_dict['subreddits']
    ]
    return cls(reddit_session, None, None, json_dict)
python
def create_detector(self, detector):
    """Creates a new detector.

    Args:
        detector (object): the detector model object.  Will be serialized as
            JSON.

    Returns:
        dictionary of the response (created detector model).
    """
    endpoint = self._u(self._DETECTOR_ENDPOINT_SUFFIX)
    response = self._post(endpoint, data=detector)
    response.raise_for_status()
    return response.json()
java
/**
 * Stores the component binding expression after validating that it is a
 * value-reference expression.
 *
 * @param binding  the binding expression, e.g. {@code #{bean.attr}}
 * @throws IllegalArgumentException if the expression is not a value reference
 */
public void setBinding(String binding) throws JspException {
    if (isValueReference(binding)) {
        _binding = binding;
    } else {
        throw new IllegalArgumentException("not a valid binding: " + binding);
    }
}
python
def parseFullScan(self, i, modifications=True):
    """
    parses scan info for giving a Spectrum Obj for plotting.
    takes significantly longer since it has to unzip/parse xml

    :param i: a row tuple; i[0] is passed to decompressScanInfo, i[1] is the
        peptide sequence and i[2] the peptide id
    :param modifications: whether to load amino-acid modifications from the DB
    :return: the populated PeptideObject, or None if scan decompression fails
    """
    scanObj = PeptideObject()
    peptide = str(i[1])
    pid=i[2]
    if modifications:
        # NOTE(review): SQL built via string interpolation; pid comes from a
        # prior query result rather than user input, but a parameterized
        # query would still be safer.
        sql = 'select aam.ModificationName,pam.Position,aam.DeltaMass from peptidesaminoacidmodifications pam left join aminoacidmodifications aam on (aam.AminoAcidModificationID=pam.AminoAcidModificationID) where pam.PeptideID=%s'%pid
        for row in self.conn.execute(sql):
            # row: (ModificationName, Position, DeltaMass); Position indexes
            # into the peptide sequence.
            scanObj.addModification(peptide[row[1]], str(row[1]), str(row[2]), row[0])
    scanObj.peptide = peptide
    # decompressScanInfo returns falsy on failure, in which case no scan
    # object is produced.
    if self.decompressScanInfo(scanObj, i[0]):
        return scanObj
    return None
python
def ParseRecord(self, parser_mediator, key, structure):
    """Parse each record structure and return an EventObject if applicable.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      key (str): identifier of the structure of tokens.
      structure (pyparsing.ParseResults): structure of tokens derived from
          a line of a text file.

    Raises:
      ParseError: when the structure type is unknown.
    """
    if key == 'logline':
        self._ParseLine(parser_mediator, structure)
    elif key == 'header':
        self._ParseHeader(parser_mediator, structure)
    else:
        raise errors.ParseError(
            'Unable to parse record, unknown structure: {0:s}'.format(key))
java
/**
 * Captures the public and system identifiers from the locator before
 * delegating the rest of the handling to the superclass.
 *
 * @param locator  source locator supplying the document identifiers
 */
public void setLocaterInfo(SourceLocator locator) {
    m_publicId = locator.getPublicId();
    m_systemId = locator.getSystemId();
    super.setLocaterInfo(locator);
}
python
def QA_save_tdx_to_mongo(file_dir, client=DATABASE):
    """save file

    Arguments:
        file_dir {str:direction} -- 文件的地址

    Keyword Arguments:
        client {Mongodb:Connection} -- Mongo Connection (default: {DATABASE})
    """
    reader = TdxMinBarReader()
    coll = client.stock_min_five
    for _dirpath, _dirnames, files in os.walk(file_dir):
        for file in files:
            name = str(file)
            market = name[0:2]
            # Only Shanghai 6xxxxx and Shenzhen 0xxxxx/3xxxxx stock files.
            wanted = (
                (market == 'sh' and int(name[2]) == 6)
                or (market == 'sz' and int(name[2]) == 0)
                or (market == 'sz' and int(name[2]) == 3)
            )
            if not wanted:
                continue
            QA_util_log_info('Now_saving ' + name[2:8] + '\'s 5 min tick')
            fname = file_dir + os.sep + file
            df = reader.get_df(fname)
            df['code'] = name[2:8]
            df['market'] = market
            df['datetime'] = [str(x) for x in list(df.index)]
            df['date'] = [str(x)[0:10] for x in list(df.index)]
            df['time_stamp'] = df['datetime'].apply(
                lambda x: QA_util_time_stamp(x))
            df['date_stamp'] = df['date'].apply(
                lambda x: QA_util_date_stamp(x))
            data_json = json.loads(df.to_json(orient='records'))
            coll.insert_many(data_json)
python
def cross_entropy_loss(logits, one_hot_labels, label_smoothing=0,
                       weight=1.0, scope=None):
    """Define a Cross Entropy loss using softmax_cross_entropy_with_logits.

    It can scale the loss by weight factor, and smooth the labels.

    Args:
      logits: [batch_size, num_classes] logits outputs of the network .
      one_hot_labels: [batch_size, num_classes] target one_hot_encoded labels.
      label_smoothing: if greater than 0 then smooth the labels.
      weight: scale the loss by this factor.
      scope: Optional scope for name_scope.

    Returns:
      A tensor with the softmax_cross_entropy loss.
    """
    logits.get_shape().assert_is_compatible_with(one_hot_labels.get_shape())
    with tf.name_scope(scope, 'CrossEntropyLoss', [logits, one_hot_labels]):
        num_classes = one_hot_labels.get_shape()[-1].value
        one_hot_labels = tf.cast(one_hot_labels, logits.dtype)
        if label_smoothing > 0:
            # Standard label smoothing: shrink the hot class toward
            # 1 - smoothing and spread the remainder uniformly.
            smooth_positives = 1.0 - label_smoothing
            smooth_negatives = label_smoothing / num_classes
            one_hot_labels = one_hot_labels * smooth_positives + smooth_negatives
        # NOTE(review): uses the deprecated TF1 contrib op with the historical
        # (logits, labels) argument order — do not swap the arguments.
        cross_entropy = tf.contrib.nn.deprecated_flipped_softmax_cross_entropy_with_logits(
            logits, one_hot_labels, name='xentropy')
        weight = tf.convert_to_tensor(weight,
                                      dtype=logits.dtype.base_dtype,
                                      name='loss_weight')
        loss = tf.multiply(weight, tf.reduce_mean(cross_entropy), name='value')
        # Register the loss so it can be collected with the model's losses.
        tf.add_to_collection(LOSSES_COLLECTION, loss)
        return loss
python
def position_result_list(change_list):
    """ Returns a template which iters through the models and appends a new
    position column. """
    result = result_list(change_list)

    # Remove sortable attributes
    for header in result['result_headers']:
        header['sorted'] = False
        if header['sortable']:
            header['class_attrib'] = mark_safe(' class="sortable"')

    # Append position <th> element
    result['result_headers'].append({
        'url_remove': '?o=',
        'sort_priority': 1,
        'sortable': True,
        'class_attrib': mark_safe(' class="sortable sorted ascending"'),
        'sorted': True,
        'text': 'position',
        'ascending': True,
        'url_primary': '?o=-1',
        'url_toggle': '?o=-1',
    })

    # Append the editable field to every result item
    for index, row in enumerate(result['results']):
        obj = change_list.result_list[index]

        # Get (or lazily create) the position object for this row.
        c_type = ContentType.objects.get_for_model(obj)
        try:
            object_position = ObjectPosition.objects.get(
                content_type__pk=c_type.id, object_id=obj.id)
        except ObjectPosition.DoesNotExist:
            object_position = ObjectPosition.objects.create(content_object=obj)

        # Add the <td>
        html = ('<td><input class="vTextField" id="id_position-{0}"'
                ' maxlength="10" name="position-{0}" type="text"'
                ' value="{1}" /></td>').format(object_position.id,
                                              object_position.position)
        row.append(mark_safe(html))
    return result
python
async def check_passwd(self, identity: str, passwd: str
                       ) -> SessionIdentity :
    """Verify a login identity by password."""
    assert identity

    raw, _ = await self._client.get(f"{self._prefix_identity}/{identity}")
    if raw is None:
        logger.debug(f'Not found identity: {identity}')
        raise Unauthorized(f"无此登录身份'{identity}'")

    profile = json.loads(raw.decode('utf-8'))
    user_id = profile['user_id']
    identity = profile['identity']

    # Compare against the stored sha256_crypt hash.
    if sha256_crypt.verify(passwd, profile['hashed']):
        return SessionIdentity(user_id=user_id, identity=identity)
    raise Unauthorized(f"登录身份'{identity}'认证失败")
python
def watermark(url, args=''):
    """
    Returns the URL to a watermarked copy of the image specified.
    """
    # First comma-separated argument is the watermark name; the rest are
    # key=value options overriding the defaults below.
    arg_list = args.split(',')
    params = dict(
        name=arg_list.pop(0),
        opacity=0.5,
        tile=False,
        scale=1.0,
        greyscale=False,
        rotation=0,
        position=None,
        quality=QUALITY,
        obscure=OBSCURE_ORIGINAL,
        random_position_once=RANDOM_POSITION_ONCE,
    )
    params['url'] = unquote(url)

    # Per-option value converters; unknown options are ignored.
    as_flag = lambda v: bool(int(v))
    converters = {
        'position': str,
        'opacity': utils._percent,
        'tile': as_flag,
        'scale': str,
        'greyscale': as_flag,
        'rotation': str,
        'quality': int,
        'obscure': as_flag,
        'random_position_once': as_flag,
    }
    for arg in arg_list:
        key, value = arg.split('=')
        key, value = key.strip(), value.strip()
        if key in converters:
            params[key] = converters[key](value)

    return Watermarker()(**params)
java
/**
 * Resets this buffer to its initial empty state.  Before dropping the
 * internal lists, one byte buffer is handed back to the shared
 * {@code StaticBuffers} pool so it can be reused by later allocations.
 */
public void clear() {
    //release a static buffer if possible
    if (_buffersToReuse != null && !_buffersToReuse.isEmpty()) {
        StaticBuffers.getInstance().releaseByteBuffer(BUFFER_KEY, _buffersToReuse.peek());
    } else if (!_buffers.isEmpty()) {
        StaticBuffers.getInstance().releaseByteBuffer(BUFFER_KEY, _buffers.get(0));
    }

    if (_buffersToReuse != null) {
        _buffersToReuse.clear();
    }
    _buffers.clear();
    // Reset read/flush cursors and logical size.
    _position = 0;
    _flushPosition = 0;
    _size = 0;
}
java
/**
 * Parses the response body into a Jsoup document, using the response's
 * charset and request URI (as base URI) for resolution.
 *
 * @param config      the chained HTTP configuration (unused here)
 * @param fromServer  the server response wrapper
 * @return the parsed document
 */
public static Object jsoupParse(final ChainedHttpConfig config, final FromServer fromServer) {
    try {
        final String charsetName = fromServer.getCharset().name();
        final String baseUri = fromServer.getUri().toString();
        return Jsoup.parse(fromServer.getInputStream(), charsetName, baseUri);
    }
    catch (IOException ioe) {
        throw new TransportingException(ioe);
    }
}
java
/**
 * Unwraps nested parentheses, returning the innermost non-parenthesized
 * expression tree.
 *
 * @param tree  the (possibly parenthesized) expression
 * @return the expression with all enclosing parentheses removed
 */
public static ExpressionTree stripParentheses(ExpressionTree tree) {
    ExpressionTree current = tree;
    while (current instanceof ParenthesizedTree) {
        current = ((ParenthesizedTree) current).getExpression();
    }
    return current;
}