language: stringclasses (2 values)
func_code_string: stringlengths (63 to 466k)
python
def split_from_df(self, col:IntsOrStrs=2):
    "Split the data from the `col` in the dataframe in `self.inner_df`."
    valid_idx = np.where(self.inner_df.iloc[:,df_names_to_idx(col, self.inner_df)])[0]
    return self.split_by_idx(valid_idx)
java
public static void removeName(Node n) {
    checkState(n.isFunction() || n.isClass());
    Node originalName = n.getFirstChild();
    Node emptyName = n.isFunction() ? IR.name("") : IR.empty();
    n.replaceChild(originalName, emptyName.useSourceInfoFrom(originalName));
}
python
def run_command(local_root, command, env_var=True, pipeto=None, retry=0, environ=None):
    """Run a command and return the output.

    :raise CalledProcessError: Command exits non-zero.

    :param str local_root: Local path to git root directory.
    :param iter command: Command to run.
    :param dict environ: Environment variables to set/override in the command.
    :param bool env_var: Define GIT_DIR environment variable (on non-Windows).
    :param function pipeto: Pipe `command`'s stdout to this function (only parameter given).
    :param int retry: Retry this many times on CalledProcessError after 0.1 seconds.

    :return: Command output.
    :rtype: str
    """
    log = logging.getLogger(__name__)

    # Setup env.
    env = os.environ.copy()
    if environ:
        env.update(environ)
    if env_var and not IS_WINDOWS:
        env['GIT_DIR'] = os.path.join(local_root, '.git')
    else:
        env.pop('GIT_DIR', None)

    # Run command.
    with open(os.devnull) as null:
        main = Popen(command, cwd=local_root, env=env, stdout=PIPE,
                     stderr=PIPE if pipeto else STDOUT, stdin=null)
        if pipeto:
            pipeto(main.stdout)
            main_output = main.communicate()[1].decode('utf-8')  # Might deadlock if stderr is written to a lot.
        else:
            main_output = main.communicate()[0].decode('utf-8')
    log.debug(json.dumps(dict(cwd=local_root, command=command, code=main.poll(), output=main_output)))

    # Verify success.
    if main.poll() != 0:
        if retry < 1:
            raise CalledProcessError(main.poll(), command, output=main_output)
        time.sleep(0.1)
        # Pass environ through on retry so overrides are not silently dropped.
        return run_command(local_root, command, env_var, pipeto, retry - 1, environ)
    return main_output
java
public void ifHasMemberWithTag(String template, Properties attributes) throws XDocletException {
    ArrayList allMemberNames = new ArrayList();
    HashMap allMembers = new HashMap();
    boolean hasTag = false;

    addMembers(allMemberNames, allMembers, getCurrentClass(), null, null, null);
    for (Iterator it = allMemberNames.iterator(); it.hasNext(); ) {
        XMember member = (XMember) allMembers.get(it.next());

        if (member instanceof XField) {
            setCurrentField((XField) member);
            if (hasTag(attributes, FOR_FIELD)) {
                hasTag = true;
            }
            setCurrentField(null);
        } else if (member instanceof XMethod) {
            setCurrentMethod((XMethod) member);
            if (hasTag(attributes, FOR_METHOD)) {
                hasTag = true;
            }
            setCurrentMethod(null);
        }
        if (hasTag) {
            generate(template);
            break;
        }
    }
}
python
def major_axis(points):
    """
    Returns an approximate vector representing the major axis of points

    Parameters
    -------------
    points: (n, dimension) float, points in space

    Returns
    -------------
    axis: (dimension,) float, vector along approximate major axis
    """
    U, S, V = np.linalg.svd(points)
    axis = util.unitize(np.dot(S, V))
    return axis
python
def _randomize_direction(base_heading, sigma) -> int:
    """
    Creates a variation in direction

    Args:
        base_heading: base direction
        sigma: sigma value for gaussian variation

    Returns: random direction
    """
    val = MissionWeather._gauss(base_heading, sigma)
    val = MissionWeather._normalize_direction(val)
    return val
java
public void setLastHlsIngestDateTime(com.google.api.ads.admanager.axis.v201808.DateTime lastHlsIngestDateTime) {
    this.lastHlsIngestDateTime = lastHlsIngestDateTime;
}
python
def p_group_command(p):
    '''group_command : LEFT_CURLY compound_list RIGHT_CURLY'''
    lcurly = ast.node(kind='reservedword', word=p[1], pos=p.lexspan(1))
    rcurly = ast.node(kind='reservedword', word=p[3], pos=p.lexspan(3))
    parts = [lcurly, p[2], rcurly]
    p[0] = ast.node(kind='compound', list=parts, redirects=[],
                    pos=_partsspan(parts))
java
public static <F extends ConfigurationComponent<F>> void basicValidate(final F component, final String section)
        throws ConfigException {
    final Optional<F> maybeComponent = Optional.ofNullable(component);
    if (maybeComponent.isPresent()) {
        maybeComponent.get().basicValidate(section);
    }
}
java
protected void compareServerMode(Session other) throws ClientException {
    if (transferMode != MODE_EBLOCK) {
        super.compareServerMode(other);
    } else {
        if (serverMode == SERVER_DEFAULT && other.serverMode == SERVER_DEFAULT) {
            // this is OK
        } else {
            // active and passive side had already been set;
            // make sure that it has been done correctly.
            // in mode E, source must be active and dest passive
            if (!((serverMode == SERVER_EACT && other.serverMode == SERVER_EPAS)
                    || (serverMode == SERVER_EPAS && other.serverMode == SERVER_EACT)
                    || (serverMode == SERVER_ACTIVE && other.serverMode == SERVER_PASSIVE)
                    || (serverMode == SERVER_PASSIVE && other.serverMode == SERVER_ACTIVE))) {
                throw new ClientException(ClientException.BAD_SERVER_MODE,
                        "One server must be active and other must be passive");
            }
        }
    }
}
java
public static Account getAccount(Context mContext) {
    String ACCOUNT_NAME = mContext.getString(R.string.app_name);
    return new Account(ACCOUNT_NAME, ACCOUNT_NAME);
}
java
int closestPoint(Point2D_F64 target) {
    double bestDistance = Double.MAX_VALUE;
    int bestIndex = -1;
    for (int i = 0; i < contour.size(); i++) {
        Point2D_I32 c = contour.get(i);
        double d = UtilPoint2D_F64.distanceSq(target.x, target.y, c.x, c.y);
        if (d < bestDistance) {
            bestDistance = d;
            bestIndex = i;
        }
    }
    return bestIndex;
}
java
protected Node inlineThumbnail(Document doc, ParsedURL urldata, Node eold) {
    RenderableImage img = ThumbnailRegistryEntry.handleURL(urldata);
    if (img == null) {
        LoggingUtil.warning("Image not found in registry: " + urldata.toString());
        return null;
    }
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    try {
        os.write(SVGSyntax.DATA_PROTOCOL_PNG_PREFIX.getBytes());
        Base64EncoderStream encoder = new Base64EncoderStream(os);
        ImageIO.write(img.createDefaultRendering(), "png", encoder);
        encoder.close();
    } catch (IOException e) {
        LoggingUtil.exception("Exception serializing image to png", e);
        return null;
    }
    Element i = (Element) super.cloneNode(doc, eold);
    i.setAttributeNS(SVGConstants.XLINK_NAMESPACE_URI, SVGConstants.XLINK_HREF_ATTRIBUTE,
            os.toString().replaceAll("\\s*[\\r\\n]+\\s*", ""));
    return i;
}
java
public static base_response add(nitro_service client, responderaction resource) throws Exception {
    responderaction addresource = new responderaction();
    addresource.name = resource.name;
    addresource.type = resource.type;
    addresource.target = resource.target;
    addresource.htmlpage = resource.htmlpage;
    addresource.bypasssafetycheck = resource.bypasssafetycheck;
    addresource.comment = resource.comment;
    return addresource.add_resource(client);
}
java
public static Matcher<MethodTree> methodIsNamed(final String methodName) {
    return new Matcher<MethodTree>() {
        @Override
        public boolean matches(MethodTree methodTree, VisitorState state) {
            return methodTree.getName().contentEquals(methodName);
        }
    };
}
java
private String[] collectPartitionKeyAnnotations(Method method) {
    Annotation[][] annotations = method.getParameterAnnotations();
    String[] keyMappings = new String[annotations.length];
    boolean keyMappingFound = false;
    Map<String, Integer> unique = Maps.newHashMap();
    for (int i = 0; i < annotations.length; i++) {
        PartitionKey annotation = findPartitionKeyAnnotation(annotations[i]);
        if (annotation == null) {
            continue;
        }
        String key = checkNotNull(annotation.value());
        Integer prev = unique.put(key, i);
        checkState(prev == null,
                "Method '%s' has multiple arguments annotated with the same @PartitionKey value '%s': arguments %s and %s",
                method, key, prev, i);
        keyMappings[i] = key;
        keyMappingFound = true;
    }
    return keyMappingFound ? keyMappings : null;
}
python
def setup_directories(builddir):
    """Create the in and out directories of the container."""
    build_dir = local.path(builddir)
    in_dir = build_dir / "container-in"
    out_dir = build_dir / "container-out"
    if not in_dir.exists():
        in_dir.mkdir()
    if not out_dir.exists():
        out_dir.mkdir()
java
public Task extendVirtualDisk_Task(String name, Datacenter datacenter, long newCapacityKb, Boolean eagerZero)
        throws FileFault, RuntimeFault, RemoteException {
    return new Task(getServerConnection(),
            getVimService().extendVirtualDisk_Task(getMOR(), name,
                    datacenter == null ? null : datacenter.getMOR(), newCapacityKb, eagerZero));
}
java
public GetSignatureResponse getSignature(String nonceStr, long timestamp, String url) {
    BeanUtil.requireNonNull(url, "Please pass the URL of the current page, excluding '#' and everything after it");
    GetSignatureResponse response = new GetSignatureResponse();
    String jsApiTicket = this.config.getJsApiTicket();
    String sign;
    try {
        sign = JsApiUtil.sign(jsApiTicket, nonceStr, timestamp, url);
    } catch (Exception e) {
        LOG.error("Failed to generate signature:", e);
        response.setErrcode(ResultType.OTHER_ERROR.getCode().toString());
        response.setErrmsg("Failed to generate signature");
        return response;
    }
    response.setNoncestr(nonceStr);
    response.setSignature(sign);
    response.setTimestamp(timestamp);
    response.setUrl(url);
    response.setErrcode(ResultType.SUCCESS.getCode().toString());
    return response;
}
python
def david_results_iterator(fn, verbose=False):
    """
    Iterate over a DAVID result set and yield GeneOntologyTerm objects
    representing each of the terms reported. The expected format for a DAVID
    result file is tab-separated format. The following fields should be
    present:

    ===  ===============  ==========  ====================================
    Num  Field            Type        Example
    ===  ===============  ==========  ====================================
    0    Category         string      GOTERM_BP_FAT
    1    Term             string      GO:0046907~intracellular transport
    2    Count            int         43
    3    Percent          float       11.345646437994723
    4    PValue           float       1.3232857694449546E-9
    5    Genes            string      ARSB, KPNA6, GNAS
    6    List Total       int         310
    7    Pop Hits         int         657
    8    Pop Total        int         13528
    9    Fold Enrichment  float       2.8561103746256196
    10   Bonferroni       float       2.6293654579179204E-6
    11   Benjamini        float       2.6293654579179204E-6
    12   FDR              float       2.2734203852792234E-6
    ===  ===============  ==========  ====================================

    The first line is a header giving the field names -- this is ignored
    though, and we expect them in the order given above.

    Most of the fields are ignored at present; we take fields 0, 1, and 11
    (as the significance/p-value). When parsing the term field, we try to
    extract a term ID by splitting on tilde, but if we can't then this is set
    to None.

    :param fn: the file to parse
    :param verbose: if True, output progress to stderr.
    """
    first = True
    for line in open(fn):
        line = line.strip()
        if line == "":
            continue
        if first:
            first = False
            continue
        parts = line.split("\t")
        if len(parts) != NUM_FIELDS_IN_DAVID_RECORD:
            raise IOError("failed to parse " + fn + " as DAVID result file. "
                          "Expected " + str(NUM_FIELDS_IN_DAVID_RECORD) + " "
                          "tab-separated fields, but found " +
                          str(len(parts)) + " instead")
        n_parts = parts[1].split("~")
        name = n_parts[-1].strip()
        identifier = n_parts[0] if len(n_parts) > 1 else None
        category = parts[0].strip()
        try:
            p_val = float(parts[PVAL_FIELD_NUM])
        except ValueError:
            raise IOError("Failed to parse " + fn + " as DAVID result file. "
                          "Expected field " + str(PVAL_FIELD_NUM) + " "
                          "to contain a floating point number (Benjamini), "
                          "found this instead: " + str(parts[PVAL_FIELD_NUM]))
        yield GeneOntologyEnrichmentResult(name, p_val, identifier, category)
java
public static base_response delete(nitro_service client, String selectorname) throws Exception {
    cacheselector deleteresource = new cacheselector();
    deleteresource.selectorname = selectorname;
    return deleteresource.delete_resource(client);
}
python
def command_subscribe(self, command, **kwargs):
    """ Subscribe to a topic or list of topics """
    topic = command['topic']
    encoding = command.get('encoding', 'utf-8')
    name = command['name']
    if not hasattr(self.engine, '_mqtt'):
        self.engine._mqtt = {}
    self.engine.variables[name] = []

    def on_message(client, userdata, msg):
        userdata.append(msg.payload.decode(encoding))

    self.engine._mqtt[name] = client = mqtt.Client(
        userdata=self.engine.variables[name])
    client.on_message = on_message
    client.connect(
        command['host'],
        port=int(command['port'])
    )
    client.subscribe(topic)
    client.loop_start()
    self.engine.register_teardown_callback(
        client.loop_stop)
python
def _get_policy_set(self, policy_set_id):
    """ Get a specific policy set by id. """
    uri = self._get_policy_set_uri(guid=policy_set_id)
    return self.service._get(uri)
python
def average_spectrogram(timeseries, method_func, stride, *args, **kwargs):
    """Generate an average spectrogram using a method function

    Each time bin of the resulting spectrogram is a PSD generated using
    the method_func
    """
    # unpack CSD TimeSeries pair, or single timeseries
    try:
        timeseries, other = timeseries
    except ValueError:
        timeseries = timeseries
        other = None

    from ...spectrogram import Spectrogram

    nproc = kwargs.pop('nproc', 1)

    # get params
    epoch = timeseries.t0.value
    nstride = seconds_to_samples(stride, timeseries.sample_rate)
    kwargs['fftlength'] = kwargs.pop('fftlength', stride) or stride
    normalize_fft_params(timeseries, kwargs=kwargs, func=method_func)
    nfft = kwargs['nfft']
    noverlap = kwargs['noverlap']

    # sanity check parameters
    if nstride > timeseries.size:
        raise ValueError("stride cannot be greater than the duration of "
                         "this TimeSeries")
    if nfft > nstride:
        raise ValueError("fftlength cannot be greater than stride")
    if noverlap >= nfft:
        raise ValueError("overlap must be less than fftlength")

    # set up single process Spectrogram method
    def _psd(series):
        """Calculate a single PSD for a spectrogram
        """
        psd_ = _psdn(series, method_func, *args, **kwargs)
        del psd_.epoch  # fixes Segmentation fault (no idea why it faults)
        return psd_

    # define chunks
    tschunks = _chunk_timeseries(timeseries, nstride, noverlap)
    if other is not None:
        otherchunks = _chunk_timeseries(other, nstride, noverlap)
        tschunks = zip(tschunks, otherchunks)

    # calculate PSDs
    psds = mp_utils.multiprocess_with_queues(nproc, _psd, tschunks)

    # recombobulate PSDs into a spectrogram
    return Spectrogram.from_spectra(*psds, epoch=epoch, dt=stride)
java
public void removeEventListener() {
    logger.fine("RXTXPort:removeEventListener() called");
    waitForTheNativeCodeSilly();
    //if( monThread != null && monThread.isAlive() )
    if (monThreadisInterrupted == true) {
        logger.fine("  RXTXPort:removeEventListener() already interrupted");
        monThread = null;
        SPEventListener = null;
        return;
    } else if (monThread != null && monThread.isAlive()) {
        logger.fine("  RXTXPort:Interrupt=true");
        monThreadisInterrupted = true;
        /* Notify all threads in this PID that something is up
           They will call back to see if its their thread
           using isInterrupted().
        */
        logger.fine("  RXTXPort:calling interruptEventLoop");
        interruptEventLoop();
        logger.fine("  RXTXPort:calling monThread.join()");
        try {
            // wait a reasonable moment for the death of the monitor thread
            monThread.join(3000);
        } catch (InterruptedException ex) {
            // somebody called interrupt() on us (ie wants us to abort)
            // we dont propagate InterruptedExceptions so lets re-set the flag
            Thread.currentThread().interrupt();
            return;
        }
        if (monThread.isAlive()) {
            logger.fine("  MonThread is still alive!");
        }
    }
    monThread = null;
    SPEventListener = null;
    MonitorThreadLock = false;
    MonitorThreadAlive = false;
    monThreadisInterrupted = true;
    logger.fine("RXTXPort:removeEventListener() returning");
}
java
public void putUnchecked(CodeBuilder adapter) {
    checkState(!isStatic(), "This field is static!");
    adapter.putField(owner().type(), name(), type());
}
python
def function(self, x, y, amp, R_sersic, n_sersic, e1, e2, center_x=0, center_y=0):
    """
    returns Sersic profile
    """
    #if n_sersic < 0.2:
    #    n_sersic = 0.2
    #if R_sersic < 10.**(-6):
    #    R_sersic = 10.**(-6)
    R_sersic = np.maximum(0, R_sersic)
    phi_G, q = param_util.ellipticity2phi_q(e1, e2)
    x_shift = x - center_x
    y_shift = y - center_y
    cos_phi = np.cos(phi_G)
    sin_phi = np.sin(phi_G)
    xt1 = cos_phi * x_shift + sin_phi * y_shift
    xt2 = -sin_phi * x_shift + cos_phi * y_shift
    xt2difq2 = xt2 / (q * q)
    R_ = np.sqrt(xt1 * xt1 + xt2 * xt2difq2)
    if isinstance(R_, int) or isinstance(R_, float):
        R_ = max(self._smoothing, R_)
    else:
        R_[R_ < self._smoothing] = self._smoothing
    k, bn = self.k_bn(n_sersic, R_sersic)
    R_frac = R_ / R_sersic
    R_frac = R_frac.astype(np.float32)
    if isinstance(R_, int) or isinstance(R_, float):
        if R_frac > 100:
            result = 0
        else:
            exponent = -bn * (R_frac ** (1. / n_sersic) - 1.)
            result = amp * np.exp(exponent)
    else:
        R_frac_real = R_frac[R_frac <= 100]
        exponent = -bn * (R_frac_real ** (1. / n_sersic) - 1.)
        result = np.zeros_like(R_)
        result[R_frac <= 100] = amp * np.exp(exponent)
    return np.nan_to_num(result)
python
import os

def execfile(fname, _globals, _locals):
    """ Usage: execfile('path/to/file.py', globals(), locals()) """
    if os.path.exists(fname):
        with open(fname) as f:
            code = compile(f.read(), os.path.basename(fname), 'exec')
            exec(code, _globals, _locals)
            return True
    else:
        return False
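A minimal usage sketch for the helper above; the file name is hypothetical:

# Hypothetical: run settings.py in the caller's namespace.
if execfile('settings.py', globals(), locals()):
    print('settings loaded')
else:
    print('settings.py not found')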
python
def application(cls, f):
    """Decorate a function as responder that accepts the request as first
    argument. This works like the :func:`responder` decorator but the
    function is passed the request object as first argument and the
    request object will be closed automatically::

        @Request.application
        def my_wsgi_app(request):
            return Response('Hello World!')

    As of Werkzeug 0.14 HTTP exceptions are automatically caught and
    converted to responses instead of failing.

    :param f: the WSGI callable to decorate
    :return: a new WSGI callable
    """
    #: return a callable that wraps the -2nd argument with the request
    #: and calls the function with all the arguments up to that one and
    #: the request. The return value is then called with the latest
    #: two arguments. This makes it possible to use this decorator for
    #: both methods and standalone WSGI functions.
    from ..exceptions import HTTPException

    def application(*args):
        request = cls(args[-2])
        with request:
            try:
                resp = f(*args[:-2] + (request,))
            except HTTPException as e:
                resp = e.get_response(args[-2])
            return resp(*args[-2:])

    return update_wrapper(application, f)
python
def id_pools(self):
    """
    Gets the IdPools API client.

    Returns:
        IdPools:
    """
    if not self.__id_pools:
        self.__id_pools = IdPools(self.__connection)
    return self.__id_pools
java
@Override
public final void visit(final SubmitterLinkDocumentMongo document) {
    setGedObject(new SubmitterLink(getParent(), "Submitter",
            new ObjectId(document.getString())));
}
python
def _isValidTrigger(block, ch):
    """check if the trigger characters are in the right context,
    otherwise running the indenter might be annoying to the user
    """
    if ch == "" or ch == "\n":
        return True  # Explicit align or new line
    match = rxUnindent.match(block.text())
    return match is not None and \
           match.group(3) == ""
python
def purge(name=None, pkgs=None, **kwargs):
    '''
    Remove a package and extra configuration files.

    name
        The name of the package to be deleted.

    Multiple Package Options:

    pkgs
        A list of packages to delete. Must be passed as a python list. The
        ``name`` parameter will be ignored if this option is passed.

    .. versionadded:: 0.16.0

    Returns a dict containing the changes.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.purge <package name>
        salt '*' pkg.purge <package1>,<package2>,<package3>
        salt '*' pkg.purge pkgs='["foo", "bar"]'
    '''
    return remove(name=name, pkgs=pkgs, purge=True)
python
def get_stat(self, key):
    """
    Returns a stat that was previously reported. This is necessary for
    reporting new stats that are derived from two stats, one of which may
    have been reported by an earlier run. For example, if you first use
    report_result to report (number of trimmed reads), and then in a later
    stage want to report alignment rate, then this second stat (alignment
    rate) will require knowing the first stat (number of trimmed reads);
    however, that may not have been calculated in the current pipeline run,
    so we must retrieve it from the stats.tsv output file. This command will
    retrieve such previously reported stats if they were not already
    calculated in the current pipeline run.

    :param key: key of stat to retrieve
    """
    try:
        return self.stats_dict[key]
    except KeyError:
        self._refresh_stats()
        try:
            return self.stats_dict[key]
        except KeyError:
            print("Missing stat '{}'".format(key))
            return None
java
public com.google.protobuf.ByteString getVmNameBytes() {
    java.lang.Object ref = vmName_;
    if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
                com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        vmName_ = b;
        return b;
    } else {
        return (com.google.protobuf.ByteString) ref;
    }
}
java
public static List<DataPoint> parse(String points, String splitRegex, Class<?> type, long ts) {
    String[] items = points.split(splitRegex);
    List<DataPoint> ret = new ArrayList<DataPoint>();
    for (String i : items) {
        if (i.length() == 0) {
            continue;
        }
        if (type == Long.class || type == Integer.class) {
            ret.add(new DataPoint(ts, Long.parseLong(i)));
        } else if (type == Float.class || type == Double.class) {
            ret.add(new DataPoint(ts, Double.parseDouble(i)));
        } else if (type == String.class) {
            ret.add(new DataPoint(ts, i));
        }
    }
    return ret;
}
java
public static void gc() {
    class GCTask extends DTask<GCTask> {
        public GCTask() {
            super(GUI_PRIORITY);
        }

        @Override
        public void compute2() {
            Log.info("Calling System.gc() now...");
            System.gc();
            Log.info("System.gc() finished");
            tryComplete();
        }
    }

    for (H2ONode node : H2O.CLOUD._memary) {
        GCTask t = new GCTask();
        new RPC<>(node, t).call().get();
    }
}
java
protected void handleError(final HttpServletRequest httpServletRequest,
        final HttpServletResponse httpServletResponse, final Throwable throwable)
        throws ServletException, IOException {
    HttpServletHelper helper = createServletHelper(httpServletRequest, httpServletResponse);
    ServletUtil.handleError(helper, throwable);
}
python
def top3_reduced(votes):
    """
    Description:
        Calculate the 16 moment condition values for the top-3 alternatives
    Parameters:
        votes: ordinal preference data (numpy ndarray of integers)
    """
    res = np.zeros(16)
    for vote in votes:
        # the top ranked alternative is in vote[0][0], second in vote[1][0]
        if vote[0][0] == 0:  # i.e. the first alt is ranked first
            res[0] += 1
            if vote[1][0] == 2:
                res[4] += 1
            elif vote[1][0] == 3:
                res[5] += 1
            elif vote[1][0] == 1 and vote[2][0] == 2:
                res[14] += 1
        elif vote[0][0] == 1:
            res[1] += 1
            if vote[1][0] == 0:
                res[6] += 1
            elif vote[1][0] == 3:
                res[7] += 1
            elif vote[1][0] == 2 and vote[2][0] == 3:
                res[15] += 1
        elif vote[0][0] == 2:
            res[2] += 1
            if vote[1][0] == 0:
                res[8] += 1
            elif vote[1][0] == 1:
                res[9] += 1
            elif vote[1][0] == 3 and vote[2][0] == 0:
                res[12] += 1
        elif vote[0][0] == 3:
            res[3] += 1
            if vote[1][0] == 1:
                res[10] += 1
            elif vote[1][0] == 2:
                res[11] += 1
            elif vote[1][0] == 0 and vote[2][0] == 1:
                res[13] += 1
    res /= len(votes)
    return res
java
public void alias(final @NonNull String newId, final @Nullable Options options) {
    assertNotShutdown();
    if (isNullOrEmpty(newId)) {
        throw new IllegalArgumentException("newId must not be null or empty.");
    }
    analyticsExecutor.submit(
        new Runnable() {
            @Override
            public void run() {
                final Options finalOptions;
                if (options == null) {
                    finalOptions = defaultOptions;
                } else {
                    finalOptions = options;
                }

                AliasPayload.Builder builder =
                        new AliasPayload.Builder()
                                .userId(newId)
                                .previousId(analyticsContext.traits().currentId());
                fillAndEnqueue(builder, finalOptions);
            }
        });
}
java
protected void updateDistances(Relation<V> relation, double[][] means, final WritableDataStore<Meta> metas,
        NumberVectorDistanceFunction<? super V> df) {
    for (DBIDIter id = relation.iterDBIDs(); id.valid(); id.advance()) {
        Meta c = metas.get(id);
        V fv = relation.get(id);
        // Update distances to means.
        c.secondary = -1;
        for (int i = 0; i < k; i++) {
            c.dists[i] = df.distance(fv, DoubleVector.wrap(means[i]));
            if (c.primary != i) {
                if (c.secondary < 0 || c.dists[i] < c.dists[c.secondary]) {
                    c.secondary = i;
                }
            }
        }
        metas.put(id, c); // Changed.
    }
}
python
def _compute_hamming_matrix(N):
    """Compute and store a Hamming matrix for |N| nodes.

    Hamming matrices have the following sizes::

        N   MBs
        ==  ===
        9   2
        10  8
        11  32
        12  128
        13  512

    Given these sizes and the fact that large matrices are needed
    infrequently, we store computed matrices using the Joblib filesystem
    cache instead of adding computed matrices to the ``_hamming_matrices``
    global and clogging up memory.

    This function is only called when |N| >
    ``_NUM_PRECOMPUTED_HAMMING_MATRICES``. Don't call this function
    directly; use |_hamming_matrix| instead.
    """
    possible_states = np.array(list(utils.all_states(N)))
    return cdist(possible_states, possible_states, 'hamming') * N
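For illustration, a self-contained sketch of the same computation that inlines the state enumeration instead of the package's utils.all_states helper and skips the Joblib cache:

import numpy as np
from itertools import product
from scipy.spatial.distance import cdist

def hamming_matrix(n):
    # All 2**n binary states, then pairwise counts of differing bits.
    states = np.array(list(product((0, 1), repeat=n)))
    return cdist(states, states, 'hamming') * n

print(hamming_matrix(2))  # 4x4 matrix of pairwise bit differences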
python
def recognize_verify_code(image_path, broker="ht"):
    """Recognize a verify-code image and return the recognized string,
    implemented with tesseract.

    :param image_path: path to the verify-code image
    :param broker: broker, one of ['ht', 'yjb', 'gf', 'yh']
    :return recognized: verify code string"""
    if broker == "gf":
        return detect_gf_result(image_path)
    if broker in ["yh_client", "gj_client"]:
        return detect_yh_client_result(image_path)
    # fall back to tesseract-based recognition
    return default_verify_code_detect(image_path)
java
protected String buildDeleteSQL(final TableMetadata metadata, final Class<? extends Object> type,
        final SqlConfig sqlConfig, final boolean addCondition) {
    StringBuilder sql = new StringBuilder("DELETE ").append("/* ")
            .append(sqlConfig.getSqlAgentFactory().getSqlIdKeyName()).append(" */")
            .append(" FROM ").append(metadata.getTableIdentifier()).append("").append(System.lineSeparator());

    if (addCondition) {
        boolean firstFlag = true;
        sql.append("WHERE").append(System.lineSeparator());
        List<? extends Column> cols = !metadata.getKeyColumns().isEmpty()
                ? metadata.getKeyColumns()
                : Arrays.asList(metadata.getColumns().get(0));
        for (TableMetadata.Column col : cols) {
            StringBuilder parts = new StringBuilder().append("\t");
            if (firstFlag) {
                if (col.isNullable()) {
                    parts.append("AND ");
                } else {
                    parts.append(" ");
                }
                firstFlag = false;
            } else {
                parts.append("AND ");
            }
            parts.append(col.getColumnIdentifier()).append(" = ").append("/*").append(col.getCamelColumnName())
                    .append("*/''").append(System.lineSeparator());
            if (col.isNullable()) {
                wrapIfComment(sql, parts, col);
            } else {
                sql.append(parts);
            }
        }
    }
    return sql.toString();
}
python
def after_init_app(self, app: FlaskUnchained):
    """
    Configure an after request hook to set the ``csrf_token`` in the cookie.
    """
    from flask_wtf.csrf import generate_csrf

    # send CSRF token in the cookie
    @app.after_request
    def set_csrf_cookie(response):
        if response:
            response.set_cookie('csrf_token', generate_csrf())
        return response
java
public E getEdge(V source, V target) {
    // look up the edge in the underlying graph with the endpoints reversed
    return directedGraph.getEdge(target, source);
}
python
def verbose_option(fn):
    """ Decorator to add a --verbose option to any click command.

    The value won't be passed down to the command, but rather handled in the
    callback. The value will be accessible through `peltak.core.context`
    under 'verbose' if the command needs it. To get the current value you
    can do:

        >>> from peltak.core import context
        >>>
        >>> pretend = context.get('verbose', False)

    This value will be accessible from anywhere in the code.
    """
    def set_verbose(ctx, param, value):
        # pylint: disable=missing-docstring
        # type: (click.Context, str, Any) -> None
        from peltak.core import context
        context.set('verbose', value or 0)

    return click.option(
        '-v', '--verbose',
        is_flag=True,
        expose_value=False,
        callback=set_verbose,
        help="Be verbose. Can specify multiple times for more verbosity.",
    )(fn)
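A hypothetical command wired up with the decorator above (assumes the peltak package is importable):

import click

@click.command()
@verbose_option
def build():
    from peltak.core import context
    if context.get('verbose', False):
        click.echo('verbose build')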
python
def __register_fully_extracted_warc_file(self, warc_url):
    """
    Saves the URL warc_url in the log file for fully extracted WARC URLs

    :param warc_url:
    :return:
    """
    with open(self.__log_pathname_fully_extracted_warcs, 'a') as log_file:
        log_file.write(warc_url + '\n')
python
def _delete(self):
    """Deletes this AssessmentSection from database.

    Will be called by AssessmentTaken._delete() for clean-up purposes.
    """
    collection = JSONClientValidated('assessment',
                                     collection='AssessmentSection',
                                     runtime=self._runtime)
    collection.delete_one({'_id': ObjectId(self.get_id().get_identifier())})
java
public static PageSearchResult pageSearch(String accessToken, PageSearch pageSearch) {
    return pageSearch(accessToken, JsonUtil.toJSONString(pageSearch));
}
java
public static String parseWithMetric(final String metric, final HashMap<String, String> tags) {
    final int curly = metric.indexOf('{');
    if (curly < 0) {
        return metric;
    }
    final int len = metric.length();
    if (metric.charAt(len - 1) != '}') {  // "foo{"
        throw new IllegalArgumentException("Missing '}' at the end of: " + metric);
    } else if (curly == len - 2) {  // "foo{}"
        return metric.substring(0, len - 2);
    }
    // substring the tags out of "foo{a=b,...,x=y}" and parse them.
    for (final String tag : splitString(metric.substring(curly + 1, len - 1), ',')) {
        try {
            parse(tags, tag);
        } catch (IllegalArgumentException e) {
            throw new IllegalArgumentException("When parsing tag '" + tag + "': " + e.getMessage());
        }
    }
    // Return the "foo" part of "foo{a=b,...,x=y}"
    return metric.substring(0, curly);
}
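For illustration, the same parsing logic sketched in Python; the splitString helper and the per-tag parse call are replaced with plain string handling, and error reporting is reduced to ValueError:

def parse_with_metric(metric, tags):
    # Split "foo{a=b,x=y}" into the bare metric name and a tag dict.
    curly = metric.find('{')
    if curly < 0:
        return metric
    if not metric.endswith('}'):
        raise ValueError("Missing '}' at the end of: " + metric)
    for tag in filter(None, metric[curly + 1:-1].split(',')):
        name, _, value = tag.partition('=')
        tags[name] = value
    return metric[:curly]

tags = {}
print(parse_with_metric('sys.cpu{host=web01,dc=lga}', tags), tags)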
java
private static Matrix execute(Matrix dataMatrix, MatrixFile affMatrixFile, int dims) throws IOException {
    // Write the input matrix to a file for Matlab/Octave to use
    File mInput = File.createTempFile("lpp-input-data-matrix", ".dat");
    mInput.deleteOnExit();
    MatrixIO.writeMatrix(dataMatrix, mInput, MatrixIO.Format.MATLAB_SPARSE);

    // Create an output matrix to hold the results of the computation from
    // Matlab until they can be read back into memory
    File output = File.createTempFile("lpp-output-matrix", ".dat");

    // Execute the LPP code
    execute(mInput, affMatrixFile.getFile(), dims, output);

    // Upon finishing, read the matrix back into memory.
    return MatrixIO.readMatrix(output, MatrixIO.Format.DENSE_TEXT);
}
java
public void animateProgress(int start, int end, int duration) {
    List<Boolean> list = new ArrayList<>();
    list.add(true);
    mCirclePieceFillList = list;
    setProgress(0);
    AnimatorSet set = new AnimatorSet();
    set.playTogether(Glider.glide(Skill.QuadEaseInOut, duration,
            ObjectAnimator.ofFloat(this, "progress", start, end)));
    set.setDuration(duration);
    set = addListenersToSet(set);
    set.start();
}
python
async def connect(self, conn_id, connection_string):
    """Connect to a device.

    See :meth:`AbstractDeviceAdapter.connect`.
    """
    self._logger.info("Inside connect, conn_id=%d, conn_string=%s",
                      conn_id, connection_string)
    try:
        self._setup_connection(conn_id, connection_string)
        resp = await self._execute(self._adapter.connect_sync, conn_id,
                                   connection_string)
        _raise_error(conn_id, 'connect', resp)
    except:
        self._teardown_connection(conn_id, force=True)
        raise
java
public IfcCondenserTypeEnum createIfcCondenserTypeEnumFromString(EDataType eDataType, String initialValue) {
    IfcCondenserTypeEnum result = IfcCondenserTypeEnum.get(initialValue);
    if (result == null)
        throw new IllegalArgumentException(
                "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
    return result;
}
java
public static String removePrefix(String principalName) {
    String result = principalName;
    if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(principalName)) {
        if (hasPrefix(principalName)) {
            result = principalName.trim().substring(I_CmsPrincipal.PRINCIPAL_GROUP.length() + 1);
        }
    }
    return result;
}
java
public static Class getClassOfOpenEngSBModel(String clazz, String version, OsgiUtilsService serviceFinder)
        throws ClassNotFoundException {
    ModelRegistry registry = serviceFinder.getService(ModelRegistry.class);
    ModelDescription modelDescription = new ModelDescription(clazz, version);
    Class clazzObject = registry.loadModel(modelDescription);
    return clazzObject;
}
java
public JSONBuilder serialize(String name, Object value) {
    return quote(name).append(':').serialize(value);
}
java
public void marshall(ResultSetMetadata resultSetMetadata, ProtocolMarshaller protocolMarshaller) {
    if (resultSetMetadata == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(resultSetMetadata.getColumnCount(), COLUMNCOUNT_BINDING);
        protocolMarshaller.marshall(resultSetMetadata.getColumnMetadata(), COLUMNMETADATA_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
python
def _read_data_legacy(prefix, batch_size):
    """
    Loads a tf record as tensors you can use.

    :param prefix: The path prefix as defined in the write data method.
    :param batch_size: The batch size you want for the tensors.
    :return: A feature tensor dict and a label tensor dict.
    """
    prefix = prefix.replace("\\", "/")
    folder = "/".join(prefix.split("/")[:-1])
    phase = prefix.split("/")[-1]
    config = json.load(open(prefix + '_config.json'))
    num_threads = config["num_threads"]
    filenames = [folder + "/" + f for f in listdir(folder)
                 if isfile(join(folder, f)) and phase in f and not "config.json" in f]

    # Create a tf object for the filename list and the readers.
    filename_queue = tf.train.string_input_producer(filenames)
    readers = [_read_tf_record(filename_queue, config) for _ in range(num_threads)]

    batch_dict = tf.train.shuffle_batch_join(
        readers,
        batch_size=batch_size,
        capacity=10 * batch_size,
        min_after_dequeue=5 * batch_size
    )

    # Add batch dimension to feature and label shape
    feature_batch = {}
    label_batch = {}
    for k in batch_dict.keys():
        shape = tuple([batch_size] + list(config[k]["shape"]))
        tensor = tf.reshape(batch_dict[k], shape,
                            name="input/" + phase + "/" + k + "_reshape")
        if "feature_" in k:
            feature_batch["_".join(k.split("_")[1:])] = tensor
        if "label_" in k:
            label_batch["_".join(k.split("_")[1:])] = tensor

    return feature_batch, label_batch
python
def match(self, expression=None, xpath=None, namespaces=None):
    """decorator that allows us to match by expression or by xpath
    for each transformation method"""

    class MatchObject(Dict):
        pass

    def _match(function):
        self.matches.append(
            MatchObject(expression=expression, xpath=xpath,
                        function=function, namespaces=namespaces))

        def wrapper(self, *args, **params):
            return function(self, *args, **params)

        return wrapper

    return _match
python
def make_mreq(family, address):
    """
    Makes a mreq structure object for the given address and socket family.

    :param family: A socket family (AF_INET or AF_INET6)
    :param address: A multicast address (group)
    :raise ValueError: Invalid family or address
    """
    if not address:
        raise ValueError("Empty address")

    # Convert the address to a binary form
    group_bin = pton(family, address)
    if family == socket.AF_INET:
        # IPv4
        # struct ip_mreq
        # {
        #     struct in_addr imr_multiaddr; /* IP multicast address of group */
        #     struct in_addr imr_interface; /* local IP address of interface */
        # };
        # "=I" : Native order, standard size unsigned int
        return group_bin + struct.pack("=I", socket.INADDR_ANY)
    elif family == socket.AF_INET6:
        # IPv6
        # struct ipv6_mreq {
        #     struct in6_addr ipv6mr_multiaddr;
        #     unsigned int    ipv6mr_interface;
        # };
        # "@I" : Native order, native size unsigned int
        return group_bin + struct.pack("@I", 0)

    raise ValueError("Unknown family {0}".format(family))
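A hedged usage sketch: joining an IPv4 multicast group with the mreq built above (the group address is hypothetical; pton is assumed to wrap socket.inet_pton):

import socket

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
mreq = make_mreq(socket.AF_INET, '239.0.0.1')
sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)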
python
def abort_submission(namespace, workspace, submission_id):
    """Abort running job in a workspace.

    Args:
        namespace (str): project to which workspace belongs
        workspace (str): Workspace name
        submission_id (str): Submission's unique identifier

    Swagger:
        https://api.firecloud.org/#!/Submissions/deleteSubmission
    """
    uri = "workspaces/{0}/{1}/submissions/{2}".format(namespace, workspace,
                                                      submission_id)
    return __delete(uri)
java
public static BulkheadMetricsCollector ofSupplier(MetricNames names,
        Supplier<? extends Iterable<? extends Bulkhead>> supplier) {
    return new BulkheadMetricsCollector(names, supplier);
}
python
def get_random_item(enum: Any, rnd: Optional[Random] = None) -> Any:
    """Get random item of enum object.

    :param enum: Enum object.
    :param rnd: Custom random object.
    :return: Random item of enum.
    """
    if rnd and isinstance(rnd, Random):
        return rnd.choice(list(enum))
    return random_module.choice(list(enum))
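A quick usage sketch with a standard-library Enum (random_module is assumed to be the stdlib random module imported under that alias):

from enum import Enum
from random import Random

class Color(Enum):
    RED = 1
    GREEN = 2
    BLUE = 3

print(get_random_item(Color))              # module-level RNG
print(get_random_item(Color, Random(42)))  # reproducible choice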
python
def crl_managed(name,
                signing_private_key,
                signing_private_key_passphrase=None,
                signing_cert=None,
                revoked=None,
                days_valid=100,
                digest="",
                days_remaining=30,
                include_expired=False,
                **kwargs):
    '''
    Manage a Certificate Revocation List

    name
        Path to the certificate

    signing_private_key
        The private key that will be used to sign this crl. This is usually
        your CA's private key.

    signing_private_key_passphrase
        Passphrase to decrypt the private key.

    signing_cert
        The certificate of the authority that will be used to sign this crl.
        This is usually your CA's certificate.

    revoked
        A list of certificates to revoke. Must include either a serial number
        or the certificate itself. Can optionally include the revocation date
        and notAfter date from the certificate. See example below for details.

    days_valid : 100
        The number of days the certificate should be valid for.

    digest
        The digest to use for signing the CRL. This has no effect on versions
        of pyOpenSSL less than 0.14.

    days_remaining : 30
        The crl should be automatically recreated if there are less than
        ``days_remaining`` days until the crl expires. Set to 0 to disable
        automatic renewal.

    include_expired : False
        If ``True``, include expired certificates in the CRL.

    kwargs
        Any arguments supported by :py:func:`file.managed
        <salt.states.file.managed>` are supported.

    Example:

    .. code-block:: yaml

        /etc/pki/ca.crl:
          x509.crl_managed:
            - signing_private_key: /etc/pki/myca.key
            - signing_cert: /etc/pki/myca.crt
            - revoked:
              - compromised_Web_key:
                - certificate: /etc/pki/certs/badweb.crt
                - revocation_date: 2015-03-01 00:00:00
                - reason: keyCompromise
              - terminated_vpn_user:
                - serial_number: D6:D2:DC:D8:4D:5C:C0:F4
                - not_after: 2016-01-01 00:00:00
                - revocation_date: 2015-02-25 00:00:00
                - reason: cessationOfOperation
    '''
    if revoked is None:
        revoked = []

    revoked = _revoked_to_list(revoked)

    current_days_remaining = 0
    current_comp = {}

    if os.path.isfile(name):
        try:
            current = __salt__['x509.read_crl'](crl=name)
            current_comp = current.copy()
            current_comp.pop('Last Update')
            current_notafter = current_comp.pop('Next Update')
            current_days_remaining = (
                datetime.datetime.strptime(current_notafter, '%Y-%m-%d %H:%M:%S') -
                datetime.datetime.now()).days
            if days_remaining == 0:
                days_remaining = current_days_remaining - 1
        except salt.exceptions.SaltInvocationError:
            current = '{0} is not a valid CRL.'.format(name)
    else:
        current = '{0} does not exist.'.format(name)

    new_crl = __salt__['x509.create_crl'](
        text=True,
        signing_private_key=signing_private_key,
        signing_private_key_passphrase=signing_private_key_passphrase,
        signing_cert=signing_cert,
        revoked=revoked,
        days_valid=days_valid,
        digest=digest,
        include_expired=include_expired)

    new = __salt__['x509.read_crl'](crl=new_crl)
    new_comp = new.copy()
    new_comp.pop('Last Update')
    new_comp.pop('Next Update')

    file_args, kwargs = _get_file_args(name, **kwargs)
    new_crl_created = False
    if (current_comp == new_comp and
            current_days_remaining > days_remaining and
            __salt__['x509.verify_crl'](name, signing_cert)):
        file_args['contents'] = __salt__['x509.get_pem_entry'](
            name, pem_type='X509 CRL')
    else:
        new_crl_created = True
        file_args['contents'] = new_crl

    ret = __states__['file.managed'](**file_args)
    if new_crl_created:
        ret['changes'] = {'Old': current,
                          'New': __salt__['x509.read_crl'](crl=new_crl)}
    return ret
python
def render(self, mode=None, vertices=-1, *, first=0, instances=1) -> None:
    '''
    The render primitive (mode) must be the same as
    the input primitive of the GeometryShader.

    Args:
        mode (int): By default :py:data:`TRIANGLES` will be used.
        vertices (int): The number of vertices to transform.

    Keyword Args:
        first (int): The index of the first vertex to start with.
        instances (int): The number of instances.
    '''
    if mode is None:
        mode = TRIANGLES
    self.mglo.render(mode, vertices, first, instances)
python
def _loadData(self, data):
    """ Load attribute values from Plex XML response. """
    self._data = data
    self._token = logfilter.add_secret(data.attrib.get('authenticationToken'))
    self._webhooks = []
    self.authenticationToken = self._token
    self.certificateVersion = data.attrib.get('certificateVersion')
    self.cloudSyncDevice = data.attrib.get('cloudSyncDevice')
    self.email = data.attrib.get('email')
    self.guest = utils.cast(bool, data.attrib.get('guest'))
    self.home = utils.cast(bool, data.attrib.get('home'))
    self.homeSize = utils.cast(int, data.attrib.get('homeSize'))
    self.id = data.attrib.get('id')
    self.locale = data.attrib.get('locale')
    self.mailing_list_status = data.attrib.get('mailing_list_status')
    self.maxHomeSize = utils.cast(int, data.attrib.get('maxHomeSize'))
    self.queueEmail = data.attrib.get('queueEmail')
    self.queueUid = data.attrib.get('queueUid')
    self.restricted = utils.cast(bool, data.attrib.get('restricted'))
    self.scrobbleTypes = data.attrib.get('scrobbleTypes')
    self.secure = utils.cast(bool, data.attrib.get('secure'))
    self.thumb = data.attrib.get('thumb')
    self.title = data.attrib.get('title')
    self.username = data.attrib.get('username')
    self.uuid = data.attrib.get('uuid')

    subscription = data.find('subscription')
    self.subscriptionActive = utils.cast(bool, subscription.attrib.get('active'))
    self.subscriptionStatus = subscription.attrib.get('status')
    self.subscriptionPlan = subscription.attrib.get('plan')
    self.subscriptionFeatures = []
    for feature in subscription.iter('feature'):
        self.subscriptionFeatures.append(feature.attrib.get('id'))

    roles = data.find('roles')
    self.roles = []
    if roles:
        for role in roles.iter('role'):
            self.roles.append(role.attrib.get('id'))

    entitlements = data.find('entitlements')
    self.entitlements = []
    for entitlement in entitlements.iter('entitlement'):
        self.entitlements.append(entitlement.attrib.get('id'))

    # TODO: Fetch missing MyPlexAccount attributes
    self.profile_settings = None
    self.services = None
    self.joined_at = None
java
@SuppressWarnings("unchecked")
@Override
public EList<CompareItem> getItems() {
    return (EList<CompareItem>) eGet(StorePackage.Literals.COMPARE_CONTAINER__ITEMS, true);
}
python
def resize(fname, basewidth, opFilename):
    """ resize an image to basewidth """
    if basewidth == 0:
        basewidth = 300
    img = Image.open(fname)
    wpercent = (basewidth / float(img.size[0]))
    hsize = int((float(img.size[1]) * float(wpercent)))
    img = img.resize((basewidth, hsize), Image.ANTIALIAS)
    img.save(opFilename)
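Usage sketch with hypothetical paths:

resize('photo.jpg', 300, 'photo_small.jpg')  # 300px wide, height scaled to keep the aspect ratio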
java
public static JavaRDD<DataSet> shuffleExamples(JavaRDD<DataSet> rdd, int newBatchSize, int numPartitions) {
    //Step 1: split into individual examples, mapping to a pair RDD (random key in range 0 to numPartitions)
    JavaPairRDD<Integer, DataSet> singleExampleDataSets =
            rdd.flatMapToPair(new SplitDataSetExamplesPairFlatMapFunction(numPartitions));

    //Step 2: repartition according to the random keys
    singleExampleDataSets = singleExampleDataSets.partitionBy(new HashPartitioner(numPartitions));

    //Step 3: Recombine
    return singleExampleDataSets.values().mapPartitions(new BatchDataSetsFunction(newBatchSize));
}
python
def parse(self, argument):
    """Determine validity of argument and return the correct element of enum.

    If self.enum_values is empty, then all arguments are valid and argument
    will be returned.

    Otherwise, if argument matches an element in enum, then the first
    matching element will be returned.

    Args:
      argument: The supplied flag value.

    Returns:
      The matching element from enum_values, or argument if enum_values is
      empty.

    Raises:
      ValueError: enum_values was non-empty, but argument didn't match
        anything in enum.
    """
    if not self.enum_values:
        return argument
    elif self.case_sensitive:
        if argument not in self.enum_values:
            raise ValueError('value should be one of <%s>' %
                             '|'.join(self.enum_values))
        else:
            return argument
    else:
        if argument.upper() not in [value.upper() for value in self.enum_values]:
            raise ValueError('value should be one of <%s>' %
                             '|'.join(self.enum_values))
        else:
            return [value for value in self.enum_values
                    if value.upper() == argument.upper()][0]
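A duck-typed sketch of calling the method above; the enclosing class's constructor is not shown in this record, so a hypothetical stand-in carrying the two attributes the method reads is used:

class _StubParser:
    # Hypothetical stand-in for the enclosing parser class.
    enum_values = ['apple', 'banana']
    case_sensitive = False
    parse = parse  # reuse the function defined above as a method

print(_StubParser().parse('APPLE'))  # -> 'apple'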
python
def tensor(x:Any, *rest)->Tensor:
    "Like `torch.as_tensor`, but handle lists too, and can pass multiple vector elements directly."
    if len(rest): x = (x,)+rest
    # XXX: Pytorch bug in dataloader using num_workers>0; TODO: create repro and report
    if is_listy(x) and len(x)==0: return tensor(0)
    res = torch.tensor(x) if is_listy(x) else as_tensor(x)
    if res.dtype is torch.int32:
        warn('Tensor is int32: upgrading to int64; for better performance use int64 input')
        return res.long()
    return res
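Two quick calls, assuming fastai's is_listy helper and torch are importable:

print(tensor([1, 2, 3]))    # list input -> tensor([1, 2, 3])
print(tensor(1., 2., 3.))   # multiple scalar arguments gathered into one tensor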
python
def run(self, for_time=None):
    """
    Run the simulation.

    Args:
        for_time (:obj:Float, optional): If `for_time` is set, then run the
            simulation until a set amount of time has passed. Otherwise, run
            the simulation for a set number of jumps. Defaults to None.

    Returns:
        None
    """
    self.for_time = for_time
    try:
        self.is_initialised()
    except AttributeError:
        raise
    if self.number_of_equilibration_jumps > 0:
        for step in range(self.number_of_equilibration_jumps):
            self.lattice.jump()
        self.reset()
    if self.for_time:
        self.number_of_jumps = 0
        while self.lattice.time < self.for_time:
            self.lattice.jump()
            self.number_of_jumps += 1
    else:
        for step in range(self.number_of_jumps):
            self.lattice.jump()
    self.has_run = True
python
def _can_process_pre_prepare(self, pre_prepare: PrePrepare, sender: str) -> Optional[int]:
    """
    Decide whether this replica is eligible to process a PRE-PREPARE.

    :param pre_prepare: a PRE-PREPARE msg to process
    :param sender: the name of the node that sent the PRE-PREPARE msg
    """
    # TODO: Check whether it is rejecting PRE-PREPARE from previous view

    # PRE-PREPARE should not be sent from non primary
    if not self.isMsgFromPrimary(pre_prepare, sender):
        return PP_CHECK_NOT_FROM_PRIMARY

    # Already has a PRE-PREPARE with same 3 phase key
    if (pre_prepare.viewNo, pre_prepare.ppSeqNo) in self.prePrepares:
        return PP_CHECK_DUPLICATE

    if not self.is_pre_prepare_time_acceptable(pre_prepare, sender):
        return PP_CHECK_WRONG_TIME

    if compare_3PC_keys((pre_prepare.viewNo, pre_prepare.ppSeqNo),
                        self.__last_pp_3pc) > 0:
        return PP_CHECK_OLD  # ignore old pre-prepare

    if self.nonFinalisedReqs(pre_prepare.reqIdr):
        return PP_CHECK_REQUEST_NOT_FINALIZED

    if not self.__is_next_pre_prepare(pre_prepare.viewNo, pre_prepare.ppSeqNo):
        return PP_CHECK_NOT_NEXT

    if f.POOL_STATE_ROOT_HASH.nm in pre_prepare and \
            pre_prepare.poolStateRootHash != self.stateRootHash(POOL_LEDGER_ID):
        return PP_CHECK_INCORRECT_POOL_STATE_ROOT

    # BLS multi-sig:
    status = self._bls_bft_replica.validate_pre_prepare(pre_prepare, sender)
    if status is not None:
        return status
    return None
python
def create_host(url="local://local:6666/host"):
    '''
    This is the main function to create a new Host to which you can spawn
    actors. It will be set by default at a local address if no *url*
    parameter is given, which would result in no remote communication
    between hosts. This function should be called once per execution, or
    after calling :meth:`~.shutdown` on the previous host.

    Nevertheless, it is possible to create locally more than one host and
    simulate a remote communication between them if they are of some remote
    type (`http` or `amqp`), but the first one created will be the main
    host, which is the one that will host the queries from the main
    function. Of course, every host must be initialized with a different
    URL (port). Even so, more than one host should not be required for any
    real project.

    :param str. url: URL where to start and bind the host.
    :return: :class:`~.Proxy` to the new host created.
    :raises: Exception if there is a host already created with that URL.
    '''
    if url in util.hosts.keys():
        raise HostError('Host already created. Only one host can' +
                        ' be ran with the same url.')
    else:
        if not util.hosts:
            util.main_host = Host(url)
            util.hosts[url] = util.main_host
        else:
            util.hosts[url] = Host(url)
        return util.hosts[url].proxy
java
public DescribeMLModelsResult withResults(MLModel... results) {
    if (this.results == null) {
        setResults(new com.amazonaws.internal.SdkInternalList<MLModel>(results.length));
    }
    for (MLModel ele : results) {
        this.results.add(ele);
    }
    return this;
}
python
def dialog_checklist(self):
    """Create checklist to choose packages for upgrade
    """
    data = []
    for upg in self.upgrade_all:
        data.append(upg[:-4])
    text = "Press 'spacebar' to unchoose packages from upgrade"
    title = " Upgrade "
    backtitle = "{0} {1}".format(self.meta.__all__, self.meta.__version__)
    status = True
    pkgs = DialogUtil(data, text, title, backtitle, status).checklist()
    index = 0
    for pkg, comp, uncomp in zip(self.upgrade_all, self.comp_sum,
                                 self.uncomp_sum):
        if pkg[:-4] not in pkgs:
            self.dwn_links.pop(index)
            self.upgrade_all.pop(index)
            self.comp_sum.pop(index)
            self.uncomp_sum.pop(index)
            self.count_upg -= 1
            del comp, uncomp
            index -= 1
        index += 1
    if not self.upgrade_all:
        raise SystemExit()
python
def save_ip_ranges(profile_name, prefixes, force_write, debug, output_format='json'):
    """
    Creates/Modifies an ip-range-XXX.json file

    :param profile_name:
    :param prefixes:
    :param force_write:
    :param debug:
    :param output_format: 'json' (default) or 'csv'

    :return:
    """
    filename = 'ip-ranges-%s.json' % profile_name
    ip_ranges = {}
    ip_ranges['createDate'] = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')

    # Unique prefixes
    unique_prefixes = {}
    for prefix in prefixes:
        if type(prefix) == dict:
            unique_prefixes[prefix['ip_prefix']] = prefix
        else:
            unique_prefixes[prefix] = {'ip_prefix': prefix}
    unique_prefixes = list(unique_prefixes.values())
    ip_ranges['prefixes'] = unique_prefixes

    if output_format == 'json':
        save_blob_as_json(filename, ip_ranges, force_write, debug)
    else:
        # Write as CSV
        output = 'account_id, region, ip, instance_id, instance_name\n'
        for prefix in unique_prefixes:
            output += '%s, %s, %s, %s, %s\n' % (prefix['account_id'],
                                                prefix['region'],
                                                prefix['ip_prefix'],
                                                prefix['instance_id'],
                                                prefix['name'])
        with open('ip-ranges-%s.csv' % profile_name, 'wt') as f:
            f.write(output)
java
static boolean modifierIsAcceptable(Element item) {
    // Kotlin defines properties as final
    Object[] values = { Modifier.NATIVE, Modifier.STATIC, /* Modifier.FINAL, */ Modifier.ABSTRACT };

    for (Object i : values) {
        if (item.getModifiers().contains(i))
            return false;
    }
    return true;
}
python
def is_all_field_none(self):
    """
    :rtype: bool
    """
    if self._uuid is not None:
        return False
    if self._amount_inquired is not None:
        return False
    if self._alias is not None:
        return False
    if self._description is not None:
        return False
    if self._status is not None:
        return False
    if self._redirect_url is not None:
        return False
    if self._merchant_available is not None:
        return False
    return True
python
def wait_for_keypress(self, key, modifiers: list=None, timeOut=10.0):
    """
    Wait for a keypress or key combination

    Usage: C{keyboard.wait_for_keypress(self, key, modifiers=[], timeOut=10.0)}

    Note: this function cannot be used to wait for modifier keys on their own

    @param key: the key to wait for
    @param modifiers: list of modifiers that should be pressed with the key
    @param timeOut: maximum time, in seconds, to wait for the keypress to occur
    """
    if modifiers is None:
        modifiers = []
    w = iomediator.Waiter(key, modifiers, None, timeOut)
    return w.wait()
java
@Override
public void putAttribute(String key, AttributeValue value) {
    // Validates arguments only; this implementation does not record the attribute.
    Utils.checkNotNull(key, "key");
    Utils.checkNotNull(value, "value");
}
python
def middle_frame(obj):
    "Only display the (approximately) middle frame of an animated plot"
    plot, renderer, fmt = single_frame_plot(obj)
    middle_frame = int(len(plot) / 2)
    plot.update(middle_frame)
    return {'text/html': renderer.html(plot, fmt)}
python
def getsourcefile(object):
    """Return the Python source file an object was defined in, if it exists."""
    filename = getfile(object)
    if string.lower(filename[-4:]) in ['.pyc', '.pyo']:
        filename = filename[:-4] + '.py'
    for suffix, mode, kind in imp.get_suffixes():
        if 'b' in mode and string.lower(filename[-len(suffix):]) == suffix:
            # Looks like a binary file. We want to only return a text file.
            return None
    if os.path.exists(filename):
        return filename
java
public void setMonitorPlugin(MonitorPlugin plugin) throws TooManyListenersException {
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
        Tr.entry(tc, "setMonitorPlugin", plugin);
    }
    if (this.monitorPlugin != null && !this.monitorPlugin.equals(plugin)) {
        throw new TooManyListenersException("INTERNAL ERROR: ThreadPool.MonitorPlugin already set.");
    }
    this.monitorPlugin = plugin;
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
        Tr.exit(tc, "setMonitorPlugin", plugin);
    }
}
java
private static int doUnescape(
        String s, int i, StringBuilder sb, ErrorReporter errorReporter, SourceLocation loc) {
    checkArgument(i < s.length(), "Found escape sequence at the end of a string.");
    char c = s.charAt(i++);
    switch (c) {
        case 'n':
            sb.append('\n');
            break;
        case 'r':
            sb.append('\r');
            break;
        case 't':
            sb.append('\t');
            break;
        case 'b':
            sb.append('\b');
            break;
        case 'f':
            sb.append('\f');
            break;
        case '\\':
        case '\"':
        case '\'':
        case '>':
            sb.append(c);
            break;
        case '0':
        case '1':
        case '2':
        case '3':
        case '4':
        case '5':
        case '6':
        case '7':
            --i; // backup to first octal digit
            int nOctalDigits = 1;
            int digitLimit = c < '4' ? 3 : 2;
            while (nOctalDigits < digitLimit && i + nOctalDigits < s.length()
                    && isOctal(s.charAt(i + nOctalDigits))) {
                ++nOctalDigits;
            }
            sb.append((char) Integer.parseInt(s.substring(i, i + nOctalDigits), 8));
            i += nOctalDigits;
            break;
        case 'x':
        case 'u':
            String hexCode;
            int nHexDigits = (c == 'u' ? 4 : 2);
            try {
                hexCode = s.substring(i, i + nHexDigits);
            } catch (IndexOutOfBoundsException ioobe) {
                errorReporter.report(loc.offsetStartCol(i + 1), INVALID_UNICODE_SEQUENCE, s.substring(i));
                return i + nHexDigits;
            }
            int unicodeValue;
            try {
                unicodeValue = Integer.parseInt(hexCode, 16);
            } catch (NumberFormatException nfe) {
                errorReporter.report(loc.offsetStartCol(i + 1), INVALID_UNICODE_SEQUENCE, hexCode);
                return i + nHexDigits;
            }
            sb.append((char) unicodeValue);
            i += nHexDigits;
            break;
        default:
            errorReporter.report(loc.offsetStartCol(i), UNKNOWN_ESCAPE_CODE, c);
            return i;
    }
    return i;
}
java
public void marshall(SecretListEntry secretListEntry, ProtocolMarshaller protocolMarshaller) {
    if (secretListEntry == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(secretListEntry.getARN(), ARN_BINDING);
        protocolMarshaller.marshall(secretListEntry.getName(), NAME_BINDING);
        protocolMarshaller.marshall(secretListEntry.getDescription(), DESCRIPTION_BINDING);
        protocolMarshaller.marshall(secretListEntry.getKmsKeyId(), KMSKEYID_BINDING);
        protocolMarshaller.marshall(secretListEntry.getRotationEnabled(), ROTATIONENABLED_BINDING);
        protocolMarshaller.marshall(secretListEntry.getRotationLambdaARN(), ROTATIONLAMBDAARN_BINDING);
        protocolMarshaller.marshall(secretListEntry.getRotationRules(), ROTATIONRULES_BINDING);
        protocolMarshaller.marshall(secretListEntry.getLastRotatedDate(), LASTROTATEDDATE_BINDING);
        protocolMarshaller.marshall(secretListEntry.getLastChangedDate(), LASTCHANGEDDATE_BINDING);
        protocolMarshaller.marshall(secretListEntry.getLastAccessedDate(), LASTACCESSEDDATE_BINDING);
        protocolMarshaller.marshall(secretListEntry.getDeletedDate(), DELETEDDATE_BINDING);
        protocolMarshaller.marshall(secretListEntry.getTags(), TAGS_BINDING);
        protocolMarshaller.marshall(secretListEntry.getSecretVersionsToStages(), SECRETVERSIONSTOSTAGES_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
python
def run(argv=argv):
    """Runs the search_google command line tool.

    This function runs the search_google command line tool in a terminal.
    It was intended for use inside a py file (.py) to be executed using
    python.

    Notes:
        * ``[q]`` reflects key ``q`` in the ``cseargs`` parameter for
          :class:`api.results`
        * Optional arguments with ``build_`` are keys in the ``buildargs``
          parameter for :class:`api.results`

        For distribution, this function must be defined in the following
        files::

            # In 'search_google/search_google/__main__.py'
            from .cli import run
            run()

            # In 'search_google/search_google.py'
            from search_google.cli import run
            if __name__ == '__main__':
                run()

            # In 'search_google/__init__.py'
            __entry_points__ = {'console_scripts': ['search_google=search_google.cli:run']}

    Examples::

        # Import google_streetview for the cli module
        import search_google.cli

        # Create command line arguments
        argv = [
            'cli.py',
            'google',
            '--searchType=image',
            '--build_developerKey=your_dev_key',
            '--cx=your_cx_id',
            '--num=1'
        ]

        # Run command line
        search_google.cli.run(argv)
    """
    config_file = kwconfig.manage(
        file_path=resource_filename(Requirement.parse('search_google'),
                                    'search_google/config.json'),
        defaults={
            'build_serviceName': 'customsearch',
            'build_version': 'v1',
            'num': 3,
            'fileType': 'png',
            'option_silent': 'False',
            'option_preview': 10})

    # (commands) Main command calls
    if len(argv) > 1:
        if argv[1] == '-i':  # browse docs
            open_new_tab(_doc_link)
            exit()
        elif argv[1] == '-a':  # browse arguments
            open_new_tab(_cse_link)
            exit()
    config_file.command(argv, i=1, doc=__doc__, quit=True, silent=False)

    # (parse_args) Parse command arguments into dict
    kwargs = kwconfig.parse(argv[2:])
    kwargs['q'] = argv[1]
    kwargs = config_file.add(kwargs)

    # (split_args) Split args into build, cse, and save arguments
    buildargs = {}
    cseargs = {}
    saveargs = {}
    optionargs = {}
    for k, v in kwargs.items():
        if 'build_' == k[0:6]:
            buildargs[k[6:]] = v
        elif 'save_' == k[0:5]:
            saveargs[k[5:]] = v
        elif 'option_' == k[0:7]:
            optionargs[k[7:]] = v
        else:
            cseargs[k] = v

    # (cse_results) Get google api results
    results = search_google.api.results(buildargs, cseargs)

    # (cse_print) Print a preview of results
    if 'silent' in optionargs:
        if optionargs['silent'].lower() != 'true':
            results.preview(n=int(optionargs['preview']))

    # (cse_save) Save links and metadata
    if 'links' in saveargs:
        results.save_links(saveargs['links'])
    if 'metadata' in saveargs:
        results.save_metadata(saveargs['metadata'])

    # (cse_download) Download links
    if 'downloads' in saveargs:
        results.download_links(saveargs['downloads'])
java
public static <T> Spliterator<T> spliterator(Object[] array, int additionalCharacteristics) {
    return new ArraySpliterator<>(Objects.requireNonNull(array), additionalCharacteristics);
}
python
def get_connection(self, alias='default'):
    """
    Retrieve a connection, construct it if necessary (only configuration
    was passed to us). If a non-string alias has been passed through we
    assume it's already a client instance and will just return it as-is.

    Raises ``KeyError`` if no client (or its definition) is registered
    under the alias.
    """
    # do not check isinstance(Elasticsearch) so that people can wrap their
    # clients
    if not isinstance(alias, string_types):
        return alias

    # connection already established
    try:
        return self._conns[alias]
    except KeyError:
        pass

    # if not, try to create it
    try:
        return self.create_connection(alias, **self._kwargs[alias])
    except KeyError:
        # no connection and no kwargs to set one up
        raise KeyError('There is no connection with alias %r.' % alias)
java
public void setRow(int i, double value) {
    VectorIterator it = iteratorOfRow(i);
    while (it.hasNext()) {
        it.next();
        it.set(value);
    }
}
python
def dispatch(self):
    """Wraps the dispatch method to add session support."""
    try:
        webapp2.RequestHandler.dispatch(self)
    finally:
        self.session_store.save_sessions(self.response)
python
def _init_xml(self, body):
    """ Parse the present body as xml """
    tree = etree.fromstring(body.encode(self.encoding), PARSER)

    # Extract and replace inner DIDL xml in tags
    for text in tree.xpath('.//text()[contains(., "DIDL")]'):
        item = text.getparent()
        didl_tree = etree.fromstring(item.text)
        if self.external_inner_xml:
            item.text = 'DIDL_REPLACEMENT_{0}'.format(len(self.inner_xml))
            self.inner_xml.append(didl_tree)
        else:
            item.text = None
            item.append(didl_tree)

    # Extract and replace inner DIDL xml in properties in inner xml
    for inner_tree in self.inner_xml:
        for item in inner_tree.xpath('//*[contains(@val, "DIDL")]'):
            if self.external_inner_xml:
                didl_tree = etree.fromstring(item.attrib['val'])
                item.attrib['val'] = 'DIDL_REPLACEMENT_{0}'.format(
                    len(self.inner_xml))
                self.inner_xml.append(didl_tree)

    self.body_formatted = etree.tostring(tree, pretty_print=True).decode(
        self.encoding)
java
public static NodeSequence withNodeKeys(final Iterator<NodeKey> keys, final long keyCount, final float score,
        final String workspaceName, final NodeCache cache) {
    assert keyCount >= -1;
    if (keys == null) return emptySequence(1);
    return withBatch(batchOfKeys(keys, keyCount, score, workspaceName, cache));
}
java
@Override
public String getContext() {
    final AbstractSchemaNode parent = getProperty(SchemaMethod.schemaNode);
    final StringBuilder buf = new StringBuilder();
    if (parent != null) {
        buf.append(parent.getProperty(SchemaNode.name));
        buf.append(".");
        buf.append(getProperty(name));
    }
    return buf.toString();
}
python
def _new(self, dx_hash, **kwargs):
    '''
    :param dx_hash: Standard hash populated in :func:`dxpy.bindings.DXDataObject.new()` containing attributes common to all data object classes.
    :type dx_hash: dict
    :param runSpec: Run specification
    :type runSpec: dict
    :param dxapi: API version string
    :type dxapi: string
    :param inputSpec: Input specification (optional)
    :type inputSpec: dict
    :param outputSpec: Output specification (optional)
    :type outputSpec: dict
    :param access: Access specification (optional)
    :type access: dict
    :param title: Title string (optional)
    :type title: string
    :param summary: Summary string (optional)
    :type summary: string
    :param description: Description string (optional)
    :type description: string

    .. note:: It is highly recommended that the higher-level module
       :mod:`dxpy.app_builder` or (preferably) its frontend `dx build
       <https://wiki.dnanexus.com/Command-Line-Client/Index-of-dx-Commands#build>`_
       be used instead for applet creation.

    Creates an applet with the given parameters. See the API documentation
    for the `/applet/new
    <https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method:-/applet/new>`_
    method for more info. The applet is not run until :meth:`run()` is
    called.
    '''
    for field in 'runSpec', 'dxapi':
        if field not in kwargs:
            raise DXError("%s: Keyword argument %s is required" % (self.__class__.__name__, field))
        dx_hash[field] = kwargs[field]
        del kwargs[field]

    for field in 'inputSpec', 'outputSpec', 'access', 'title', 'summary', 'description':
        if field in kwargs:
            dx_hash[field] = kwargs[field]
            del kwargs[field]

    resp = dxpy.api.applet_new(dx_hash, **kwargs)
    self.set_ids(resp["id"], dx_hash["project"])
java
public Double append(Double s1, Double s2) {
    if (s1 == null || s2 == null) {
        return null;
    }
    return s1 + s2;
}
java
public HBCIExecStatus execute(boolean closeDialog) {
    HBCIExecStatus ret = new HBCIExecStatus();
    log.debug("executing dialog");
    try {
        ret.setDialogStatus(doIt(closeDialog));
    } catch (Exception e) {
        ret.addException(e);
    }
    return ret;
}
python
def get_namespace(self, namespace, lowercase=True, trim_namespace=True):
    """Returns a dictionary containing a subset of configuration options
    that match the specified namespace/prefix. Example usage::

        app.config['IMAGE_STORE_TYPE'] = 'fs'
        app.config['IMAGE_STORE_PATH'] = '/var/app/images'
        app.config['IMAGE_STORE_BASE_URL'] = 'http://img.website.com'

    The result dictionary `image_store` would look like::

        {
            'type': 'fs',
            'path': '/var/app/images',
            'base_url': 'http://image.website.com'
        }

    This is often useful when configuration options map directly to keyword
    arguments in functions or class constructors.

    :param namespace: a configuration namespace
    :param lowercase: a flag indicating if the keys of the resulting
                      dictionary should be lowercase
    :param trim_namespace: a flag indicating if the keys of the resulting
                           dictionary should not include the namespace

    :return: a dict instance
    """
    rv = {}
    for key, value in six.iteritems(self):
        if not key.startswith(namespace):
            continue
        if trim_namespace:
            key = key[len(namespace):]
        else:
            key = key
        if lowercase:
            key = key.lower()
        rv[key] = value
    return rv