language
stringclasses
2 values
func_code_string
stringlengths
63
466k
python
def list_sas(self, filters=None):
    """Return the active IKE_SAs together with their associated CHILD_SAs.

    :param filters: retrieve only matching IKE_SAs (optional)
    :type filters: dict
    :return: list of active IKE_SAs and associated CHILD_SAs
    :rtype: list
    """
    # streamed_request returns a (header, payload) pair; only the payload
    # (the accumulated SA list) is of interest here.
    response = self.handler.streamed_request("list-sas", "list-sa", filters)
    return response[1]
python
def __validInputs(self): """Validates the inputs of the constructor.""" #if not isinstance(self.__column, Column): # raise Sitools2Exception("column must be an instance of Column") try: float(self.__minVal) except ValueError as ex: raise Sitools2Exception(ex) try: float(self.__maxVal) except ValueError as ex: raise Sitools2Exception(ex) if float(self.__minVal) >= float(self.__maxVal): raise Sitools2Exception("maxVal must be superior to minVal")
java
/**
 * Registers a listener and returns it, so a listener can be created and
 * registered in a single expression.
 *
 * @param aListener the listener to add
 * @param <R> concrete listener type
 * @return the same listener instance, for chaining
 */
public <R extends BProgramRunnerListener> R addListener(R aListener) {
    listeners.add(aListener);
    return aListener;
}
java
/**
 * Creates an STSClient on the given bus and configures its WSDL location
 * and endpoint QName from the supplied properties.
 *
 * @param bus the CXF bus to create the client on
 * @param stsProps STS configuration (WSDL location, namespace, endpoint name)
 * @return the configured STSClient
 */
public static STSClient createSTSClient(Bus bus, Map<String, String> stsProps) {
    STSClient stsClient = createClient(bus, stsProps);
    stsClient.setWsdlLocation(stsProps.get(STS_WSDL_LOCATION));
    stsClient.setEndpointQName(new QName(stsProps.get(STS_NAMESPACE), stsProps.get(STS_ENDPOINT_NAME)));
    return stsClient;
}
python
def get_constraints(clusters, quota=(1, 1), Nmax=0):
    """
    Check pairwise cluster comparison; overlapping pairs are marked as
    conflict edges along the x-axis and y-axis respectively.

    Returns ``(nodes, constraints_x, constraints_y)`` where ``nodes`` is a
    list of ``(1-based index, cluster score)`` tuples.
    """
    quota_x, quota_y = quota
    extended = make_range(clusters, extend=-Nmax)
    # Pair each cluster's 1-based index with its score (last element).
    nodes = [(idx, cluster[-1]) for idx, cluster in enumerate(extended, start=1)]
    ranges_x, ranges_y, _scores = zip(*extended)
    # Overlap constraints over the x-axis and y-axis.
    constraints_x = get_1D_overlap(ranges_x, quota_x)
    constraints_y = get_1D_overlap(ranges_y, quota_y)
    return nodes, constraints_x, constraints_y
java
/**
 * Marshalls the given FilteredLogEvent via the protocol marshaller.
 *
 * @param filteredLogEvent the event to marshall; must not be null
 * @param protocolMarshaller the marshaller receiving the field values
 * @throws SdkClientException if the argument is null or marshalling fails
 */
public void marshall(FilteredLogEvent filteredLogEvent, ProtocolMarshaller protocolMarshaller) {
    if (filteredLogEvent == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        // Emit each field under its protocol binding.
        protocolMarshaller.marshall(filteredLogEvent.getLogStreamName(), LOGSTREAMNAME_BINDING);
        protocolMarshaller.marshall(filteredLogEvent.getTimestamp(), TIMESTAMP_BINDING);
        protocolMarshaller.marshall(filteredLogEvent.getMessage(), MESSAGE_BINDING);
        protocolMarshaller.marshall(filteredLogEvent.getIngestionTime(), INGESTIONTIME_BINDING);
        protocolMarshaller.marshall(filteredLogEvent.getEventId(), EVENTID_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
/**
 * Returns the observer list registered under the given name, creating and
 * caching an empty one on first access. Synchronized so concurrent lookups
 * cannot create duplicate lists for the same name.
 *
 * @param name notification name used as lookup key
 * @return the (possibly newly created) observer list; never null
 */
private synchronized ApptentiveNotificationObserverList resolveObserverList(String name) {
    ApptentiveNotificationObserverList list = observerListLookup.get(name);
    if (list == null) {
        list = new ApptentiveNotificationObserverList();
        observerListLookup.put(name, list);
    }
    return list;
}
java
/**
 * Reads this request's fields from the MWS reader, scalar fields by name
 * and list fields by (list name, member name) pairs.
 */
@Override
public void readFragmentFrom(MwsReader r) {
    sellerId = r.read("SellerId", String.class);
    createdAfter = r.read("CreatedAfter", XMLGregorianCalendar.class);
    createdBefore = r.read("CreatedBefore", XMLGregorianCalendar.class);
    lastUpdatedAfter = r.read("LastUpdatedAfter", XMLGregorianCalendar.class);
    lastUpdatedBefore = r.read("LastUpdatedBefore", XMLGregorianCalendar.class);
    orderStatus = r.readList("OrderStatus", "Status", String.class);
    marketplaceId = r.readList("MarketplaceId", "Id", String.class);
    fulfillmentChannel = r.readList("FulfillmentChannel", "Channel", String.class);
    paymentMethod = r.readList("PaymentMethod", "Method", String.class);
    buyerEmail = r.read("BuyerEmail", String.class);
    sellerOrderId = r.read("SellerOrderId", String.class);
    maxResultsPerPage = r.read("MaxResultsPerPage", Integer.class);
    tfmShipmentStatus = r.readList("TFMShipmentStatus", "Status", String.class);
}
python
def add(self, anchor):
    """Add a new anchor to the repository.

    A fresh ID is generated and new storage is provisioned for it.

    Returns:
        The storage ID for the Anchor which can be used to retrieve the
        anchor later.
    """
    new_id = uuid.uuid4().hex
    destination = self._anchor_path(new_id)
    # Persist the anchor as text at its freshly provisioned path.
    with destination.open(mode='wt') as handle:
        save_anchor(handle, anchor, self.root)
    return new_id
java
/**
 * Recursive at-most-one encoding over the given literals.
 *
 * Small groups (size <= groupSize) are encoded pairwise: one clause
 * (~a | ~b) per literal pair. Larger groups are split in two halves that
 * share a fresh linking variable (added positively to one half, negated
 * to the other) and are encoded recursively.
 */
private void encodeIntern(final LNGVector<Literal> vars) {
    if (vars.size() <= this.groupSize)
        // Pairwise encoding: forbid every pair of literals being true together.
        for (int i = 0; i + 1 < vars.size(); i++)
            for (int j = i + 1; j < vars.size(); j++)
                this.result.addClause(vars.get(i).negate(), vars.get(j).negate());
    else {
        final LNGVector<Literal> l1 = new LNGVector<>(vars.size() / 2);
        final LNGVector<Literal> l2 = new LNGVector<>(vars.size() / 2);
        int i = 0;
        // Split the literals into two halves.
        for (; i < vars.size() / 2; i++)
            l1.push(vars.get(i));
        for (; i < vars.size(); i++)
            l2.push(vars.get(i));
        // Fresh variable links the two halves so at most one half may
        // contain a true literal.
        final Variable newVariable = this.result.newVariable();
        l1.push(newVariable);
        l2.push(newVariable.negate());
        this.encodeIntern(l1);
        this.encodeIntern(l2);
    }
}
python
def conf_as_dict(conf_filename, encoding=None, case_sensitive=False):
    """Read an ini configuration file and return its content as a dict.

    :param conf_filename: path of the ini configuration file to read
    :type conf_filename: str
    :param encoding: file encoding (optional)
    :type encoding: str
    :param case_sensitive: whether option keys are case sensitive
        (default False)
    :type case_sensitive: bool
    :return: tuple ``(flag, d, count)`` where

        * flag (bool): True when the file was read successfully,
          False otherwise;
        * d (OrderedDict or None): configuration content, with section
          order preserved, or None on failure;
        * count (int): number of sections read (0 on failure).

    Example::

        flag, d, count = conf_as_dict('test_conf.ini')
        value = d['show_opt']['short_opt']
    """
    # Missing file: fail early, always with the documented 3-tuple shape
    # (the previous behavior returned a bare 1-tuple here).
    if not pathlib.Path(conf_filename).is_file():
        return False, None, 0

    # Use the case-sensitive parser variant only when requested.
    cf = configparser.ConfigParser() if not case_sensitive else MyConfigParser()

    try:
        if sys.version_info[0] >= 3:
            cf.read(conf_filename, encoding=encoding)
        else:
            # Python 2's read() has no encoding parameter.
            cf.read(conf_filename)
    except Exception:
        # Unparseable file: report failure instead of raising.
        return False, None, 0

    d = OrderedDict(cf._sections)
    for k in d:
        # Merge DEFAULT-section values into every section and drop the
        # internal '__name__' entry configparser may leave behind.
        d[k] = OrderedDict(cf._defaults, **d[k])
        d[k].pop('__name__', None)

    # Number of sections read.
    count = len(d.keys())
    return True, d, count
java
/**
 * Returns the first of the given versions that supports this one, or null
 * when none does.
 */
public SemanticVersion findSupportingVersion(SemanticVersion... versions) {
    for (final SemanticVersion candidate : versions) {
        if (isSupportedBy(candidate)) {
            return candidate;
        }
    }
    return null;
}
python
def cut(self, sentence, cut_all=False, HMM=True):
    '''
    The main function that segments an entire sentence that contains
    Chinese characters into separated words.

    Parameter:
        - sentence: The str(unicode) to be segmented.
        - cut_all: Model type. True for full pattern, False for accurate pattern.
        - HMM: Whether to use the Hidden Markov Model.

    Yields each segmented word as a string.
    '''
    sentence = strdecode(sentence)

    # Pick the regexes that decide which spans are segmentable "Han" blocks
    # and which spans are skipped/split (whitespace, punctuation, ...).
    if cut_all:
        re_han = re_han_cut_all
        re_skip = re_skip_cut_all
    else:
        re_han = re_han_default
        re_skip = re_skip_default
    # Pick the block-level segmentation strategy.
    if cut_all:
        cut_block = self.__cut_all
    elif HMM:
        cut_block = self.__cut_DAG
    else:
        cut_block = self.__cut_DAG_NO_HMM
    blocks = re_han.split(sentence)
    for blk in blocks:
        if not blk:
            continue
        if re_han.match(blk):
            # Segmentable block: delegate to the selected strategy.
            for word in cut_block(blk):
                yield word
        else:
            # Non-Han block: split further on the skip pattern.
            tmp = re_skip.split(blk)
            for x in tmp:
                if re_skip.match(x):
                    yield x
                elif not cut_all:
                    # Accurate mode: emit each remaining character on its own.
                    for xx in x:
                        yield xx
                else:
                    yield x
python
def render(self, filename=None, directory=None, view=False, cleanup=False,
           format=None, renderer=None, formatter=None):
    """Save the source to file and render with the Graphviz engine.

    Args:
        filename: Filename for saving the source (defaults to ``name`` + ``'.gv'``)
        directory: (Sub)directory for source saving and rendering.
        view (bool): Open the rendered result with the default application.
        cleanup (bool): Delete the source file after rendering.
        format: The output format used for rendering (``'pdf'``, ``'png'``, etc.).
        renderer: The output renderer used for rendering (``'cairo'``, ``'gd'``, ...).
        formatter: The output formatter used for rendering (``'cairo'``, ``'gd'``, ...).

    Returns:
        The (possibly relative) path of the rendered file.

    Raises:
        ValueError: If ``format``, ``renderer``, or ``formatter`` are not known.
        graphviz.RequiredArgumentError: If ``formatter`` is given but
            ``renderer`` is None.
        graphviz.ExecutableNotFound: If the Graphviz executable is not found.
        subprocess.CalledProcessError: If the exit status is non-zero.
        RuntimeError: If viewer opening is requested but not supported.
    """
    source_path = self.save(filename, directory)
    # Fall back to the instance's default format when none is given.
    chosen_format = self._format if format is None else format
    rendered = backend.render(self._engine, chosen_format, source_path,
                              renderer, formatter)
    if cleanup:
        os.remove(source_path)
    if view:
        # Viewing always uses the instance's default format, as before.
        self._view(rendered, self._format)
    return rendered
java
/**
 * Builds a RETURN_DATA wave for the given service/type/bean and sends it
 * into the JIT task queue.
 */
@Override
public final <WB extends WaveBean> Wave returnData(final Class<? extends Service> serviceClass, final WaveType waveType, final WB waveBean) {
    return sendWaveIntoJit(createWave(WaveGroup.RETURN_DATA, waveType, serviceClass, waveBean));
}
java
/**
 * Appends the given identities to this result's list (creating it if
 * needed) and returns this object for call chaining.
 */
public ListIdentitiesResult withIdentities(IdentityDescription... identities) {
    // Lazily create the backing list, pre-sized for the varargs.
    if (this.identities == null) {
        setIdentities(new java.util.ArrayList<IdentityDescription>(identities.length));
    }
    for (IdentityDescription ele : identities) {
        this.identities.add(ele);
    }
    return this;
}
python
def evaluate_inkml(inkml_file_path):
    """Evaluate an InkML file.

    Parameters
    ----------
    inkml_file_path : string
        path to an InkML file

    Returns
    -------
    dictionary
        The dictionary contains the keys 'filename' and 'results', where
        'results' itself is a list of dictionaries. Each of the results
        has the keys 'semantics' (which contains the latex command) and
        'probability'
    """
    logging.info("Start evaluating '%s'...", inkml_file_path)
    ret = {'filename': inkml_file_path}
    # Parse the InkML recording and evaluate its sorted point list,
    # serialized as JSON, asking for LaTeX-formatted results.
    recording = inkml.read(inkml_file_path)
    results = evaluate(json.dumps(recording.get_sorted_pointlist()),
                       result_format='LaTeX')
    ret['results'] = results
    return ret
java
/**
 * Delegates to the default {@code onEmptyGet} implementation and narrows
 * the result back to {@code ListT}.
 */
@Override
public ListT<W,T> onEmptyGet(final Supplier<? extends T> supplier) {
    return (ListT<W,T>) FoldableTransformerSeq.super.onEmptyGet(supplier);
}
java
/**
 * Returns a ProtocolNegotiator that performs server-side TLS with the
 * given SslContext; its scheme() reports Utils.HTTPS.
 *
 * @param sslContext the TLS context; must not be null
 */
public static ProtocolNegotiator serverTls(final SslContext sslContext) {
    Preconditions.checkNotNull(sslContext, "sslContext");
    return new ProtocolNegotiator() {
        @Override
        public ChannelHandler newHandler(GrpcHttp2ConnectionHandler handler) {
            return new ServerTlsHandler(sslContext, handler);
        }

        @Override
        public void close() {}

        @Override
        public AsciiString scheme() {
            return Utils.HTTPS;
        }
    };
}
java
/**
 * Secures the content encryption key (CEK) for storage: via KMS when the
 * materials are KMS-enabled, otherwise by wrapping (or, as a fallback,
 * plainly encrypting) it with the key-encrypting key from the materials.
 *
 * @return the secured CEK together with its material description
 */
private static SecuredCEK secureCEK(SecretKey cek, EncryptionMaterials materials, S3KeyWrapScheme kwScheme, SecureRandom srand, Provider p, AWSKMS kms, AmazonWebServiceRequest req) {
    final Map<String,String> matdesc;
    if (materials.isKMSEnabled()) {
        // KMS path: encrypt the CEK remotely under the CMK, using the
        // merged material description as encryption context.
        matdesc = mergeMaterialDescriptions(materials, req);
        EncryptRequest encryptRequest = new EncryptRequest()
            .withEncryptionContext(matdesc)
            .withKeyId(materials.getCustomerMasterKeyId())
            .withPlaintext(ByteBuffer.wrap(cek.getEncoded()))
            ;
        encryptRequest
            .withGeneralProgressListener(req.getGeneralProgressListener())
            .withRequestMetricCollector(req.getRequestMetricCollector())
            ;
        EncryptResult encryptResult = kms.encrypt(encryptRequest);
        byte[] keyBlob = copyAllBytesFrom(encryptResult.getCiphertextBlob());
        return new KMSSecuredCEK(keyBlob, matdesc);
    } else {
        matdesc = materials.getMaterialsDescription();
    }
    Key kek;
    if (materials.getKeyPair() != null) {
        // Do envelope encryption with public key from key pair
        kek = materials.getKeyPair().getPublic();
    } else {
        // Do envelope encryption with symmetric key
        kek = materials.getSymmetricKey();
    }
    String keyWrapAlgo = kwScheme.getKeyWrapAlgorithm(kek);
    try {
        if (keyWrapAlgo != null) {
            // Preferred path: wrap the CEK with the scheme's algorithm.
            Cipher cipher = p == null ? Cipher
                .getInstance(keyWrapAlgo) : Cipher.getInstance(
                    keyWrapAlgo, p);
            cipher.init(Cipher.WRAP_MODE, kek, srand);
            return new SecuredCEK(cipher.wrap(cek), keyWrapAlgo, matdesc);
        }
        // fall back to the Encryption Only (EO) key encrypting method
        Cipher cipher;
        byte[] toBeEncryptedBytes = cek.getEncoded();
        String algo = kek.getAlgorithm();
        if (p != null) {
            cipher = Cipher.getInstance(algo, p);
        } else {
            cipher = Cipher.getInstance(algo); // Use default JCE Provider
        }
        cipher.init(Cipher.ENCRYPT_MODE, kek);
        return new SecuredCEK(cipher.doFinal(toBeEncryptedBytes), null, matdesc);
    } catch (Exception e) {
        throw failure(e, "Unable to encrypt symmetric key");
    }
}
java
/**
 * Returns the conditional distribution over labels at the given position
 * of the sequence: raw scores are log-normalized and then exponentiated,
 * so the result sums to 1.
 */
public double[] getConditionalDistribution(int[] sequence, int position) {
    double[] result = scoresOf(sequence, position);
    ArrayMath.logNormalize(result);
    // System.out.println("marginal: " + ArrayMath.toString(marginal,
    // nf));
    // System.out.println("conditional: " + ArrayMath.toString(result,
    // nf));
    result = ArrayMath.exp(result);
    // System.out.println("conditional: " + ArrayMath.toString(result,
    // nf));
    return result;
}
java
/**
 * Returns a range of the fixed-rate address relations for the given tax
 * method, delegating to the persistence layer.
 *
 * @param commerceTaxMethodId the tax method's primary key
 * @param start lower bound of the range (inclusive)
 * @param end upper bound of the range (exclusive)
 * @param orderByComparator ordering of the results (may be null)
 */
public static List<CommerceTaxFixedRateAddressRel> findByCommerceTaxMethodId(
    long commerceTaxMethodId, int start, int end,
    OrderByComparator<CommerceTaxFixedRateAddressRel> orderByComparator) {
    return getPersistence()
        .findByCommerceTaxMethodId(commerceTaxMethodId, start, end,
            orderByComparator);
}
java
/**
 * Applies all configured classpath element filters to the given path.
 *
 * @return true if every filter accepts the path (or no filters are
 *         configured); false as soon as one filter rejects it
 */
private boolean filter(final String classpathElementPath) {
    if (scanSpec.classpathElementFilters != null) {
        for (final ClasspathElementFilter filter : scanSpec.classpathElementFilters) {
            if (!filter.includeClasspathElement(classpathElementPath)) {
                return false;
            }
        }
    }
    return true;
}
java
/**
 * Sets the user remote service (injection setter).
 *
 * @param userService the user remote service
 */
public void setUserService(
    com.liferay.portal.kernel.service.UserService userService) {
    this.userService = userService;
}
java
/**
 * Last-resort exception handler: logs the unexpected exception and
 * replies to the client with HTTP 500.
 */
protected void handleExceptions(MessageEvent messageEvent, Exception exception) {
    logger.error("Unknown exception. Internal Server Error.", exception);
    writeErrorResponse(messageEvent, HttpResponseStatus.INTERNAL_SERVER_ERROR, "Internal Server Error");
}
java
/**
 * Fetches an edit/delete token for the given page, choosing the API call
 * shape appropriate to the wiki's MediaWiki version (1.19-, 1.20-1.23,
 * or 1.24+ with meta=tokens/CSRF tokens).
 *
 * @param pageTitle the page the token is for (only used pre-1.20)
 * @param type token type, e.g. "edit" or "delete"
 * @return the token together with its mode and parameter name
 * @throws Exception if the API call fails or reports an error
 */
public TokenResult getEditToken(String pageTitle, String type) throws Exception {
    pageTitle = normalizeTitle(pageTitle);
    String editversion = "";
    String action = "query";
    String params = "&meta=tokens";
    TokenMode tokenMode;
    if (getVersion().compareToIgnoreCase("Mediawiki 1.24") >= 0) {
        editversion = "Versions 1.24 and later";
        tokenMode = TokenMode.token1_24;
        params = "&meta=tokens";
    } else if (getVersion().compareToIgnoreCase("Mediawiki 1.20") >= 0) {
        editversion = "Versions 1.20-1.23";
        tokenMode = TokenMode.token1_20_23;
        action = "tokens";
        params = "&type=" + type;
    } else {
        editversion = "Version 1.19 and earlier";
        tokenMode = TokenMode.token1_19;
        // FIX: the pipe between prop values must be percent-encoded as
        // %7C ("info|revisions"); the previous "&7C" produced a broken
        // query parameter.
        params = "&prop=info%7Crevisions&intoken=" + type + "&titles=" + pageTitle;
    }
    if (debug) {
        LOGGER.log(Level.INFO, "handling " + type + " token for wiki version " + getVersion() + " as " + editversion + " with action=" + action + params);
    }
    Api api = getActionResult(action, params);
    handleError(api);
    TokenResult token = new TokenResult();
    token.tokenMode = tokenMode;
    token.tokenName = "token";
    switch (tokenMode) {
    case token1_19:
        Page page = api.getQuery().getPages().get(0);
        if (type.equals("edit")) {
            token.setToken(page.getEdittoken());
        } else if (type.equals("delete")) {
            token.setToken(page.getDeletetoken());
        }
        break;
    case token1_20_23:
        if (type.equals("edit")) {
            token.setToken(api.getTokens().getEdittoken());
        } else if (type.equals("delete")) {
            token.setToken(api.getTokens().getDeletetoken());
        }
        break;
    default:
        // 1.24+: a single CSRF token covers all modifying actions.
        token.setToken(api.getQuery().getTokens().getCsrftoken());
        break;
    }
    return token;
}
python
def strip_rightmost(self):
    """
    Strip the rightmost part of the language range.

    If the new rightmost part is a singleton or ``x`` (i.e. starts an
    extension or private use part), it is also stripped.

    Return the newly created :class:`LanguageRange`.
    """
    remaining = self.print_str.split("-")[:-1]
    # A trailing single-character part would introduce an extension or
    # private-use section; it must not be left dangling.
    if remaining and len(remaining[-1]) == 1:
        del remaining[-1]
    return type(self).fromstr("-".join(remaining))
java
/**
 * Returns the configured value for the given key parsed as an int.
 *
 * @param primaryKey the configuration key
 * @return the value parsed via Integer.parseInt
 * @throws SofaRpcRuntimeException if the key is absent
 */
public static int getIntValue(String primaryKey) {
    Object val = CFG.get(primaryKey);
    if (val == null) {
        throw new SofaRpcRuntimeException("Not found key: " + primaryKey);
    } else {
        return Integer.parseInt(val.toString());
    }
}
python
def top_hits(hits, num, column, reverse):
    """Yield the top ``num`` hits after sorting by the given column index.

    Note: the caller's list is sorted in place.
    """
    # In-place sort, keyed on the requested column.
    hits.sort(key=itemgetter(column), reverse=reverse)
    yield from hits[:num]
java
/**
 * Returns a stream over the tail starting at {@code key}
 * ({@code inclusive} controls whether the key itself is included),
 * optionally parallel.
 */
public Stream<T> tailStream(T key, boolean inclusive, boolean parallel) {
    return StreamSupport.stream(tailSpliterator(key, inclusive), parallel);
}
java
/**
 * Returns the remainder of dividing this value by the divisor under the
 * given MathContext.
 */
public BigDecimal remainder(BigDecimal divisor, MathContext mc) {
    // divideAndRemainder yields {quotient, remainder}; keep the remainder.
    final BigDecimal[] quotientAndRemainder = this.divideAndRemainder(divisor, mc);
    return quotientAndRemainder[1];
}
java
/**
 * Creates the algorithm's measures and registers them with the measure
 * manager. Note that "currentEvaluation" is registered both as a pull and
 * as a push measure, backed by the same counter.
 */
private void initMeasures() {
    durationMeasure = new DurationMeasure();
    iterations = new CountingMeasure(0);
    solutionListMeasure = new BasicMeasure<>();

    measureManager = new SimpleMeasureManager();
    measureManager.setPullMeasure("currentExecutionTime", durationMeasure);
    measureManager.setPullMeasure("currentEvaluation", iterations);

    measureManager.setPushMeasure("currentPopulation", solutionListMeasure);
    measureManager.setPushMeasure("currentEvaluation", iterations);
}
java
/**
 * Reads a class mapping code (and possibly class metadata) from the
 * stream. Codes: 0 means a null value; a negative code introduces a class
 * not yet seen on this stream, followed by its name; a positive code
 * refers to an already-registered mapping.
 *
 * @return the mapping, or null for a null value marker
 * @throws RuntimeException if a positive code has no registered mapping
 */
protected ClassMapping readClassMapping () throws IOException, ClassNotFoundException {
    // create our classmap if necessary
    if (_classmap == null) {
        _classmap = Lists.newArrayList();
        // insert a zeroth element
        _classmap.add(null);
    }

    // read in the class code for this instance
    short code = readShort();

    // a zero code indicates a null value
    if (code == 0) {
        return null;

        // if the code is negative, that means that we've never seen it before and class
        // metadata follows
    } else if (code < 0) {
        // first swap the code into positive-land
        code *= -1;

        // read in the class metadata
        String cname = readUTF();

        // if we have a translation (used to cope when serialized classes are renamed) use
        // it
        if (_translations != null) {
            String tname = _translations.get(cname);
            if (tname != null) {
                cname = tname;
            }
        }

        // create the class mapping
        return mapClass(code, cname);

    } else {
        ClassMapping cmap = (code < _classmap.size()) ? _classmap.get(code) : null;
        // sanity check
        if (cmap == null) {
            // this will help with debugging
            log.warning("Internal stream error, no class metadata", "code", code, "ois", this, new Exception());
            log.warning("ObjectInputStream mappings", "map", _classmap);
            String errmsg = "Read object code for which we have no registered class " + "metadata [code=" + code + "]";
            throw new RuntimeException(errmsg);
        }
        return cmap;
    }
}
java
/**
 * Returns all registered meters matching the filter, as a sorted map
 * keyed by metric name.
 */
@Override
public SortedMap<String, Meter> getMeters(MetricFilter filter) {
    return getMetrics(Meter.class, filter);
}
java
/**
 * Starts the agent, optionally with a new configuration. When the stored
 * configuration is marked immutable, the new configuration is ignored and
 * the old one is used instead; otherwise the new configuration is
 * converted, persisted, and used for startup.
 *
 * @param newConfig the configuration to apply, or null to start with the
 *        current one
 */
public void startHawkularAgent(Configuration newConfig) {
    if (newConfig == null) {
        super.startHawkularAgent();
    } else {
        Configuration oldConfig = getConfigurationManager().getConfiguration();
        // An immutable subsystem pins the old configuration.
        boolean doNotChangeConfig = (oldConfig != null && oldConfig.getSubsystem().getImmutable());

        AgentCoreEngineConfiguration agentConfig;
        try {
            agentConfig = new ConfigConverter(doNotChangeConfig ? oldConfig : newConfig).convert();
        } catch (Exception e) {
            throw new RuntimeException("Cannot start agent - config is invalid", e);
        }

        try {
            if (!doNotChangeConfig) {
                // Persist the accepted configuration before starting.
                this.configurationManager.updateConfiguration(newConfig, true);
            }
            super.startHawkularAgent(agentConfig);
        } catch (RuntimeException re) {
            throw re;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
java
/**
 * Sets the declaration's property from the enum constant, delegating to
 * the string-based setter with the enum's name.
 *
 * @param eProperty the property; must not be null
 * @return this declaration, for chaining
 */
@Nonnull
public CSSDeclaration setProperty (@Nonnull final ECSSProperty eProperty) {
    ValueEnforcer.notNull (eProperty, "Property");
    return setProperty (eProperty.getName ());
}
java
/**
 * Registers a resolved artifact under its groupId/artifactId/version path
 * (lazily creating the per-path map) and then notifies the string-based
 * addResolved overload for that path.
 */
private synchronized void addResolved( Artifact artifact ) {
    // groupId dots become path separators: g/r/o/up -> g.r.o.up
    String path =
        artifact.getGroupId().replace( '.', '/' ) + '/' + artifact.getArtifactId() + "/" + artifact.getVersion();
    Map<String, Artifact> artifactMapper = this.children.get( path );

    if ( artifactMapper == null ) {
        artifactMapper = new HashMap<String, Artifact>();
        this.children.put( path, artifactMapper );
    }

    artifactMapper.put( artifact.getName(), artifact );

    // Delegates to the String overload, not a self-recursion.
    addResolved( path );
}
python
def debug_shell(user_ns, user_global_ns, traceback=None, execWrapper=None):
    """
    Spawns some interactive shell. Tries to use IPython if available.
    Falls back to :func:`pdb.post_mortem` or :func:`simple_debug_shell`.

    :param dict[str] user_ns: local namespace exposed in the shell
    :param dict[str] user_global_ns: global namespace exposed in the shell
    :param traceback: traceback object for post-mortem debugging (optional)
    :param execWrapper: callable wrapping each code execution (optional)
    :return: nothing
    """
    ipshell = None
    try:
        # noinspection PyPackageRequirements
        import IPython
        have_ipython = True
    except ImportError:
        have_ipython = False

    # Preferred: IPython's terminal Pdb for post-mortem with a traceback.
    if not ipshell and traceback and have_ipython:
        # noinspection PyBroadException
        try:
            # noinspection PyPackageRequirements,PyUnresolvedReferences
            from IPython.core.debugger import Pdb
            # noinspection PyPackageRequirements,PyUnresolvedReferences
            from IPython.terminal.debugger import TerminalPdb
            # noinspection PyPackageRequirements,PyUnresolvedReferences
            from IPython.terminal.ipapp import TerminalIPythonApp
            ipapp = TerminalIPythonApp.instance()
            ipapp.interact = False  # Avoid output (banner, prints)
            ipapp.initialize(argv=[])
            def_colors = ipapp.shell.colors
            pdb_obj = TerminalPdb(def_colors)
            pdb_obj.botframe = None  # not sure. exception otherwise at quit

            def ipshell():
                """
                Run the IPython shell.
                """
                pdb_obj.interaction(None, traceback=traceback)
        except Exception:
            print("IPython Pdb exception:")
            better_exchook(*sys.exc_info(), autodebugshell=False)

    # Second choice: an embedded IPython shell over the given namespaces.
    if not ipshell and have_ipython:
        # noinspection PyBroadException
        try:
            # noinspection PyPackageRequirements,PyUnresolvedReferences
            import IPython
            # noinspection PyPackageRequirements,PyUnresolvedReferences
            import IPython.terminal.embed

            class DummyMod(object):
                """Dummy module"""
            # The embedded shell expects a module object for the globals.
            module = DummyMod()
            module.__dict__ = user_global_ns
            module.__name__ = "_DummyMod"
            if "__name__" not in user_ns:
                user_ns = user_ns.copy()
                user_ns["__name__"] = "_DummyUserNsMod"
            ipshell = IPython.terminal.embed.InteractiveShellEmbed.instance(
                user_ns=user_ns, user_module=module)
        except Exception:
            print("IPython not available:")
            better_exchook(*sys.exc_info(), autodebugshell=False)
        else:
            if execWrapper:
                # Route every code execution through the provided wrapper.
                old = ipshell.run_code
                ipshell.run_code = lambda code: execWrapper(lambda: old(code))

    if ipshell:
        ipshell()
    else:
        # Fallbacks without IPython: plain pdb post-mortem, or the
        # project's simple shell when there is no traceback.
        print("Use simple debug shell:")
        if traceback:
            import pdb
            pdb.post_mortem(traceback)
        else:
            simple_debug_shell(user_global_ns, user_ns)
java
/**
 * Issues a POST to the given URI with the supplied headers and body,
 * parsing the response with the given parser.
 */
@Override
public <T> CompletionStage<T> post(URI uri, Map<String, String> headers, String data, JsonParser<T> parser) {
    return request(uri, headers, data, parser);
}
python
def select(*cases):
    """
    Select the first case that becomes ready.
    If a default case (:class:`goless.dcase`) is present, return that if no
    other cases are ready.
    If there is no default case and no case is ready, block until one
    becomes ready.

    See Go's ``reflect.Select`` method for an analog
    (http://golang.org/pkg/reflect/#Select).

    :param cases: List of case instances, such as :class:`goless.rcase`,
        :class:`goless.scase`, or :class:`goless.dcase`.
    :return: ``(chosen case, received value)``. If the chosen case is not
        an :class:`goless.rcase`, it will be None.
    """
    if len(cases) == 0:
        return
    # If the first argument is a list, it should be the only argument
    if isinstance(cases[0], list):
        if len(cases) != 1:
            raise TypeError('Select can be called either with a list of cases '
                            'or multiple case arguments, but not both.')
        cases = cases[0]
    if not cases:
        # Handle the case of an empty list as an argument,
        # and prevent the raising of a SystemError by libev.
        return
    default = None
    # First pass: return the first ready case; remember the default case.
    for c in cases:
        if c.ready():
            return c, c.exec_()
        if isinstance(c, dcase):
            assert default is None, 'Only one default case is allowd.'
            default = c
    if default is not None:
        # noinspection PyCallingNonCallable
        return default, None
    # We need to check for deadlocks before selecting.
    # We can't rely on the underlying backend to do it,
    # as we do for channels, since we don't do an actual send or recv here.
    # It's possible to still have a deadlock unless we move the check into
    # the loop, but since the check is slow
    # (gevent doesn't provide a fast way), let's leave it out here.
    if _be.would_deadlock():
        raise _Deadlock('No other tasklets running, cannot select.')
    # Busy-wait, yielding to the backend scheduler between passes.
    while True:
        for c in cases:
            if c.ready():
                return c, c.exec_()
        _be.yield_()
java
/**
 * Caches the source observable and, on each subscription, schedules a
 * reset of the cache after the given duration on the supplied worker.
 *
 * NOTE(review): every subscription schedules another reset of the shared
 * cache; presumably intended — confirm against callers.
 */
public static <T> Observable<T> cache(final Observable<T> source, final long duration, final TimeUnit unit, final Scheduler.Worker worker) {
    final AtomicReference<CachedObservable<T>> cacheRef = new AtomicReference<CachedObservable<T>>();
    CachedObservable<T> cache = new CachedObservable<T>(source);
    cacheRef.set(cache);
    return cache.doOnSubscribe(new Consumer<Disposable>() {
        @Override
        public void accept(Disposable d) {
            Runnable action = new Runnable() {
                @Override
                public void run() {
                    cacheRef.get().reset();
                }
            };
            worker.schedule(action, duration, unit);
        }
    });
}
python
def accessible_organisms(user, orgs):
    """Get the list of organisms accessible to a user, filtered by `orgs`"""
    # Map organism name -> permission list for every organism the user may
    # access; an ADMIN role qualifies regardless of per-organism flags.
    permission_map = {}
    for entry in user.organismPermissions:
        if ('WRITE' in entry['permissions'] or
                'READ' in entry['permissions'] or
                'ADMINISTRATE' in entry['permissions'] or
                user.role == 'ADMIN'):
            permission_map[entry['organism']] = entry['permissions']

    if 'error' in orgs:
        raise Exception("Error received from Apollo server: \"%s\"" % orgs['error'])

    accessible = []
    for org in sorted(orgs, key=lambda x: x['commonName']):
        if org['commonName'] in permission_map:
            accessible.append((org['commonName'], org['id'], False))
    return accessible
java
/**
 * Validates a relationship: both tables must exist, and when the relation
 * name matches a spec-defined relation type, the type-specific validation
 * is applied as well.
 *
 * @throws GeoPackageException if either table does not exist
 */
private void validateRelationship(String baseTableName, String relatedTableName, String relationName) {
    // Verify the base and related tables exist
    if (!geoPackage.isTable(baseTableName)) {
        throw new GeoPackageException(
            "Base Relationship table does not exist: " + baseTableName + ", Relation: " + relationName);
    }
    if (!geoPackage.isTable(relatedTableName)) {
        throw new GeoPackageException(
            "Related Relationship table does not exist: " + relatedTableName + ", Relation: " + relationName);
    }
    // Verify spec defined relation types
    RelationType relationType = RelationType.fromName(relationName);
    if (relationType != null) {
        validateRelationship(baseTableName, relatedTableName, relationType);
    }
}
python
def replace_mutating_webhook_configuration(self, name, body, **kwargs):
    """
    replace the specified MutatingWebhookConfiguration
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.replace_mutating_webhook_configuration(name, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: name of the MutatingWebhookConfiguration (required)
    :param V1beta1MutatingWebhookConfiguration body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
    :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint.
    :return: V1beta1MutatingWebhookConfiguration
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous call: the helper returns the request thread.
        return self.replace_mutating_webhook_configuration_with_http_info(
            name, body, **kwargs)
    # Synchronous call: the helper returns the response data directly
    # because _return_http_data_only is set.
    return self.replace_mutating_webhook_configuration_with_http_info(
        name, body, **kwargs)
java
/**
 * Computes the next BFGS search direction from the current gradient,
 * updating the inverse Hessian approximation, and sets up the line
 * search along that direction. Resets the B matrix and retries if the
 * direction is not a descent direction.
 *
 * @return true if the search converged (gradient test passed), false to
 *         continue iterating
 */
private boolean computeSearchDirection() {
    // Compute the function's gradient
    function.computeGradient(temp0_Nx1.data);

    // compute the change in gradient
    for( int i = 0; i < N; i++ ) {
        y.data[i] = temp0_Nx1.data[i] - g.data[i];
        g.data[i] = temp0_Nx1.data[i];
    }

    // Update the inverse Hessian matrix
    if( iterations != 0 ) {
        EquationsBFGS.inverseUpdate(B, s, y, temp0_Nx1, temp1_Nx1);
    }

    // compute the search direction
    CommonOps_DDRM.mult(-1,B,g, searchVector);

    // use the line search to find the next x
    if( !setupLineSearch(fx, x.data, g.data, searchVector.data) ) {
        // the search direction has a positive derivative, meaning the B matrix is
        // no longer SPD. Attempt to fix the situation by resetting the matrix
        resetMatrixB();
        // do the search again, it can't fail this time
        CommonOps_DDRM.mult(-1,B,g, searchVector);
        setupLineSearch(fx, x.data, g.data, searchVector.data);
    } else if(Math.abs(derivAtZero) <= gtol ) {
        if( verbose != null ) {
            verbose.printf("finished select direction, gtest=%e\n",Math.abs(derivAtZero));
        }
        // the input might have been modified by the function. So copy it
        System.arraycopy(function.getCurrentState(),0,x.data,0,N);
        return terminateSearch(true);
    }

    mode = 1;
    iterations++;
    return false;
}
python
def router_removed_from_hosting_device(self, context, router):
    """Notify cfg agent about a router removed from its hosting device."""
    routers = [router]
    self._notification(context, 'router_removed_from_hosting_device',
                       routers, operation=None, shuffle_agents=False)
java
/**
 * Parses the given XML file and returns the set of namespaces it declares.
 *
 * NOTE(review): FileReader uses the platform default charset, ignoring any
 * XML encoding declaration — confirm inputs are always in that charset.
 */
public static Set<String> getNamespaces(File file) throws ParserConfigurationException, SAXException, IOException {
    return getNamespaces(new InputSource(new FileReader(file)));
}
python
def create_cache_cluster(name, wait=600, security_groups=None,
                         region=None, key=None, keyid=None, profile=None,
                         **args):
    '''
    Create a cache cluster.

    Any extra keyword arguments are passed through to the boto3 call
    (e.g. Engine, CacheNodeType, NumCacheNodes, CacheSubnetGroupName).

    Example:

    .. code-block:: bash

        salt myminion boto3_elasticache.create_cache_cluster name=myCacheCluster \\
                                          Engine=redis \\
                                          CacheNodeType=cache.t2.micro \\
                                          NumCacheNodes=1 \\
                                          SecurityGroupIds='[sg-11223344]' \\
                                          CacheSubnetGroupName=myCacheSubnetGroup
    '''
    if security_groups:
        if not isinstance(security_groups, list):
            security_groups = [security_groups]
        # Resolve security group names to IDs and merge them into any
        # SecurityGroupIds passed explicitly.
        sgs = __salt__['boto_secgroup.convert_to_group_ids'](groups=security_groups, region=region,
                                                            key=key, keyid=keyid, profile=profile)
        if 'SecurityGroupIds' not in args:
            args['SecurityGroupIds'] = []
        args['SecurityGroupIds'] += sgs
    # Drop salt-internal arguments (leading underscore) before the API call.
    args = dict([(k, v) for k, v in args.items() if not k.startswith('_')])
    return _create_resource(name, name_param='CacheClusterId', desc='cache cluster',
                            res_type='cache_cluster', wait=wait, status_param='CacheClusterStatus',
                            region=region, key=key, keyid=keyid, profile=profile, **args)
python
def partition_seq(seq, size):
    """
    Splits a sequence into an iterable of subsequences. All subsequences are of
    the given size, except the last one, which may be smaller. If the input
    list is modified while the returned list is processed, the behavior of the
    program is undefined.

    :param seq: the list to split
    :param size: the desired size of the sublists, must be > 0
    :type size: int
    :return: an iterable of sublists

    >>> list(partition_seq("",1))
    []
    >>> list(partition_seq("abcde",2))
    ['ab', 'cd', 'e']
    >>> list(partition_seq("abcd",2))
    ['ab', 'cd']
    >>> list(partition_seq("abcde",1))
    ['a', 'b', 'c', 'd', 'e']
    >>> list(partition_seq("abcde",0))
    Traceback (most recent call last):
    ...
    ValueError: Size must be greater than 0
    >>> l=[1,2,3,4]
    >>> i = iter( partition_seq(l,2) )
    >>> l.pop(0)
    1
    >>> next(i)
    [2, 3]
    """
    if size < 1:
        raise ValueError('Size must be greater than 0')
    # range() replaces the Python-2-only xrange so this works on Python 3;
    # the generator expression slices lazily, as the last doctest relies on.
    return (seq[pos:pos + size] for pos in range(0, len(seq), size))
python
def create_package_level_rst_index_file(
        package_name, max_depth, modules, inner_packages=None):
    """Build the reStructuredText index content for one package.

    :param package_name: name of the package
    :type package_name: str

    :param max_depth: Value for max_depth in the index file.
    :type max_depth: int

    :param modules: list of module in the package.
    :type modules: list

    :return: A text for the content of the index file.
    :rtype: str
    """
    if inner_packages is None:
        inner_packages = []
    header = 'Package::' + package_name
    # Header, underline, blank line, toctree directive, blank line.
    parts = [header,
             '=' * len(header),
             '',
             '.. toctree::',
             ' :maxdepth: ' + str(max_depth),
             '']
    leaf_package = package_name.split('.')[-1]
    for module in modules:
        if module in EXCLUDED_PACKAGES:
            continue
        # Strip the ".py" suffix from module filenames.
        parts.append(' ' + leaf_package + os.sep + module[:-3])
    for inner_package in inner_packages:
        if inner_package in EXCLUDED_PACKAGES:
            continue
        parts.append(' ' + leaf_package + os.sep + inner_package)
    return '\n'.join(parts) + '\n'
python
def clear_usersettings_cache(sender, **kwargs):
    """
    Evict the saved/deleted ``UserSettings`` instance's site entry from the
    cache, if it was primed.
    """
    instance = kwargs['instance']
    # pop() with a default is the EAFP-free equivalent of try/del/KeyError.
    USERSETTINGS_CACHE.pop(instance.site.pk, None)
java
private static int notifyProgress(UploadProgress notifier, File file, int bytesWritten, int bytesTotal, int prevPct) { if (notifier != null) { int pct = (int) ((bytesWritten * 100L) / bytesTotal); // We don't want to inform about the same pct twice if (prevPct != pct) { notifier.uploadProgress(file, bytesTotal, pct); return pct; } return prevPct; } return prevPct; }
java
/**
 * Creates a tiled sprite backed by an already-loaded surface.
 *
 * @param surface the image buffer holding the tile sheet
 * @param tileWidth the width of a single tile in pixels
 * @param tileHeight the height of a single tile in pixels
 * @return the tiled sprite wrapping the surface
 */
public static SpriteTiled loadSpriteTiled(ImageBuffer surface, int tileWidth, int tileHeight) { return new SpriteTiledImpl(surface, tileWidth, tileHeight); }
python
def _upload_or_replace_fb(self, directory, fn, _album_id,
                          _megapixels=None, resize_request=None, movealbum_request=None,
                          changetitle_request=None, _title=None):
    """Upload (or replace) one photo on Facebook.

    If ``resize_request`` is set, the picture is re-uploaded only when the
    geometry on FB doesn't match what we want.  Move-album and change-title
    requests are handled the same way: skip when FB is already up to date,
    otherwise delete and re-upload (the Graph API cannot edit in place).

    :returns: True on success or nothing-to-do, False on failure.
    """
    db = self._loadDB(directory)

    # If a resize was requested, reserve a temp file for the scaled copy.
    # The NamedTemporaryFile object must stay referenced while its name is
    # in use, so keep `fp` alive for the duration of this call.
    if _megapixels:
        fp = tempfile.NamedTemporaryFile()
        fullfile_resized = fp.name
        logger.debug("tempfile for resized is %s", fp.name)

    fullfile = os.path.join(directory, fn)

    # Only JPEGs are ever resized.
    ext = os.path.splitext(fullfile)[1].lower()
    isJPG = (ext == '.jpg')

    # If already uploaded, check whether anything actually changed.
    if fn in db:
        pid = db[fn]['photoid']
        if resize_request and isJPG:
            logger.info("fb: Resize request for %s", fn)
            if self._already_resized_on_fb(fullfile, pid, _megapixels):
                logger.debug("%s - Already in DB and resized, skipping", fn)
                return True
        elif movealbum_request:
            logger.info("fb: Move album request for %s", fn)
            if self._already_in_album(fullfile, pid, _album_id):
                logger.debug("%s - Already in DB and in correct album, skipping", fn)
                return True
        elif changetitle_request:
            logger.info("fb: Change title request for %s", fn)
            if self._title_uptodate(fullfile, pid, _title):
                logger.debug("%s - Already in DB and title up to date, skipping", fn)
                return True
        # --- If we are here it means the photo should be updated.  With the
        # FB Graph API this means removing the photo and re-uploading it
        # with the new metadata.
        logger.debug("%s - Already in DB, removing first", fn)
        if not self._remove_media(directory, fn):
            logger.error("%s - fb: couldn't replace (remove) file\n", fn)
            return False

    # Resize if requested and possible; fall back to the original on failure.
    if _megapixels and isJPG:
        if pusher_utils.resize_image(fullfile, fullfile_resized, _megapixels):
            logger.debug("%s resized to %s successfully", fullfile, fullfile_resized)
            fullfile = fullfile_resized
        else:
            logger.warning("%s couldn't resize, uploading original", fullfile)

    logger.debug("Upload %s to fb, album=%s, title='%s'", fn, _album_id, _title)
    # We can get a place id by doing a search
    # http://graph.facebook.com/search?type=city&center=37,-122&distance=1000
    # Do the actual upload.  BUGFIX: the file handle was never closed; open
    # in binary mode inside a context manager.
    with open(fullfile, 'rb') as image_fh:
        resp = self.fb.put_photo(image_fh, message=_title, album_id=_album_id)
    logger.debug("%s - Upload response is : %s", fn, resp)

    # BUGFIX: dict.has_key() is Python-2-only, and the failure path used
    # print() with logging-style %-args (which never formatted).
    if 'id' not in resp:
        logger.error("%s - fb: upload failed", fn)
        return False
    pid = resp['id']
    db[fn] = {'photoid': pid}
    logger.debug("%s - fb: uploaded with photoid %s", fn, pid)
    self._saveDB(directory, db)
    return True
java
private void addAliasDefinition(AliasDefinition aliasDef) { // Prerequisites: assert aliasDef != null; assert !m_aliasDefMap.containsKey(aliasDef.getName()); assert aliasDef.getTableName().equals(this.getTableName()); m_aliasDefMap.put(aliasDef.getName(), aliasDef); }
python
def create_warped_grid(image, grid_step=10, grid_width=2, grid_directions=(True, True),
                       fixed_reference_image=None, transform=None, foreground=1, background=0):
    """
    Deforming a grid is a helpful way to visualize a deformation field.
    This function enables a user to define the grid parameters
    and apply a deformable map to that grid.

    ANTsR function: `createWarpedGrid`

    Arguments
    ---------
    image : ANTsImage
        input image

    grid_step : scalar
        width of grid blocks

    grid_width : scalar
        width of grid lines

    grid_directions : tuple of booleans
        directions in which to draw grid lines, boolean vector

    fixed_reference_image : ANTsImage (optional)
        reference image space

    transform : list/tuple of strings (optional)
        vector of transforms

    foreground : scalar
        intensity value for grid blocks

    background : scalar
        intensity value for grid lines

    Returns
    -------
    ANTsImage

    Example
    -------
    >>> import ants
    >>> fi = ants.image_read( ants.get_ants_data( 'r16' ) )
    >>> mi = ants.image_read( ants.get_ants_data( 'r64' ) )
    >>> mygr = ants.create_warped_grid( mi )
    >>> mytx = ants.registration(fixed=fi, moving=mi, type_of_transform = ('SyN') )
    >>> mywarpedgrid = ants.create_warped_grid( mi, grid_directions=(False,True),
                            transform=mytx['fwdtransforms'], fixed_reference_image=fi )
    """
    # Build a flat array filled with `foreground`; grid lines are painted
    # into it with `background` below.
    if isinstance(image, iio.ANTsImage):
        # Pad/replace grid_directions if it doesn't match the image dimension.
        if len(grid_directions) != image.dimension:
            grid_directions = [True]*image.dimension
        garr = image.numpy() * 0 + foreground
    else:
        # Alternatively, `image` may be a shape tuple/list from which a
        # blank reference image is synthesized.
        if not isinstance(image, (list, tuple)):
            raise ValueError('image arg must be ANTsImage or list or tuple')
        if len(grid_directions) != len(image):
            grid_directions = [True]*len(image)
        garr = np.zeros(image) + foreground
        image = iio2.from_numpy(garr)

    idim = garr.ndim
    gridw = grid_width

    # For each axis, paint stripes of width `gridw` every `grid_step` voxels,
    # plus the two border slices.  Only 2-D and 3-D arrays are handled by the
    # explicit branches below.
    for d in range(idim):
        togrid = np.arange(-1, garr.shape[d]-1, step=grid_step)
        for i in range(len(togrid)):
            if (d == 0) & (idim == 3) & (grid_directions[d]):
                garr[togrid[i]:(togrid[i]+gridw),...] = background
                garr[0,...] = background
                garr[-1,...] = background
            if (d == 1) & (idim == 3) & (grid_directions[d]):
                garr[:,togrid[i]:(togrid[i]+gridw),:] = background
                garr[:,0,:] = background
                garr[:,-1,:] = background
            if (d == 2) & (idim == 3) & (grid_directions[d]):
                garr[...,togrid[i]:(togrid[i]+gridw)] = background
                garr[...,0] = background
                garr[...,-1] = background
            if (d == 0) & (idim == 2) & (grid_directions[d]):
                garr[togrid[i]:(togrid[i]+gridw),:] = background
                garr[0,:] = background
                garr[-1,:] = background
            if (d == 1) & (idim == 2) & (grid_directions[d]):
                garr[:,togrid[i]:(togrid[i]+gridw)] = background
                garr[:,0] = background
                garr[:,-1] = background

    gimage = image.new_image_like(garr)

    # Optionally warp the grid image into the reference space; both the
    # transform list and the reference image must be supplied.
    if (transform is not None) and (fixed_reference_image is not None):
        return apply_transforms( fixed=fixed_reference_image, moving=gimage,
                                 transformlist=transform )
    else:
        return gimage
java
/**
 * Converts one YCbCr sample (8 bits per component, read from {@code input}
 * starting at {@code inPos}) to a packed 0xAARRGGBB value with alpha forced
 * to 0xFF.
 */
@Override
public long convert(int[] input, int inPos) {
    // Unpack the three components; the & 0xFF guards against sign extension.
    int Y = input[inPos++] & 0xFF;
    int Cb = input[inPos++] & 0xFF;
    int Cr = input[inPos] & 0xFF;

    // Table-driven conversion: Cr drives red, Cb drives blue, and both
    // contribute to green (the Cb2G/Cr2G tables appear to be 16-bit
    // fixed-point, hence the >> 16).  sampleRangeLimitTable clamps the
    // result into the valid byte range.
    byte r = (byte) sampleRangeLimitTable[sampleRangeLimitOffset + Y + Cr2R[Cr]];
    byte g = (byte) sampleRangeLimitTable[sampleRangeLimitOffset + Y + ((Cb2G[Cb] + Cr2G[Cr]) >> 16)];
    byte b = (byte) sampleRangeLimitTable[sampleRangeLimitOffset + Y + Cb2B[Cb]];

    // Re-mask the signed bytes and pack with opaque alpha.
    return (0xFF000000L | ((r & 0xFF) << 16) | ((g & 0xFF) << 8) | (b & 0xFF));
}
java
/**
 * Returns an {@link Iterable} over the von Neumann neighborhood of the
 * given center, with the given radius.  The center tuple is copied
 * defensively so that later mutation by the caller does not affect the
 * iterators produced.
 *
 * @param center the neighborhood center
 * @param radius the neighborhood radius
 * @return an iterable producing the neighborhood tuples
 */
public static Iterable<MutableIntTuple> vonNeumannNeighborhoodIterable(
    IntTuple center, final int radius)
{
    final IntTuple frozenCenter = IntTuples.copy(center);
    return new Iterable<MutableIntTuple>()
    {
        @Override
        public Iterator<MutableIntTuple> iterator()
        {
            return new VonNeumannIntTupleIterator(frozenCenter, radius);
        }
    };
}
python
def _getframe(level=0):
    '''
    A reimplementation of `sys._getframe`. `sys._getframe` is a private function,
    and isn't guaranteed to exist in all versions and implementations of Python.
    This function is about 2 times slower than the native implementation. It
    relies on the asumption that the traceback objects have `tb_frame` attributes
    holding proper frame objects.

    :param level:
        The number of levels deep in the stack to return the frame from.
        Defaults to `0`.
    :returns:
        A frame object `levels` deep from the top of the stack.

    .. note:: Python 2 only: relies on ``sys.exc_clear()``, which was removed
       in Python 3.
    '''
    if level < 0:
        level = 0
    try:
        # A bare `raise` with no active exception fails on purpose; the
        # resulting traceback gives us a handle on the current frame.
        raise
    except:
        # `sys.exc_info` returns `(type, value, traceback)`.
        _, _, traceback = sys.exc_info()
        frame = traceback.tb_frame
        # Account for our exception, this will stop at `-1`.
        # (`~level` is bitwise NOT: the loop ends when level reaches -1.)
        while ~level:
            frame = frame.f_back
            if frame is None:
                break
            level -= 1
    finally:
        sys.exc_clear()

    # Act as close to `sys._getframe` as possible.
    if frame is None:
        raise ValueError('call stack is not deep enough')
    return frame
python
def getLocalFactories(self, From, to, protocolName):
    """
    Collect the protocol factories able to handle this
    from/to/protocolName combination.

    @param From:
    @param to:
    @param protocolName:
    @return: a list of 2-tuples of (protocolFactory, description)
    """
    factories = list(self.localFactoriesMapping.get((to, protocolName), ()))
    factories.extend(self.protocolFactoryFactory(From, to, protocolName))
    return factories
java
/**
 * Returns the sub-authorities as an unmodifiable list of defensive copies.
 * Null entries in the backing array are skipped.
 *
 * @return unmodifiable list of copied sub-authority byte arrays
 */
public List<byte[]> getSubAuthorities() {
    final List<byte[]> copies = new ArrayList<>(getSubAuthorityCount());
    for (final byte[] authority : subAuthorities) {
        if (authority == null) {
            continue;
        }
        copies.add(Arrays.copyOf(authority, authority.length));
    }
    return Collections.unmodifiableList(copies);
}
java
/**
 * Entry rule for {@code XConstructorCall}.
 * NOTE(review): this looks like ANTLR-generated parser code (grammar
 * InternalXbase.g) — do not hand-edit; regenerate from the grammar instead.
 */
public final void entryRuleXConstructorCall() throws RecognitionException {
    try {
        // InternalXbase.g:1259:1: ( ruleXConstructorCall EOF )
        // InternalXbase.g:1260:1: ruleXConstructorCall EOF
        {
        if ( state.backtracking==0 ) {
           before(grammarAccess.getXConstructorCallRule());
        }
        pushFollow(FOLLOW_1);
        ruleXConstructorCall();
        state._fsp--;
        if (state.failed) return ;
        if ( state.backtracking==0 ) {
           after(grammarAccess.getXConstructorCallRule());
        }
        match(input,EOF,FOLLOW_2);
        if (state.failed) return ;
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
    }
    return ;
}
java
/**
 * Lists the build arguments for a build step, returning a lazily-paged
 * list; each subsequent page is fetched synchronously on demand via the
 * next-page link.
 *
 * @param resourceGroupName the resource group of the container registry
 * @param registryName the container registry name
 * @param buildTaskName the build task name
 * @param stepName the build step name
 * @return the paged list of build arguments
 */
public PagedList<BuildArgumentInner> listBuildArguments(final String resourceGroupName, final String registryName, final String buildTaskName, final String stepName) {
    // Blocking fetch of the first page; subsequent pages are resolved in
    // nextPage() as the caller iterates.
    ServiceResponse<Page<BuildArgumentInner>> response = listBuildArgumentsSinglePageAsync(resourceGroupName, registryName, buildTaskName, stepName).toBlocking().single();
    return new PagedList<BuildArgumentInner>(response.body()) {
        @Override
        public Page<BuildArgumentInner> nextPage(String nextPageLink) {
            return listBuildArgumentsNextSinglePageAsync(nextPageLink).toBlocking().single().body();
        }
    };
}
python
def make_view_field(field, obj=None, types_convert_map=None, fields_convert_map=None,
                    value=__default_value__, auto_convert=True):
    """
    If auto_convert, then all values will be converted to string format, otherwise
    remain the orignal value
    """
    from uliweb.utils.textconvert import text2html
    from uliweb.core.html import Tag

    # Keep the caller-supplied value so we can tell later whether an explicit
    # value was passed (sentinel __default_value__ means "not supplied").
    old_value = value
    types_convert_map = types_convert_map or {}
    fields_convert_map = fields_convert_map or {}
    default_convert_map = {orm.TextProperty:lambda v,o:text2html(v)}

    # `field` may be either an orm Property or a dict wrapping one
    # (keys: 'prop', 'name', optionally 'verbose_name').
    if isinstance(field, dict):
        if 'prop' in field and field.get('prop'):
            prop = field['prop']
        else:
            prop = field
        name = field.get('name')
    else:
        prop = field
        name = prop.property_name

    #not real Property instance, then return itself, so if should return
    #just like {'label':xxx, 'value':xxx, 'display':xxx}
    if not isinstance(prop, orm.Property):
        if old_value is __default_value__:
            value = prop.get('value', '')
        display = prop.get('display', value)
        label = prop.get('label', '') or prop.get('verbose_name', '')
        convert = prop.get('convert', None)
    else:
        if old_value is __default_value__:
            if isinstance(obj, Model):
                value = prop.get_value_for_datastore(obj)
                # Lazy values need one attribute access to force loading.
                if value is Lazy:
                    getattr(obj, prop.property_name)
                    value = prop.get_value_for_datastore(obj)
            else:
                value = obj[name]
        if auto_convert or prop.choices:
            display = prop.get_display_value(value)
        else:
            display = value
        if isinstance(field, dict):
            initial = field.get('verbose_name', None)
        else:
            initial = ''
        label = initial or prop.verbose_name or name

    # Resolve the converter: per-field mapping wins over per-type mapping,
    # which wins over the built-in default (TextProperty -> text2html).
    if name in fields_convert_map:
        convert = fields_convert_map.get(name, None)
    else:
        if isinstance(prop, orm.Property):
            convert = types_convert_map.get(prop.__class__, None)
            if not convert:
                convert = default_convert_map.get(prop.__class__, None)

    convert_result = None
    if convert:
        convert_result = convert(value, obj)

    if convert_result is None:
        if value is not None:
            if isinstance(prop, orm.ManyToMany):
                s = []
                #support value parameter, the old value is already stored in "old_value" variable
                if old_value is not __default_value__:
                    if prop.reference_fieldname == 'id':
                        query = []
                        for _id in old_value:
                            _v = functions.get_cached_object(prop.reference_class, _id)
                            query.append(_v)
                    else:
                        query = prop.reference_class.filter(prop.reference_class.c[prop.reversed_fieldname].in_(old_value))
                else:
                    if prop.reference_fieldname == 'id':
                        query = []
                        _ids = prop.get_value_for_datastore(obj, cached=True)
                        for _id in _ids:
                            _v = functions.get_cached_object(prop.reference_class, _id)
                            if not _v:
                                log.debug("Can't find object %s:%d" % (prop.reference_class.__name__, _id))
                                _v = _id
                            query.append(_v)
                    else:
                        query = getattr(obj, prop.property_name).all()
                # Render each related object as a link when possible.
                for x in query:
                    if isinstance(x, orm.Model):
                        s.append(get_obj_url(x))
                    else:
                        s.append(str(x))
                display = ' '.join(s)
            elif isinstance(prop, orm.ReferenceProperty) or isinstance(prop, orm.OneToOne):
                try:
                    # NOTE(review): the nesting below was reconstructed from a
                    # collapsed source line — verify branch structure against
                    # the original file.
                    if old_value is not __default_value__:
                        d = prop.reference_class.c[prop.reference_fieldname]
                        if prop.reference_fieldname == 'id':
                            v = functions.get_cached_object(prop.reference_class, old_value)
                        else:
                            v = prop.reference_class.get(d==old_value)
                    if not isinstance(obj, Model):
                        d = prop.reference_class.c[prop.reference_fieldname]
                        if prop.reference_fieldname == 'id':
                            v = functions.get_cached_object(prop.reference_class, value)
                        else:
                            v = prop.reference_class.get(d==value)
                    else:
                        if prop.reference_fieldname == 'id':
                            v = functions.get_cached_object(prop.reference_class, obj.get_datastore_value(prop.property_name))
                        else:
                            v = functions.get_cached_object(prop.reference_class, condition=prop.reference_class.c[prop.reference_fieldname]==obj.get_datastore_value(prop.property_name))
                except orm.Error:
                    display = prop.get_datastore_value(obj) or ''
                    v = None
                if isinstance(v, Model):
                    display = get_obj_url(v)
                else:
                    display = str(v if v is not None else '')
            elif isinstance(prop, orm.FileProperty):
                url = functions.get_href(value)
                if url:
                    display = str(Tag('a', value, href=url))
                else:
                    display = ''
#            if isinstance(prop, orm.Property) and prop.choices is not None:
#                display = prop.get_display_value(value)
            if prop.__class__ is orm.TextProperty:
                display = text2html(value)
    else:
        display = convert_result

    # Python 2: normalize unicode display text to utf-8 bytes.
    if isinstance(display, unicode):
        display = display.encode('utf-8')
    if display is None:
        display = ''

    return Storage({'label':label, 'value':value, 'display':display, 'name':name})
java
/**
 * Creates a fixed thread pool sized to the smallest request count among
 * the given rate rules, or {@code DEFAULT_FIXED_POOL_SIZE} when no rule
 * constrains the size.
 *
 * @param rules the rate rules to inspect
 * @return a fixed-size executor
 */
public static Executor newFixedThreadPool(RateRule... rules) {
    int poolSize = Integer.MAX_VALUE;
    for (final RateRule rule : rules) {
        poolSize = Math.min(poolSize, rule.getRequests());
    }
    if (poolSize == Integer.MAX_VALUE) {
        poolSize = DEFAULT_FIXED_POOL_SIZE;
    }
    return Executors.newFixedThreadPool(poolSize);
}
python
def _generic_search(cls, name, search_string, metadata=None, ignore=''):
    """ Searches for a specific string given three types of regex search types.
        Also auto-checks for camel casing.

    :param name: str, name of object in question
    :param search_string: str, string to find and insert into the search regexes
    :param metadata: dict, metadata to add to the result if we find a match
                     (BUGFIX: default was a shared mutable ``{}``; now None)
    :param ignore: str, ignore specific string for the search
    :return: dict, dictionary of search results
    """
    # Avoid the shared-mutable-default pitfall.
    if metadata is None:
        metadata = {}
    patterns = [cls.REGEX_ABBR_SEOS, cls.REGEX_ABBR_ISLAND, cls.REGEX_ABBR_CAMEL]
    # Camel-case matching only makes sense for capitalized search strings.
    if not search_string[0].isupper():
        patterns.remove(cls.REGEX_ABBR_CAMEL)

    for pattern in patterns:
        search_result = cls._get_regex_search(name,
                                              pattern.format(ABBR=search_string, SEP=cls.REGEX_SEPARATORS),
                                              metadata=metadata, match_index=0, ignore=ignore)
        if search_result is not None:
            # Accept the match only if it is valid camel case in context.
            if cls.is_valid_camel(search_result.get('match_full'), strcmp=search_result.get('match')):
                return search_result
    return None
java
/**
 * Sets the JavaScript {@code onkeyup} attribute on this tag's state.
 *
 * @param onkeyup the JavaScript to run when a key is released
 */
public void setOnKeyUp(String onkeyup) {
    AbstractHtmlState tsh = getState();
    tsh.registerAttribute(AbstractHtmlState.ATTR_JAVASCRIPT, ONKEYUP, onkeyup);
}
python
def chart(
        symbols=("AAPL", "GLD", "GOOG", "$SPX", "XOM", "msft"),
        start=datetime.datetime(2008, 1, 1),
        end=datetime.datetime(2009, 12, 31),  # data stops at 2013/1/1
        normalize=True,
):
    """Display a graph of the price history for the list of ticker symbols provided


    Arguments:
      symbols (list of str): Ticker symbols like "GOOG", "AAPL", etc
      start (datetime): The date at the start of the period being analyzed.
      end (datetime): The date at the end of the period being analyzed.
      normalize (bool): Whether to normalize prices to 1 at the start of the time series.
    """
    # Fall back to the default window when None is passed explicitly.
    start = util.normalize_date(start or datetime.date(2008, 1, 1))
    end = util.normalize_date(end or datetime.date(2009, 12, 31))
    symbols = [s.upper() for s in symbols]
    # 16:00 — presumably NYSE close; du.getNYSEdays yields trading days.
    timeofday = datetime.timedelta(hours=16)
    timestamps = du.getNYSEdays(start, end, timeofday)
    ls_keys = ['open', 'high', 'low', 'close', 'volume', 'actual_close']
    ldf_data = da.get_data(timestamps, symbols, ls_keys)
    d_data = dict(zip(ls_keys, ldf_data))
    na_price = d_data['close'].values
    if normalize:
        # Scale each series so it starts at 1.0 (divide by the first row).
        na_price /= na_price[0, :]
    plt.clf()
    plt.plot(timestamps, na_price)
    plt.legend(symbols)
    plt.ylabel('Adjusted Close')
    plt.xlabel('Date')
    # NOTE(review): savefig happens before grid(True), so the grid will not
    # appear in chart.pdf — confirm whether that is intentional.
    plt.savefig('chart.pdf', format='pdf')
    plt.grid(True)
    plt.show()
    return na_price
python
def register_name(self, register_index):
    """Retrives and returns the name of an ARM CPU register.

    Args:
      self (JLink): the ``JLink`` instance
      register_index (int): index of the register whose name to retrieve

    Returns:
      Name of the register.
    """
    raw_name = self._dll.JLINKARM_GetRegisterName(register_index)
    return ctypes.cast(raw_name, ctypes.c_char_p).value.decode()
java
/**
 * Reads a streamed query result from the wire: resolves the target service
 * and query, then feeds the decoded value batch (and optional exception /
 * completion flag) into the matching query reference.
 *
 * @param hIn the Hessian-style input stream positioned at a stream result
 * @param headers the message headers
 * @throws IOException on wire-format errors
 */
private void readStreamResult(InH3 hIn, HeadersAmp headers)
  throws IOException
{
  ServiceRefAmp serviceRef = readToAddress(hIn);

  long id = hIn.readLong();

  QueryRefAmp queryRef = serviceRef.getQueryRef(id);

  if (queryRef != null) {
    // Deserialize subsequent payloads with the query's class loader.
    ClassLoader loader = queryRef.getClassLoader();

    Thread thread = Thread.currentThread();
    thread.setContextClassLoader(loader);
    // XXX: _serializer.setClassLoader(loader);
  }

  // Wire order: sequence number, value batch, optional exception, done flag.
  int sequence = hIn.readInt();

  List<Object> values = (List) hIn.readObject();

  Throwable exn = (Throwable) hIn.readObject(Throwable.class);

  boolean isComplete = hIn.readBoolean();

  if (log.isLoggable(_logLevel)) {
    log.log(_logLevel, "stream-result-r " + values + "," + isComplete
            + " (in " + this + ")"
            + "\n  {id:" + id + ", to:" + serviceRef + "," + headers + "}");
  }

  if (queryRef != null) {
    // accept() returning true means the stream is finished for this id.
    if (queryRef.accept(headers, values, sequence, isComplete)) {
      serviceRef.removeQueryRef(id);
    }

    if (exn != null) {
      serviceRef.removeQueryRef(id);
      queryRef.fail(headers, exn);
    }
    /*
    if (isComplete) {
      // XXX: timing
      //serviceRef.removeQueryRef(id);
      // queryRef.completeStream(headers, sequence);
    }
    */
    /*
    else if (queryRef.isCancelled()) {
      System.out.println("CANCEL_ME: " + queryRef);
    }
    */
  }
  else if (log.isLoggable(Level.WARNING)) {
    // No matching pending query: log and drop the result.
    log.warning("query-result qid=" + id + " for service " + serviceRef
                + " does not match any known queries.\n" + headers);
  }
}
java
/**
 * Applies this decision: branch 1 applies the assignment, branch 2 undoes
 * it; {@code to == -1} selects the single-value form, otherwise the range
 * form is used.
 *
 * @throws ContradictionException if the (un)assignment contradicts the model
 */
@Override
public void apply() throws ContradictionException {
    if (to == -1) {
        if (branch == 1) {
            assignment.apply(var, from, this);
        } else if (branch == 2) {
            assignment.unapply(var, from, this);
        }
    } else {
        if (branch == 1) {
            assignment.apply(var, from, to, this);
        } else if (branch == 2) {
            assignment.unapply(var, from, to, this);
        }
    }
}
java
/**
 * Creates the schema items needed by a newly created POJO DAO.  A simple
 * claim protocol coordinates concurrent processes: each schema item is
 * claimed by inserting a random claim id and reading it back; only the
 * process whose claim id survives executes the DDL statement.
 *
 * @param event the DAO-created event carrying the POJO binding and table name
 */
@Override
public <P> void pojoDaoCreated(PojoDaoFactoryEvent<P> event) {
    String tableName = event.getTableName();
    Schema schema = new Schema();
    event.getPojoBinding().describe(schema.createTable(tableName), schema);
    schema.items().forEach(schemaItem -> {
        final String key = schemaItem.getKey();
        final String claimId = UUID.randomUUID().toString();
        // Claim the schema item, then check whether our claim won.
        session.execute(insertStatement.bind(key, claimId));
        Row row = session.execute(selectStatement.bind(key)).one();
        if (StringUtils.equals(claimId, row.getString(0))) {
            final SchemaStatement statement = schemaItem.createStatement();
            LOGGER.info("Creating table(s) to support \"{}\":\n\t{}\n", event.getPojoBinding().getPojoType().getSimpleName(), statement);
            session.execute(statement);
        }
    });
}
python
def epubcheck_help():
    """Return epubcheck.jar commandline help text.

    :return unicode: helptext from epubcheck.jar
    """
    with open(os.devnull, "w") as devnull:
        process = subprocess.Popen(
            [c.JAVA, '-Duser.language=en', '-jar', c.EPUBCHECK, '-h'],
            stdout=subprocess.PIPE,
            stderr=devnull,
        )
        stdout, _ = process.communicate()
    return stdout.decode()
java
/**
 * Starts a pattern-match attempt at the given instruction position by
 * seeding the work list with an initial state at the pattern's first
 * element.
 *
 * @param basicBlock the basic block being scanned
 * @param instructionIterator iterator positioned at the candidate instruction
 * @throws DataflowAnalysisException if dataflow information is unavailable
 */
private void attemptMatch(BasicBlock basicBlock, BasicBlock.InstructionIterator instructionIterator)
        throws DataflowAnalysisException {
    work(new State(basicBlock, instructionIterator, pattern.getFirst()));
}
python
def fast_lyapunov_max(w0, hamiltonian, dt, n_steps, d0=1e-5,
                      n_steps_per_pullback=10, noffset_orbits=2, t1=0.,
                      atol=1E-10, rtol=1E-10, nmax=0, return_orbit=True):
    """
    Compute the maximum Lyapunov exponent using a C-implemented estimator
    that uses the DOPRI853 integrator.

    Parameters
    ----------
    w0 : `~gala.dynamics.PhaseSpacePosition`, array_like
        Initial conditions.
    hamiltonian : `~gala.potential.Hamiltonian`
    dt : numeric
        Timestep.
    n_steps : int
        Number of steps to run for.
    d0 : numeric (optional)
        The initial separation.
    n_steps_per_pullback : int (optional)
        Number of steps to run before re-normalizing the offset vectors.
    noffset_orbits : int (optional)
        Number of offset orbits to run.
    t1 : numeric (optional)
        Time of initial conditions. Assumed to be t=0.
    return_orbit : bool (optional)
        Store the full orbit for the parent and all offset orbits.

    Returns
    -------
    LEs : :class:`~astropy.units.Quantity`
        Lyapunov exponents calculated from each offset / deviation orbit.
    orbit : `~gala.dynamics.Orbit` (optional)
    """
    from .lyapunov import dop853_lyapunov_max, dop853_lyapunov_max_dont_save

    # TODO: remove in v1.0 — transparently wrap a bare potential.
    if isinstance(hamiltonian, PotentialBase):
        from ..potential import Hamiltonian
        hamiltonian = Hamiltonian(hamiltonian)

    # The fast path only works with C-implemented potential + frame.
    if not hamiltonian.c_enabled:
        raise TypeError("Input Hamiltonian must contain a C-implemented "
                        "potential and frame.")

    # Coerce raw arrays into a PhaseSpacePosition (first half = positions,
    # second half = velocities).
    if not isinstance(w0, PhaseSpacePosition):
        w0 = np.asarray(w0)
        ndim = w0.shape[0]//2
        w0 = PhaseSpacePosition(pos=w0[:ndim],
                                vel=w0[ndim:])

    _w0 = np.squeeze(w0.w(hamiltonian.units))
    if _w0.ndim > 1:
        raise ValueError("Can only compute fast Lyapunov exponent for a single orbit.")

    if return_orbit:
        t, w, l = dop853_lyapunov_max(hamiltonian, _w0,
                                      dt, n_steps+1, t1,
                                      d0, n_steps_per_pullback, noffset_orbits,
                                      atol, rtol, nmax)
        # Move the time axis first so Orbit.from_w gets (time, ...) layout.
        w = np.rollaxis(w, -1)

        # Fall back to dimensionless time when the unit system has no time
        # unit (e.g. dimensionless Hamiltonians).
        try:
            tunit = hamiltonian.units['time']
        except (TypeError, AttributeError):
            tunit = u.dimensionless_unscaled

        orbit = Orbit.from_w(w=w, units=hamiltonian.units,
                             t=t*tunit, hamiltonian=hamiltonian)
        return l/tunit, orbit

    else:
        l = dop853_lyapunov_max_dont_save(hamiltonian, _w0,
                                          dt, n_steps+1, t1,
                                          d0, n_steps_per_pullback,
                                          noffset_orbits, atol, rtol, nmax)

        try:
            tunit = hamiltonian.units['time']
        except (TypeError, AttributeError):
            tunit = u.dimensionless_unscaled

        return l/tunit
python
def download(self, filename=None):
    """
    Download snapshot to filename

    :param str filename: fully qualified path including filename .zip
    :raises EngineCommandFailed: IOError occurred downloading snapshot
    :return: None
    """
    target = filename or '{}.zip'.format(self.name)
    try:
        self.make_request(
            EngineCommandFailed,
            resource='content',
            filename=target)
    except IOError as e:
        raise EngineCommandFailed('Snapshot download failed: {}'.format(e))
python
def parse(self, limit=None):
    """
    We process each of the postgres tables in turn.
    The order of processing is important here, as we build up a hashmap of
    internal vs external identifers (unique keys by type to FB id).
    These include allele, marker (gene), publication, strain, genotype,
    annotation (association), and descriptive notes.
    :param limit: Only parse this many lines of each table
    :return: None

    .. note:: The call order below is load-bearing — lookup tables built by
       earlier steps are consumed by later ones.
    """
    if limit is not None:
        LOG.info("Only parsing first %d rows of each file", limit)
    LOG.info("Parsing files...")

    if self.test_only:
        self.test_mode = True

    # the following will provide us the hash-lookups
    self._process_dbxref()
    self._process_cvterm()
    self._process_genotypes(limit)
    self._process_pubs(limit)
    # do this before environments to get the external ids
    self._process_environment_cvterm()
    self._process_environments()
    self._process_organisms(limit)  # must be done before features
    self._process_organism_dbxref(limit)
    self._process_features(limit)
    self._process_phenotype(limit)
    self._process_phenotype_cvterm()
    # gets external mappings for features (genes, variants, etc)
    self._process_feature_dbxref(limit)
    # do this after organisms to get the right taxonomy
    self._process_stocks(limit)
    # figures out types of some of the features
    self._get_derived_feature_types(limit)

    # These are the associations amongst the objects above
    self._process_stockprop(limit)
    self._process_pub_dbxref(limit)
    self._process_phendesc(limit)
    self._process_feature_genotype(limit)
    self._process_feature_pub(limit)
    self._process_stock_genotype(limit)
    self._process_phenstatement(limit)   # these are G2P associations
    self._process_feature_relationship(limit)
    self._process_disease_models(limit)
    # TODO add version info from file somehow
    # (in parser rather than during fetching)

    LOG.info("Finished parsing.")
    LOG.info("Loaded %d nodes", len(self.graph))
    return
java
/**
 * Loads the pages of an (optionally encrypted/signed) source PDF into the
 * report being written.  The footer is temporarily disabled when requested,
 * the page size is saved and restored around the import, and when a
 * keystore is configured the source PDF is decrypted with the configured
 * certificate/key before loading.
 *
 * @param canvas the PDF content byte of the page being written
 * @param data styler data payload (kept for later use)
 * @param opacity requested opacity (unused here)
 * @return the image currently being processed, or {@code null} once the
 *         PDF pages have been appended directly to the document
 * @throws VectorPrintException on keystore or decryption failures
 * @throws BadElementException propagated from iText element creation
 */
@Override
protected com.itextpdf.text.Image createImage(PdfContentByte canvas, Object data, float opacity) throws VectorPrintException, BadElementException {
    // Re-entrant call while a page image is in flight: just return it.
    if (getImageBeingProcessed()!=null) {
        return getImageBeingProcessed();
    }
    this.data = data;
    boolean doFooter = getSettings().getBooleanProperty(Boolean.FALSE, ReportConstants.PRINTFOOTER);
    // Temporarily suppress the footer while foreign pages are imported.
    if (doFooter && getValue(NOFOOTER, Boolean.class)) {
        getSettings().put(ReportConstants.PRINTFOOTER, "false");
    }
    // remember page size
    Rectangle r = getDocument().getPageSize();
    // each page on its own page in the pdf to be written
    if (getValue(DocumentSettings.KEYSTORE, URL.class)!=null) {
        // Encrypted source: load the keystore and decrypt with the
        // configured alias/provider.
        char[] pw = getValue(DocumentSettings.KEYSTORE_PASSWORD, char[].class);
        KeyStore ks = null;
        try {
            ks = CertificateHelper.loadKeyStore(getValue(DocumentSettings.KEYSTORE, URL.class).openStream(),
                getValue(KEYSTORETYPE_PARAM, DocumentSettings.KEYSTORETYPE.class).name(), pw.clone());
            String alias = getSettings().getProperty(DEFAULTKEYSTORE_ALIAS, KEYSTOREALIAS);
            String provider = getSettings().getProperty(DEFAULTSECURITYPROVIDER, SECURITYPROVIDER);
            getImageLoader().loadPdf(
                getValue(Image.URLPARAM, URL.class).openStream(),
                getWriter(),
                ks.getCertificate(alias),
                CertificateHelper.getKey(ks, alias, pw.clone()),
                provider,
                this,
                ArrayHelper.unWrap(getValue(NumberCondition.NUMBERS, Integer[].class)));
        } catch (KeyStoreException | IOException | NoSuchAlgorithmException | CertificateException | UnrecoverableKeyException ex) {
            throw new VectorPrintException(ex);
        }
    } else {
        // Unencrypted (or password-protected) source PDF.
        getImageLoader().loadPdf(
            getValue(Image.URLPARAM, URL.class),
            getWriter(),
            getValue(DocumentSettings.PASSWORD, byte[].class),
            this,
            ArrayHelper.unWrap(getValue(NumberCondition.NUMBERS, Integer[].class)));
    }
    // restore settings
    getDocument().setPageSize(r);
    getDocument().newPage();
    if (doFooter && getValue(NOFOOTER, Boolean.class)) {
        getSettings().put(ReportConstants.PRINTFOOTER, "true");
    }
    return null;
}
python
def num_model_per_iteration(self):
    """Get number of models per iteration.

    Returns
    -------
    model_per_iter : int
        The number of models per iteration.
    """
    out_count = ctypes.c_int(0)
    # The C API writes the result through the out-parameter.
    _safe_call(_LIB.LGBM_BoosterNumModelPerIteration(
        self.handle,
        ctypes.byref(out_count)))
    return out_count.value
python
async def save(self, request, response):
    """Save session to response cookies."""
    # Only plain Response objects that are not yet prepared, for requests
    # that actually carry a session, are eligible.
    eligible = (isinstance(response, Response)
                and SESSION_KEY in request
                and not response.prepared)
    if not eligible:
        return
    session = request[SESSION_KEY]
    if session.save(response.set_cookie):
        self.app.logger.debug('Session saved: %s', session)
python
def parse_args(self, ap_mac, ssid, passphrase,
               channel=None,
               # KRACK attack options
               double_3handshake=True,
               encrypt_3handshake=True,
               wait_3handshake=0,
               double_gtk_refresh=True,
               arp_target_ip=None,
               arp_source_ip=None,
               wait_gtk=10,
               **kwargs):
    """
    Mandatory arguments:
    @iface: interface to use (must be in monitor mode)
    @ap_mac: AP's MAC
    @ssid: AP's SSID
    @passphrase: AP's Passphrase (min 8 char.)

    Optional arguments:
    @channel: used by the interface. Default 6, autodetected on windows

    Krack attacks options:

     - Msg 3/4 handshake replay:
    double_3handshake: double the 3/4 handshake message
    encrypt_3handshake: encrypt the second 3/4 handshake message
    wait_3handshake: time to wait (in sec.) before sending the second 3/4
     - double GTK rekeying:
    double_gtk_refresh: double the 1/2 GTK rekeying message
    wait_gtk: time to wait (in sec.) before sending the GTK rekeying
    arp_target_ip: Client IP to use in ARP req. (to detect attack success)
                   If None, use a DHCP server
    arp_source_ip: Server IP to use in ARP req. (to detect attack success)
                   If None, use the DHCP server gateway address
    """
    super(KrackAP, self).parse_args(**kwargs)

    # Main AP options
    self.mac = ap_mac
    self.ssid = ssid
    self.passphrase = passphrase
    if channel is None:
        if WINDOWS:
            # On Windows, try to read the channel from the interface;
            # fall back to 6 on failure.
            try:
                channel = kwargs.get("iface", conf.iface).channel()
            except (Scapy_Exception, AttributeError):
                channel = 6
        else:
            channel = 6
    self.channel = channel

    # Internal structures
    self.last_iv = None
    self.client = None
    self.seq_num = count()
    self.replay_counter = count()
    self.time_handshake_end = None
    # Embedded DHCP server so the victim can obtain an address over WPA.
    self.dhcp_server = DHCPOverWPA(send_func=self.send_ether_over_wpa,
                                   pool=Net("192.168.42.128/25"),
                                   network="192.168.42.0/24",
                                   gw="192.168.42.1")
    self.arp_sent = []
    self.arp_to_send = 0
    self.arp_retry = 0

    # Bit 0: 3way handshake sent
    # Bit 1: GTK rekeying sent
    # Bit 2: ARP response obtained
    self.krack_state = 0

    # Krack options
    self.double_3handshake = double_3handshake
    self.encrypt_3handshake = encrypt_3handshake
    self.wait_3handshake = wait_3handshake
    self.double_gtk_refresh = double_gtk_refresh
    self.arp_target_ip = arp_target_ip
    if arp_source_ip is None:
        # Use the DHCP server Gateway address
        arp_source_ip = self.dhcp_server.gw
    self.arp_source_ip = arp_source_ip
    self.wait_gtk = wait_gtk

    # May take several seconds
    self.install_PMK()
java
/**
 * Adds the given tag keys to the set scheduled for removal, lazily
 * initializing the backing list, and returns this request for chaining.
 *
 * @param tagsToRemove tag keys to remove
 * @return this request (fluent style)
 */
public UntagResourceRequest withTagsToRemove(String... tagsToRemove) {
    if (this.tagsToRemove == null) {
        setTagsToRemove(new java.util.ArrayList<String>(tagsToRemove.length));
    }
    java.util.Collections.addAll(this.tagsToRemove, tagsToRemove);
    return this;
}
python
def unCompressed(self):
    """ Derive uncompressed key """
    public_key = repr(self._pk)
    prefix = public_key[0:2]
    # "04" already marks an uncompressed key; nothing to do.
    if prefix == "04":
        return public_key
    # Compressed keys carry the y-parity in the prefix ("02" even, "03" odd).
    assert prefix in ("02", "03")
    x = int(public_key[2:], 16)
    y = self._derive_y_from_x(x, prefix == "02")
    return '04' + '%064x' % x + '%064x' % y
python
def _write_cache(self, lines, append=False): """Write virtualenv metadata to cache.""" mode = 'at' if append else 'wt' with open(self.filepath, mode, encoding='utf8') as fh: fh.writelines(line + '\n' for line in lines)
python
def from_start_and_end(cls, start, end, aa=None, major_pitch=225.8,
                       major_radius=5.07, major_handedness='l',
                       minor_helix_type='alpha', orientation=1,
                       phi_c_alpha=0.0, minor_repeat=None):
    """Creates a `HelicalHelix` between a `start` and `end` point.

    When `aa` is omitted, the residue count is derived from the
    start-to-end distance divided by the minor helix's rise per residue.
    """
    start = numpy.array(start)
    end = numpy.array(end)
    if aa is None:
        # Rise per residue is the second entry of the helix-parameter
        # tuple — presumably (residues_per_turn, rise, ...); verify.
        rise_per_residue = _helix_parameters[minor_helix_type][1]
        aa = int((numpy.linalg.norm(end - start) / rise_per_residue) + 1)
    helix = cls(
        aa=aa, major_pitch=major_pitch, major_radius=major_radius,
        major_handedness=major_handedness,
        minor_helix_type=minor_helix_type, orientation=orientation,
        phi_c_alpha=phi_c_alpha, minor_repeat=minor_repeat)
    helix.move_to(start=start, end=end)
    return helix
python
def zoom_out(self, decr=1.0):
    """Zoom out a level.

    Also see :meth:`zoom_to`.

    Parameters
    ----------
    decr : float (optional, defaults to 1)
        The value to decrease the zoom level
    """
    current_level = self.zoom.calc_level(self.t_['scale'])
    self.zoom_to(current_level - decr)
python
def inspect(config_file, profile):
    """Inspect existing configuration/profile.

    Prints the resolved configuration file, profile name, and every
    key/value pair of the selected section; configuration errors are
    echoed rather than raised.
    """
    try:
        section = load_profile_from_files(
            [config_file] if config_file else None, profile)
        cfg_label = config_file if config_file else "auto-detected"
        profile_label = profile if profile else "auto-detected"
        click.echo("Configuration file: {}".format(cfg_label))
        click.echo("Profile: {}".format(profile_label))
        click.echo("---")
        for key, val in section.items():
            click.echo("{} = {}".format(key, val))
    except (ValueError, ConfigFileReadError, ConfigFileParseError) as exc:
        # Report the problem to the user instead of crashing the CLI.
        click.echo(exc)
python
def _public(self, command, **params):
    """Invoke the 'command' public API with optional params."""
    # The command name rides along as an ordinary query parameter.
    params['command'] = command
    return self.session.get(self._public_url, params=params)
python
def get_controller_list(self):
    """
    Returns an iterable of tuples containing (index, controller_name) pairs.

    Controller indexes start at 0.

    You may easily transform this to a {name: index} mapping by using:

    >>> controllers = {name: index for index, name in raildriver.get_controller_list()}

    :return enumerate
    """
    raw = self.dll.GetControllerList().decode()
    if not raw:
        # No controllers reported — return an empty (list) iterable.
        return []
    # Names arrive as a single '::'-delimited string.
    return enumerate(raw.split('::'))
java
/**
 * Reads a main-attribute value from the manifest of the jar that the
 * given class was loaded from.
 *
 * @param clazz    class whose containing jar's manifest is consulted
 * @param attrName name of the main attribute to read
 * @return the attribute value, or {@code null} if the class was not
 *         loaded from a jar, the jar has no manifest, the attribute is
 *         absent, or an I/O error occurs
 */
public static String getJarManifestValue(Class clazz, String attrName) {
    // Locate the .class resource so we can find the jar it came from.
    URL url = getResource("/" + clazz.getName().replace('.', '/') + ".class");
    if (url == null) {
        return null;
    }
    try {
        URLConnection uc = url.openConnection();
        if (uc instanceof java.net.JarURLConnection) {
            JarURLConnection juc = (JarURLConnection) uc;
            Manifest m = juc.getManifest();
            // A jar may legitimately have no manifest; the original code
            // would NPE here.
            if (m == null) {
                return null;
            }
            return m.getMainAttributes().getValue(attrName);
        }
    } catch (IOException e) {
        return null;
    }
    return null;
}
python
def _cmdloop(self) -> bool:
    """Repeatedly issue a prompt, accept input, parse an initial prefix
    off the received input, and dispatch to action methods, passing them
    the remainder of the line as argument.

    This serves the same role as cmd.cmdloop().

    :return: True implies the entire application should exit.
    """
    # An almost perfect copy from Cmd; however, the pseudo_raw_input portion
    # has been split out so that it can be called separately
    if self.use_rawinput and self.completekey and rl_type != RlType.NONE:

        # Set up readline for our tab completion needs
        if rl_type == RlType.GNU:
            # Set GNU readline's rl_basic_quote_characters to NULL so it won't automatically add a closing quote
            # We don't need to worry about setting rl_completion_suppress_quote since we never declared
            # rl_completer_quote_characters.
            saved_basic_quotes = ctypes.cast(rl_basic_quote_characters, ctypes.c_void_p).value
            rl_basic_quote_characters.value = None

        # Remember the previous completer so it can be restored on exit.
        saved_completer = readline.get_completer()
        readline.set_completer(self.complete)

        # Break words on whitespace and quotes when tab completing
        completer_delims = " \t\n" + ''.join(constants.QUOTES)

        if self.allow_redirection:
            # If redirection is allowed, then break words on those characters too
            completer_delims += ''.join(constants.REDIRECTION_CHARS)

        saved_delims = readline.get_completer_delims()
        readline.set_completer_delims(completer_delims)

        # Enable tab completion
        readline.parse_and_bind(self.completekey + ": complete")

    stop = False
    try:
        while not stop:
            if self.cmdqueue:
                # Run command out of cmdqueue if nonempty (populated by load command or commands at invocation)
                line = self.cmdqueue.pop(0)

                # 'eos' is suppressed from echo — presumably an internal
                # end-of-script sentinel; verify against the load command.
                if self.echo and line != 'eos':
                    self.poutput('{}{}'.format(self.prompt, line))
            else:
                # Otherwise, read a command from stdin
                try:
                    line = self.pseudo_raw_input(self.prompt)
                except KeyboardInterrupt as ex:
                    if self.quit_on_sigint:
                        raise ex
                    else:
                        # Swallow Ctrl-C: print feedback and issue an
                        # empty command instead of exiting.
                        self.poutput('^C')
                        line = ''

            # Run the command along with all associated pre and post hooks
            stop = self.onecmd_plus_hooks(line)
    finally:
        if self.use_rawinput and self.completekey and rl_type != RlType.NONE:

            # Restore what we changed in readline
            readline.set_completer(saved_completer)
            readline.set_completer_delims(saved_delims)

            if rl_type == RlType.GNU:
                readline.set_completion_display_matches_hook(None)
                rl_basic_quote_characters.value = saved_basic_quotes
            elif rl_type == RlType.PYREADLINE:
                # NOTE(review): orig_pyreadline_display is not defined in
                # this method — presumably a module-level value captured at
                # import time; confirm.
                # noinspection PyUnresolvedReferences
                readline.rl.mode._display_completions = orig_pyreadline_display

        # Clear any pending queued commands and script state, even on error.
        self.cmdqueue.clear()
        self._script_dir.clear()

    return stop
python
def getWinners(self, profile, sampleFileName=None):
    """
    Returns a list of all winning candidates when we use MCMC approximation to
    compute Bayesian utilities for an election profile.

    :ivar Profile profile: A Profile object that represents an election profile.
    :ivar str sampleFileName: An optional argument for the name of the input
        file containing sample data. If a file name is given, this method will
        use the samples in the file instead of generating samples itself.
    """
    # `is not None` instead of `!= None` (identity test for the sentinel).
    if sampleFileName is not None:
        candScores = self.getCandScoresMapFromSamplesFile(profile, sampleFileName)
    else:
        candScores = self.getCandScoresMap(profile)

    # Check whether the winning candidate is the candidate that maximizes
    # the score or minimizes it.
    if self.maximizeCandScore:
        bestScore = max(candScores.values())
    else:
        bestScore = min(candScores.values())

    # All candidates tied at the best score win.
    return [cand for cand, score in candScores.items() if score == bestScore]
java
public static String escapeRegExp(final String value) { final StringBuilder buff = new StringBuilder(); if (value == null || value.length() == 0) { return ""; } int index = 0; // $( )+.[^{\ while (index < value.length()) { final char current = value.charAt(index); switch (current) { case '.': buff.append("\\."); break; // case '/': // case '|': case '\\': buff.append("[\\\\|/]"); break; case '(': buff.append("\\("); break; case ')': buff.append("\\)"); break; case '[': buff.append("\\["); break; case ']': buff.append("\\]"); break; case '{': buff.append("\\{"); break; case '}': buff.append("\\}"); break; case '^': buff.append("\\^"); break; case '+': buff.append("\\+"); break; case '$': buff.append("\\$"); break; default: buff.append(current); } index++; } return buff.toString(); }
java
/**
 * Tears down this wrapper's association with its messaging engine:
 * removes the engine from the static reloading and per-bus registries,
 * notifies registered listeners of the destruction, and drops the
 * engine reference.
 */
public void destroy() {
    final String methodName = "destroy";
    if (TRACE.isEntryEnabled()) {
        SibTr.entry(this, TRACE, methodName);
    }

    // The engine can no longer be considered mid-reload.
    RELOADING_MESSAGING_ENGINES.remove(_messagingEngine.getUuid()
            .toString());

    synchronized (MESSAGING_ENGINES) {

        // Get the set of MEs for this ME's bus
        Set messagingEngines = (Set) MESSAGING_ENGINES.get(_messagingEngine
                .getBusName());

        // Set should always exist if the engine initialized but
        // just in case...
        if (messagingEngines != null) {

            // Remove the destroyed ME
            messagingEngines.remove(_messagingEngine);

            // If the set is now empty, take it out of the map
            if (messagingEngines.isEmpty()) {
                MESSAGING_ENGINES.remove(_messagingEngine.getBusName());
            }

        } else {

            if (TRACE.isDebugEnabled()) {
                SibTr.debug(this, TRACE, "Received destroy for unknown ME:",
                        _messagingEngine);
            }
        }
    }

    // Get listeners to notify (looked up outside the registry lock).
    final Set listeners = getListeners(_messagingEngine.getBusName());

    // Notify listeners
    for (final Iterator iterator = listeners.iterator(); iterator.hasNext();) {
        final SibRaMessagingEngineListener listener = (SibRaMessagingEngineListener) iterator
                .next();
        listener.messagingEngineDestroyed(_messagingEngine);
    }

    // Drop the reference last so listeners above still see the engine.
    _messagingEngine = null;

    if (TRACE.isEntryEnabled()) {
        SibTr.exit(this, TRACE, methodName);
    }
}
java
/**
 * Always fails: retrieving a column value as an arbitrary Java type is
 * not supported by this driver.
 *
 * @param columnIndex 1-based column index (ignored)
 * @param type        requested target type (ignored)
 * @return never returns normally
 * @throws SQLException always, with a "feature not supported" error
 */
public <T> T getObject(int columnIndex, Class<T> type) throws SQLException {
    throw SQLError.noSupport();
}
python
def post(self, uri, params=None, data=None):
    '''A generic method to make POST requests to the OpenDNS Investigate API
    on the given URI.

    :param uri: path joined onto ``Investigate.BASE_URL``
    :param params: optional query parameters (defaults to none)
    :param data: optional form body (defaults to none)
    :return: the ``requests`` response object
    '''
    # Mutable default arguments ({} in the signature) are shared between
    # calls; use None sentinels and substitute fresh empty dicts instead.
    return self._session.post(
        urljoin(Investigate.BASE_URL, uri),
        params={} if params is None else params,
        data={} if data is None else data,
        headers=self._auth_header,
        proxies=self.proxies
    )
python
def install_theme(theme=None, monofont=None, monosize=11, nbfont=None, nbfontsize=13, tcfont=None, tcfontsize=13, dffontsize=93, outfontsize=85, mathfontsize=100, margins='auto', cellwidth='980', lineheight=170, cursorwidth=2, cursorcolor='default', altprompt=False, altmd=False, altout=False, hideprompt=False, vimext=False, toolbar=False, nbname=False, kernellogo=False, dfonts=False): """ Install theme to jupyter_customcss with specified font, fontsize, md layout, and toolbar pref """ # get working directory wkdir = os.path.abspath('./') stylefx.reset_default(False) stylefx.check_directories() doc = '\nConcatenated font imports, .less styles, & custom variables\n' s = '*' * 65 style_less = '\n'.join(['/*', s, s, doc, s, s, '*/']) style_less += '\n\n\n' style_less += '/* Import Notebook, Markdown, & Code Fonts */\n' # initialize style_less & style_css style_less = stylefx.set_font_properties( style_less=style_less, monofont=monofont, monosize=monosize, nbfont=nbfont, nbfontsize=nbfontsize, tcfont=tcfont, tcfontsize=tcfontsize, dffontsize=dffontsize, outfontsize=outfontsize, mathfontsize=mathfontsize, dfonts=dfonts) if theme is not None: # define some vars for cell layout cursorcolor = stylefx.get_colors(theme=theme, c=cursorcolor) style_less = stylefx.style_layout( style_less, theme=theme, cellwidth=cellwidth, margins=margins, lineheight=lineheight, altprompt=altprompt, altmd=altmd, altout=altout, hideprompt=hideprompt, cursorwidth=cursorwidth, cursorcolor=cursorcolor, vimext=vimext, toolbar=toolbar, nbname=nbname, kernellogo=kernellogo) # compile tempfile.less to css code and append to style_css style_css = stylefx.less_to_css(style_less) # append mathjax css & script to style_css style_css = stylefx.set_mathjax_style(style_css, mathfontsize) # install style_css to .jupyter/custom/custom.css stylefx.write_final_css(style_css) # change back to original working directory os.chdir(wkdir)
java
/**
 * Subtracts the given area from this one in place.
 * <p>
 * No-op when the argument is null or either area is empty. Uses the
 * polygon-only routine when both areas are polygonal, otherwise the
 * general curve-polygon routine; a negligibly small remainder is
 * collapsed to the empty area.
 *
 * @param area the area to subtract (may be {@code null})
 */
public void subtract (Area area) {
    // Guard first: avoids dereferencing a null argument below.
    if (area == null || isEmpty() || area.isEmpty()) {
        return;
    }
    final boolean bothPolygonal = isPolygonal() && area.isPolygonal();
    if (bothPolygonal) {
        subtractPolygon(area);
    } else {
        subtractCurvePolygon(area);
    }
    if (areaBoundsSquare() < GeometryUtil.EPSILON) {
        reset();
    }
}
python
def list_installed():
    '''
    Return a list of all installed kernels.

    CLI Example:

    .. code-block:: bash

        salt '*' kernelpkg.list_installed
    '''
    # Match package names of the form "<prefix>-<version>-<type>".
    pkg_re = re.compile(r'^{0}-[\d.-]+-{1}$'.format(
        _package_prefix(), _kernel_type()))
    pkgs = __salt__['pkg.list_pkgs'](versions_as_list=True)
    if pkgs is None:
        pkgs = []

    # NOTE: the original code checked `result is None` after filtering,
    # which is dead code — a list is never None.
    result = [pkg for pkg in pkgs if pkg_re.match(pkg)]

    # Strip "<prefix>-" from each name, then sort by kernel version.
    prefix_len = len(_package_prefix()) + 1
    versions = [pkg[prefix_len:] for pkg in result]

    if six.PY2:
        return sorted(versions, cmp=_cmp_version)
    return sorted(versions, key=functools.cmp_to_key(_cmp_version))
java
/**
 * Fallback handler for completed HTTP requests that no other component
 * has handled: sends 404 for configured fallback request types and 501
 * otherwise. Does nothing when the request was already marked handled
 * or its response status was changed away from 501.
 *
 * @param event the completion event wrapping the original request
 * @param appChannel the subchannel to send the response on
 * @throws InterruptedException if response sending is interrupted
 */
@Handler
public void onRequestCompleted(
        Request.In.Completed event, IOSubchannel appChannel)
        throws InterruptedException {
    final Request.In requestEvent = event.event();

    // A check that also works with null (the event result may be unset).
    if (Boolean.TRUE.equals(requestEvent.get())
            || requestEvent.httpRequest().response().map(
                    response -> response.statusCode() != HttpStatus.NOT_IMPLEMENTED
                            .statusCode())
                    .orElse(false)) {
        // Some other component has taken care
        return;
    }

    // Check if "Not Found" should be sent
    if (providedFallbacks != null
            && providedFallbacks.contains(requestEvent.getClass())) {
        ResponseCreationSupport.sendResponse(
                requestEvent.httpRequest(), appChannel, HttpStatus.NOT_FOUND);
        return;
    }

    // Last resort
    ResponseCreationSupport.sendResponse(requestEvent.httpRequest(),
            appChannel, HttpStatus.NOT_IMPLEMENTED);
}