language
stringclasses
2 values
func_code_string
stringlengths
63
466k
java
/**
 * Filters {@code iterable} in parallel, collecting into {@code target} every
 * element that does NOT satisfy {@code predicate}.
 *
 * @param iterable             source elements
 * @param predicate            elements matching this predicate are dropped
 * @param target               collection receiving the surviving elements
 * @param allowReorderedResult whether results may arrive out of source order
 * @return {@code target}, for chaining
 */
public static <T, R extends Collection<T>> R reject(
        Iterable<T> iterable,
        Predicate<? super T> predicate,
        R target,
        boolean allowReorderedResult)
{
    // Delegate to the fully-parameterised overload, supplying the shared
    // defaults for fork size and executor service.
    return ParallelIterate.reject(
            iterable,
            predicate,
            target,
            ParallelIterate.DEFAULT_MIN_FORK_SIZE,
            ParallelIterate.EXECUTOR_SERVICE,
            allowReorderedResult);
}
python
def recvProxyData(self, data):
    """Forward *data* to the server, buffering it until initialisation completes."""
    if not self.initialized:
        # Handshake not finished yet -- park the payload for later delivery.
        self.queued_data.append(data)
        return
    self.sendData(data)
java
/**
 * Maps every emission of {@code fromObservable} to the constant {@code toValue},
 * subscribing on the I/O scheduler.
 *
 * @param fromObservable source observable; may be {@code null}
 * @param toValue        constant value emitted for each upstream emission
 * @return the mapped observable, or an empty observable when the source is null
 */
public static <T> Observable<T> map(Observable<?> fromObservable, final T toValue) {
    if (fromObservable == null) {
        // No source: complete immediately without emitting anything.
        return Observable.empty();
    }
    return fromObservable
            .subscribeOn(Schedulers.io())
            .map(new RXMapper<T>(toValue));
}
java
/**
 * Converts a hue angle (radians) into Cb/Cr chroma coordinates, scaled so
 * that the larger of the two components has magnitude exactly 0.5.
 *
 * @param angle hue angle in radians
 * @return a two-element array {cb, cr}
 */
private static double[] angleToCbCr(double angle) {
    final double rawCb = Math.cos(angle);
    final double rawCr = Math.sin(angle);
    // Scale so the dominant axis reaches +/-0.5. The divisor is never zero
    // because cos and sin cannot both vanish for the same angle.
    final double scale = 0.5 / Math.max(Math.abs(rawCb), Math.abs(rawCr));
    return new double[] { rawCb * scale, rawCr * scale };
}
java
/**
 * Record-change hook: after an add or update, if this message-log record has a
 * timeout time set, possibly pings the message-timeout process so it wakes up
 * in time to handle the (new) earliest timeout.
 *
 * @param field          the field that changed (passed through to super)
 * @param iChangeType    the type of change (only AFTER_ADD/AFTER_UPDATE acted on)
 * @param bDisplayOption display flag (passed through to super)
 * @return the result of the superclass record-change processing
 */
public int doRecordChange(FieldInfo field, int iChangeType, boolean bDisplayOption)
{
    if ((iChangeType == DBConstants.AFTER_ADD_TYPE) || (iChangeType == DBConstants.AFTER_UPDATE_TYPE))
        if (!this.getOwner().getField(MessageLog.TIMEOUT_TIME).isNull())
    {
        Date timeTimeout = ((DateTimeField)this.getOwner().getField(MessageLog.TIMEOUT_TIME)).getDateTime();
        Date timeNow = new Date();
        if (timeTimeout != null)
        {
            // NOTE(review): this compares m_lastTime against NOW (+ slack),
            // not against the new timeout -- presumably "the previously
            // scheduled wake-up has already passed, so schedule a new one".
            // Confirm the intent; comparing against timeTimeout would also
            // be plausible here.
            if ((m_lastTime == null) || (m_lastTime.getTime() <= timeNow.getTime() + EXTRA_TIME_MS))
            {
                // All the waiting tasks have been run, ping the process to start up again.
                MessageManager messageManager = ((Application)this.getOwner().getTask().getApplication()).getMessageManager();
                Map<String,Object> properties = new Hashtable<String,Object>();
                properties.put(PrivateTaskScheduler.TIME_TO_RUN, timeTimeout);
                // NO_DUPLICATE: avoid queueing a second wake-up for the same time.
                properties.put(PrivateTaskScheduler.NO_DUPLICATE, Constants.TRUE);
                properties.put(DBParams.PROCESS, MessageTimeoutProcess.class.getName());
                if (messageManager != null)
                    messageManager.sendMessage(new MapMessage(new BaseMessageHeader(MessageTimeoutProcess.TIMEOUT_QUEUE_NAME, MessageConstants.INTRANET_QUEUE, this, null), properties));
            }
        }
        // Remember the most recent timeout we saw, even when no ping was sent.
        m_lastTime = timeTimeout;
    }
    return super.doRecordChange(field, iChangeType, bDisplayOption);
}
java
/**
 * Notifies the remote node that it presented an unauthorized cookie, then
 * closes the connection and throws {@link OtpAuthException}.
 *
 * <p>Builds a raw distribution-protocol REG_SEND carrying a {@code $gen_cast}
 * print message (the exact shape Erlang's net kernel expects), sends it on a
 * best-effort basis, and always closes the connection in {@code finally}.
 *
 * @param local  the local node whose pid is used as a disposable sender
 * @param cookie the (rejected) cookie presented by the remote node
 * @throws OtpAuthException always, after attempting to notify the peer
 */
private void cookieError(final OtpLocalNode local, final OtpErlangAtom cookie)
        throws OtpAuthException {
    try {
        @SuppressWarnings("resource")
        final OtpOutputStream header = new OtpOutputStream(headerLen);

        // preamble: 4 byte length + "passthrough" tag + version
        header.write4BE(0); // reserve space for length
        header.write1(passThrough);
        header.write1(version);

        // REG_SEND control tuple: {tag, from-pid, cookie, 'auth'}
        header.write_tuple_head(4);
        header.write_long(regSendTag);
        header.write_any(local.createPid()); // disposable pid
        header.write_atom(cookie.atomValue()); // important: his cookie,
                                               // not mine...
        header.write_atom("auth");

        // version for payload
        header.write1(version);

        // the payload
        // the no_auth message (copied from Erlang) Don't change this
        // (Erlang will crash)
        // {$gen_cast, {print, "~n** Unauthorized cookie ~w **~n",
        // [foo@aule]}}
        final OtpErlangObject[] msg = new OtpErlangObject[2];
        final OtpErlangObject[] msgbody = new OtpErlangObject[3];
        msgbody[0] = new OtpErlangAtom("print");
        msgbody[1] = new OtpErlangString("~n** Bad cookie sent to " + local + " **~n");
        // Erlang will crash and burn if there is no third argument here...
        msgbody[2] = new OtpErlangList(); // empty list
        msg[0] = new OtpErlangAtom("$gen_cast");
        msg[1] = new OtpErlangTuple(msgbody);

        @SuppressWarnings("resource")
        final OtpOutputStream payload = new OtpOutputStream(new OtpErlangTuple(msg));

        // fix up length in preamble (total bytes after the 4-byte length field)
        header.poke4BE(0, header.size() + payload.size() - 4);

        try {
            do_send(header, payload);
        } catch (final IOException e) {
        } // ignore: notification is best-effort; we are about to throw anyway
    } finally {
        close();
    }
    throw new OtpAuthException("Remote cookie not authorized: " + cookie.atomValue());
}
java
/**
 * Evaluates the given XPath expression against this user's layout document.
 *
 * @param xpathExpression compiled expression to evaluate
 * @return the string result of the evaluation
 * @throws PortalException wrapping any {@link XPathExpressionException}
 */
@Override
public String findNodeId(XPathExpression xpathExpression) throws PortalException {
    try {
        return xpathExpression.evaluate(this.layout);
    } catch (XPathExpressionException xpee) {
        final String message = "Exception while executing XPathExpression: " + xpathExpression;
        throw new PortalException(message, xpee);
    }
}
python
def namingConventionDecorator(self, namingConvention):
    """Return a class decorator that installs *namingConvention* on the class.

    :type namingConvention: INamingConvention
    """
    def _applyConvention(cls):
        # Attach the convention via the synthetic-class controller, then hand
        # the (unmodified) class back so normal decorator semantics hold.
        controller = SyntheticClassController(cls)
        controller.setNamingConvention(namingConvention)
        return cls
    return _applyConvention
python
def logout_handler(self, **args):
    """Handler for logout button.

    Delete cookies and return HTML that immediately closes window
    """
    body = "<html><script>window.close();</script></html>"
    response = make_response(body, 200, {'Content-Type': "text/html"})
    # Expire both auth cookies so the browser discards them right away.
    for cookie_name in (self.account_cookie_name, self.access_cookie_name):
        response.set_cookie(cookie_name, expires=0)
    response.headers['Access-Control-Allow-Origin'] = '*'
    return response
java
/**
 * Marshals an {@link ExportBundleRequest} onto the given protocol marshaller.
 *
 * @param exportBundleRequest the request to marshall; must not be null
 * @param protocolMarshaller  the marshaller receiving each bound member
 * @throws SdkClientException when the request is null or marshalling fails
 */
public void marshall(ExportBundleRequest exportBundleRequest, ProtocolMarshaller protocolMarshaller) {
    if (exportBundleRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        // Emit each member against its static binding descriptor.
        protocolMarshaller.marshall(exportBundleRequest.getBundleId(), BUNDLEID_BINDING);
        protocolMarshaller.marshall(exportBundleRequest.getProjectId(), PROJECTID_BINDING);
        protocolMarshaller.marshall(exportBundleRequest.getPlatform(), PLATFORM_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
/**
 * ANTLR-generated lexer rule for token T__85: matches the literal
 * {@code 'strictfp'}. Generated from InternalSARL.g -- do not hand-edit;
 * regeneration will overwrite changes.
 *
 * @throws RecognitionException when the input does not match the literal
 */
public final void mT__85() throws RecognitionException {
    try {
        int _type = T__85;
        int _channel = DEFAULT_TOKEN_CHANNEL;
        // InternalSARL.g:71:7: ( 'strictfp' )
        // InternalSARL.g:71:9: 'strictfp'
        {
        match("strictfp");

        }

        // Publish token type/channel into the shared lexer state.
        state.type = _type;
        state.channel = _channel;
    }
    finally {
    }
}
python
def sectionByID(self, sectionID):
    """Return the :class:`~plexapi.library.LibrarySection` with the given ID.

    Parameters:
        sectionID (str): ID of the section to return.
    """
    cache = self._sectionsByID
    if not cache or sectionID not in cache:
        # Cache miss -- (re)load the section list, which repopulates the map.
        self.sections()
    return self._sectionsByID[sectionID]
python
def posterior_predictive_to_xarray(self):
    """Convert posterior_predictive samples to xarray."""
    draws = get_draws(self.posterior, variables=self.posterior_predictive)
    return dict_to_dataset(
        draws,
        library=self.pystan,
        coords=self.coords,
        dims=self.dims,
    )
python
def get_stream(self, stream_name: str) -> StreamWrapper:
    """
    Get a :py:class:`StreamWrapper` with the given name.

    Streams are created lazily on first request and cached in
    ``self._streams``; subsequent calls return the cached wrapper.

    :param stream_name: stream name
    :return: dataset function name providing the respective stream
    :raise AttributeError: if the dataset does not provide the function creating the stream
    """
    if stream_name not in self._streams:
        stream_fn_name = '{}_stream'.format(stream_name)
        # Only the attribute lookup sits in the try block: the original
        # wrapped StreamWrapper construction as well, so an AttributeError
        # raised *inside* the wrapper was mis-reported as a missing stream
        # function on the dataset.
        try:
            stream_fn = getattr(self._dataset, stream_fn_name)
        except AttributeError as ex:
            raise AttributeError('The dataset does not have a function for creating a stream named `{}`. '
                                 'The function has to be named `{}`.'.format(stream_name, stream_fn_name)) from ex
        # Apply the fixed epoch size only to the training stream.
        stream_epoch_limit = -1
        if self._fixed_epoch_size is not None and stream_name == self._train_stream_name:
            stream_epoch_limit = self._fixed_epoch_size
        self._streams[stream_name] = StreamWrapper(stream_fn, buffer_size=self._buffer,
                                                   epoch_size=stream_epoch_limit, name=stream_name,
                                                   profile=self._epoch_profile)
    return self._streams[stream_name]
java
/**
 * Sets an image via the native layer, converting the attribute map into a
 * JavaScript-compatible JSON map first.
 *
 * @param path       path of the image resource
 * @param attributes image attributes to pass across the JS boundary
 */
public static void setImage(String path, Map<String, String> attributes) {
    final CmsJSONMap jsAttributes = CmsJSONMap.createJSONMap();
    for (Map.Entry<String, String> attribute : attributes.entrySet()) {
        jsAttributes.put(attribute.getKey(), attribute.getValue());
    }
    nativeSetImage(path, jsAttributes);
}
python
def template_str(tem, queue=False, **kwargs):
    '''
    Execute the information stored in a string from an sls template

    CLI Example:

    .. code-block:: bash

        salt '*' state.template_str '<Template String>'
    '''
    conflict = _check_queue(queue, kwargs)
    if conflict is not None:
        return conflict
    opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
    try:
        # __proxy__ only exists on proxy minions; referencing it raises
        # NameError on a regular minion, handled below.
        st_ = salt.state.State(opts, proxy=__proxy__,
                               initial_pillar=_get_initial_pillar(opts))
    except NameError:
        st_ = salt.state.State(opts, initial_pillar=_get_initial_pillar(opts))
    ret = st_.call_template_str(tem)
    _set_retcode(ret)
    return ret
python
def _return_wrapper(fits, return_all, start, trace):
    """If the user wants to get all of the models back, this will return a list
    of the ARIMA models, otherwise it will just return the model. If this is
    called from the end of the function, ``fits`` will already be a list.

    We *know* that if a function call makes it here, ``fits`` is NOT None or
    it would have thrown an exception in :func:`_post_ppc_arima`.

    Parameters
    ----------
    fits : iterable or ARIMA
        The ARIMA(s)

    return_all : bool
        Whether to return all.
    """
    # Normalise a single model into a one-element list.
    if not is_iterable(fits):
        fits = [fits]

    # Optionally report total wall-clock time since ``start``.
    if trace:
        elapsed = time.time() - start
        print('Total fit time: %.3f seconds' % elapsed)

    # Either the whole list, or just the best (first, assumed sorted) fit.
    return fits if return_all else fits[0]
python
def _VerifyHMAC(self, comms=None):
    """Verifies the HMAC.

    This method raises a DecryptionError if the received HMAC does not
    verify. If the HMAC verifies correctly, True is returned.

    Args:
      comms: The comms RdfValue to verify.

    Raises:
      DecryptionError: The HMAC did not verify.

    Returns:
      True
    """
    # Check the encrypted message integrity using HMAC.
    if self.hmac_type == "SIMPLE_HMAC":
        msg = comms.encrypted
        digest = comms.hmac
    elif self.hmac_type == "FULL_HMAC":
        # Full HMAC covers the ciphertext, the wrapped cipher material, the
        # packet IV and the API version (packed little-endian uint32).
        msg = b"".join([
            comms.encrypted, comms.encrypted_cipher,
            comms.encrypted_cipher_metadata,
            comms.packet_iv.SerializeToString(),
            struct.pack("<I", comms.api_version)
        ])
        digest = comms.full_hmac
    else:
        # Fixed typo in the error message ("no supported" -> "not supported").
        raise DecryptionError("HMAC type not supported.")

    try:
        rdf_crypto.HMAC(self.cipher.hmac_key).Verify(msg, digest)
    except rdf_crypto.VerificationError as e:
        raise DecryptionError("HMAC verification failed: %s" % e)

    return True
java
/**
 * Creates a new dashed horizontal line, registers its backing
 * {@link LineObject} with this chart's object list, and returns the line.
 *
 * @return the newly created dashed horizontal line
 */
public DashedHorizontalLine dashedHorizontalLineInstance() {
    final LineObject container = new LineObject();
    final DashedHorizontalLine line = container.dashedHorizontalLineInstance();
    // The container must be tracked so the line is rendered/serialised.
    objectsInstance().add(container);
    return line;
}
java
@RestrictTo(RestrictTo.Scope.LIBRARY) public int incrementDepthBy(String key, int depth) { if (isContainer(key)) { // If it's a container then we added programatically and it isn't a part of the keypath. return 0; } if (!keys.get(depth).equals("**")) { // If it's not a globstar then it is part of the keypath. return 1; } if (depth == keys.size() - 1) { // The last key is a globstar. return 0; } if (keys.get(depth + 1).equals(key)) { // We are a globstar and the next key is our current key so consume both. return 2; } return 0; }
java
/**
 * Rebuilds the importer-configuration map from the given properties.
 * Freshly created configurations take precedence; previously known
 * configurations whose URI was not re-supplied are retained.
 *
 * @param props            importer configuration properties
 * @param formatterBuilder builder used when constructing configurations
 */
public final void configure(Properties props, FormatterBuilder formatterBuilder) {
    final Map<URI, ImporterConfig> fresh =
            m_factory.createImporterConfigurations(props, formatterBuilder);
    // Keep any old entries not overridden by the fresh set.
    final Map<URI, ImporterConfig> carriedOver =
            Maps.filterKeys(m_configs, not(in(fresh.keySet())));
    m_configs = new ImmutableMap.Builder<URI, ImporterConfig>()
            .putAll(fresh)
            .putAll(carriedOver)
            .build();
}
python
def diskdata():
    """Get total disk size in GB.

    Sums the size column (KB blocks) reported by ``df -l -P`` for local
    ``/dev/sd*``, ``/dev/hd*`` and ``/dev/mapper`` devices.

    :returns: dict with a single key ``Disk_GB``.
    """
    p = os.popen("/bin/df -l -P")
    ddata = {}
    tsize = 0
    try:
        for line in p.readlines():
            d = line.split()
            # Only count real local block devices; this also skips the df
            # header row and pseudo filesystems (tmpfs, proc, ...).
            if ("/dev/sd" in d[0] or "/dev/hd" in d[0]
                    or "/dev/mapper" in d[0]):
                tsize = tsize + int(d[1])
    finally:
        # Original leaked the pipe if a malformed line raised above;
        # always close the popen handle.
        p.close()
    # 1 KB block ~= 1e-6 GB (decimal units, matching df -P output).
    ddata["Disk_GB"] = int(tsize)/1000000
    return ddata
python
def log_future_exceptions(logger, f, ignore=()):
    """Log any exceptions set to a future

    Parameters
    ----------
    logger : logging.Logger instance
        logger.exception(...) is called if the future resolves with an exception
    f : Future object
        Future to be monitored for exceptions
    ignore : Exception or tuple of Exception
        Expected exception(s) to ignore, i.e. they will not be logged.

    Notes
    -----
    Useful for fire-and-forget async tasks: without this, an exception set on
    an unawaited future would be silently dropped.
    """
    def _report(resolved_future):
        try:
            resolved_future.result()
        except ignore:
            # Anticipated failure mode -- deliberately silenced.
            pass
        except Exception:
            logger.exception('Unhandled exception returned by future')
    f.add_done_callback(_report)
python
def create_refobject(self, ):
    """Create a refobject in the scene that represents the :class:`Reftrack` instance.

    .. Note:: This will not set the reftrack object.

    :returns: the created reftrack object
    :rtype: scene object
    :raises: None
    """
    parent = self.get_parent()
    # The new refobject is parented under our parent's refobj, when one exists.
    prefobj = parent.get_refobj() if parent else None
    return self.get_refobjinter().create(self.get_typ(), self.get_id(), prefobj)
java
/**
 * Picks a random replica address, excluding the given addresses.
 *
 * @param excludedAddresses addresses that must not be chosen
 * @return a randomly chosen replica, or {@code null} when none remain
 */
private Address chooseTargetReplica(Collection<Address> excludedAddresses) {
    final List<Address> candidates = getReplicaAddresses(excludedAddresses);
    if (candidates.isEmpty()) {
        return null;
    }
    final int pick = ThreadLocalRandomProvider.get().nextInt(candidates.size());
    return candidates.get(pick);
}
java
protected Map<String, String> getElementAttributes() { // Preserve order of attributes Map<String, String> attrs = new HashMap<>(); if (this.getName() != null) { attrs.put("name", this.getName()); } if (this.getValue() != null) { attrs.put("value", this.getValue()); } return attrs; }
python
def merge(bedfiles):
    """
    given a BED file or list of BED files merge them and return a bedtools object
    """
    if isinstance(bedfiles, list):
        catted = concat(bedfiles)
    else:
        catted = concat([bedfiles])
    if catted:
        # Reuse the already-concatenated object. The original re-called
        # concat(bedfiles) here, redoing the work and -- for a single
        # (non-list) input -- passing a bare filename where the first call
        # had wrapped it in a list.
        return catted.sort().merge()
    else:
        return catted
python
def get_value(self, label, takeable=False):
    """
    Retrieve single value at passed index label

    .. deprecated:: 0.21.0
        Please use .at[] or .iat[] accessors.

    Parameters
    ----------
    index : label
    takeable : interpret the index as indexers, default False

    Returns
    -------
    value : scalar value
    """
    deprecation_msg = ("get_value is deprecated and will be removed "
                       "in a future release. Please use "
                       ".at[] or .iat[] accessors instead")
    warnings.warn(deprecation_msg, FutureWarning, stacklevel=2)
    return self._get_value(label, takeable=takeable)
java
/**
 * Converts an array of log-values into normalised probabilities.
 * The maximum is subtracted before exponentiating for numerical stability.
 *
 * @param a log-values
 * @return probabilities summing to 1
 */
public static double[] logs2probs(double[] a) {
    final double max = a[maxIndex(a)];
    final double[] result = new double[a.length];
    double sum = 0.0;
    for (int i = 0; i < a.length; i++) {
        final double p = Math.exp(a[i] - max);
        result[i] = p;
        sum += p;
    }
    normalize(result, sum);
    return result;
}
python
def _create_struct_field(self, env, stone_field):
    """
    This function resolves symbols to objects that we've instantiated in
    the current environment. For example, a field with data type named
    "String" is pointed to a String() object.

    The caller needs to ensure that this stone_field is for a Struct and not
    for a Union.

    Returns:
        stone.data_type.StructField: A field of a struct.
    """
    # Reject the AST-level void marker before resolving anything.
    if isinstance(stone_field, AstVoidField):
        raise InvalidSpec(
            'Struct field %s cannot have a Void type.' %
            quote(stone_field.name),
            stone_field.lineno, stone_field.path)

    data_type = self._resolve_type(env, stone_field.type_ref)
    annotations = [
        self._resolve_annotation_type(env, annotation)
        for annotation in stone_field.annotations
    ]

    # A resolved Void type is equally invalid for a struct field.
    if isinstance(data_type, Void):
        raise InvalidSpec(
            'Struct field %s cannot have a Void type.' %
            quote(stone_field.name),
            stone_field.lineno, stone_field.path)
    if isinstance(data_type, Nullable) and stone_field.has_default:
        raise InvalidSpec('Field %s cannot be a nullable '
                          'type and have a default specified.' %
                          quote(stone_field.name),
                          stone_field.lineno, stone_field.path)

    api_type_field = StructField(
        name=stone_field.name,
        data_type=data_type,
        doc=stone_field.doc,
        ast_node=stone_field,
    )
    api_type_field.set_annotations(annotations)
    return api_type_field
java
/**
 * Returns the last wish-list item for the given wish-list ID, ordered by
 * the supplied comparator.
 *
 * @param commerceWishListId the wish list ID
 * @param orderByComparator  ordering applied when selecting the last item
 * @return the matching item
 * @throws NoSuchWishListItemException when no item matches
 */
@Override
public CommerceWishListItem findByCommerceWishListId_Last(
        long commerceWishListId,
        OrderByComparator<CommerceWishListItem> orderByComparator)
    throws NoSuchWishListItemException {

    CommerceWishListItem item =
        fetchByCommerceWishListId_Last(commerceWishListId, orderByComparator);

    if (item == null) {
        // Build a descriptive not-found message: "<prefix>commerceWishListId=<id>}"
        StringBundler msg = new StringBundler(4);
        msg.append(_NO_SUCH_ENTITY_WITH_KEY);
        msg.append("commerceWishListId=");
        msg.append(commerceWishListId);
        msg.append("}");
        throw new NoSuchWishListItemException(msg.toString());
    }

    return item;
}
python
def read(self, include_deleted=False):
    """Return only read items in the current queryset"""
    filters = {'unread': False}
    # Under soft delete, hide deleted rows unless explicitly requested.
    # When SOFT_DELETE=False the 'deleted' field is untouched by convention,
    # so skipping the filter keeps the query cheaper.
    if is_soft_delete() and not include_deleted:
        filters['deleted'] = False
    return self.filter(**filters)
java
public Object[] jcrValues( Property<?> property ) { @SuppressWarnings( "unchecked" ) List<Object> values = (List<Object>)property.getValues(); // convert CMIS values to JCR values switch (property.getType()) { case STRING: return asStrings(values); case BOOLEAN: return asBooleans(values); case DECIMAL: return asDecimals(values); case INTEGER: return asIntegers(values); case DATETIME: return asDateTime(values); case URI: return asURI(values); case ID: return asIDs(values); case HTML: return asHTMLs(values); default: return null; } }
python
def split_multimol2(mol2_path):
    r"""
    Splits a multi-mol2 file into individual Mol2 file contents.

    Parameters
    -----------
    mol2_path : str
      Path to the multi-mol2 file. Parses gzip files if the filepath
      ends on .gz.

    Returns
    -----------
    A generator object for lists for every extracted mol2-file. Lists contain
        the molecule ID and the mol2 file contents.
        e.g., ['ID1234', ['@<TRIPOS>MOLECULE\n', '...']]. Note that bytestrings
        are returned (for reasons of efficieny) if the Mol2 content is read
        from a gzip (.gz) file.

    """
    if mol2_path.endswith('.gz'):
        open_file, read_mode = gzip.open, 'rb'
    else:
        open_file, read_mode = open, 'r'
    # Marker must match the stream type (bytes for gzip, str otherwise).
    marker = {'rb': b'@<TRIPOS>MOLECULE', 'r': '@<TRIPOS>MOLECULE'}[read_mode]

    with open_file(mol2_path, read_mode) as f:
        mol2 = ['', []]
        while True:
            try:
                line = next(f)
                if line.startswith(marker):
                    # Flush the previous molecule before starting a new one.
                    if mol2[0]:
                        yield mol2
                    mol2 = ['', []]
                    header = next(f)
                    mol2[0] = header.rstrip()
                    mol2[1] = [line, header]
                else:
                    mol2[1].append(line)
            except StopIteration:
                # End of file: emit whatever was accumulated last.
                yield mol2
                return
python
def archive_url(self, entity_id, channel=None):
    '''Generate a URL for the archive of an entity.

    @param entity_id The ID of the entity to look up as a string
        or reference.
    @param channel Optional channel name.
    '''
    base_url = '{}/{}/archive'.format(self.url, _get_path(entity_id))
    # Append the channel query parameter when one was supplied.
    return _add_channel(base_url, channel)
java
@Override public CounterOperation decrement(final String counterName, final long amount) { Preconditions.checkNotNull(counterName); Preconditions.checkArgument(!StringUtils.isBlank(counterName)); Preconditions.checkArgument(amount > 0, "Decrement amounts must be positive!"); // Perform the Decrement final long decrementAmount = amount * -1L; return this.mutateCounterShard(counterName, decrementAmount, Optional.<Integer> absent(), UUID.randomUUID()); }
java
@Override public final Map<String, Object> getUserAttributes(final String uid, final IPersonAttributeDaoFilter filter) { if (!this.enabled) { return null; } Validate.notNull(uid, "uid may not be null."); //Get the attributes from the subclass final Map<String, List<Object>> multivaluedUserAttributes = this.getMultivaluedUserAttributes(uid, filter); return this.flattenResults(multivaluedUserAttributes); }
python
def get_stp_brief_info_output_has_more(self, **kwargs):
    """Auto Generated Code

    Builds a <get_stp_brief_info>/<output>/<has-more> request element and
    passes it to the callback (``self._callback`` unless one is supplied).
    """
    # NOTE(review): the initial <config> element is immediately discarded by
    # the generated code below; the callback receives <get_stp_brief_info>
    # itself. Preserved as-is since this file is auto generated.
    config = ET.Element("config")
    get_stp_brief_info = ET.Element("get_stp_brief_info")
    config = get_stp_brief_info
    has_more = ET.SubElement(ET.SubElement(get_stp_brief_info, "output"), "has-more")
    has_more.text = kwargs.pop('has_more')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
java
/**
 * Transforms a GeoJSON point's coordinates from one CRS to another, in place.
 * On any failure the point is returned with its original coordinates
 * (best effort).
 *
 * @param point GeoJSON point as a LinkedHashMap with a "coordinates" array
 * @param from  source CRS identifier
 * @param to    target CRS identifier
 * @return the same point object, possibly with transformed coordinates
 */
public Object endPoint(Object point, String from, String to) {
    final JSONArray coords = (JSONArray) ((LinkedHashMap) point).get("coordinates");
    try {
        final double x = Double.valueOf(String.valueOf(coords.get(0)));
        final double y = Double.valueOf(String.valueOf(coords.get(1)));
        final double[] xy = GeotoolsUtils.transform(from, to, new double[] { x, y });
        if (xy != null) {
            coords.set(0, xy[0]);
            coords.set(1, xy[1]);
            ((LinkedHashMap) point).put("coordinates", coords);
        }
    } catch (Exception e) {
        // Best effort: swallow and return the untransformed point.
        e.printStackTrace();
    }
    return point;
}
java
/**
 * Parses a style sheet from the given source.
 *
 * @param source   the input to parse
 * @param network  network processor used to fetch resources
 * @param encoding character encoding of the source, or null
 * @param type     kind of source; INLINE is not supported by this overload
 * @param base     base URL for resolving relative references
 * @return the parsed style sheet
 * @throws IllegalArgumentException for INLINE sources, which need an element
 * @throws IOException  on I/O failure
 * @throws CSSException on parse failure
 */
public StyleSheet parse(Object source, NetworkProcessor network, String encoding,
        SourceType type, URL base) throws IOException, CSSException {
    if (type == SourceType.INLINE) {
        throw new IllegalArgumentException("Missing element for INLINE input");
    }
    // Delegate to the full overload with no inline element and no inline-priority flag.
    return parse(source, network, encoding, type, null, false, base);
}
python
def generate(self):
    """
    Generates and returns a numeric captcha image in base64 format.
    Saves the correct answer in `session['captcha_answer']`
    Use later as:

        src = captcha.generate()
        <img src="{{src}}">
    """
    # Zero-padded numeric answer of the configured digit count.
    answer = str(self.rand.randrange(self.max)).zfill(self.digits)
    image_data = self.image_generator.generate(answer)
    base64_captcha = base64.b64encode(image_data.getvalue()).decode("ascii")
    logging.debug('Generated captcha with answer: ' + answer)
    session['captcha_answer'] = answer
    return base64_captcha
java
/**
 * Sums the progress of every child and validates the total against the
 * configured maximum before returning it.
 *
 * @return the cumulative progress across all children
 */
public int getCumulativeProgress() {
    int total = 0;
    final int childCount = getChildCount();
    for (int child = 0; child < childCount; child++) {
        total += getChildProgress(child);
    }
    // Sanity-check the aggregate against the allowed maximum.
    checkCumulativeSmallerThanMax(maxProgress, total);
    return total;
}
java
/**
 * Initialises this component with its collaborators and tuning values.
 *
 * @param marshaller        marshaller used when no per-operation one is given
 * @param operationsFactory factory creating the remote operations
 * @param estimateKeySize   estimated serialized key size (bytes)
 * @param estimateValueSize estimated serialized value size (bytes)
 * @param batchSize         number of entries fetched per batch
 */
public void init(Marshaller marshaller, OperationsFactory operationsFactory, int estimateKeySize, int estimateValueSize, int batchSize) {
    this.defaultMarshaller = marshaller;
    this.operationsFactory = operationsFactory;
    this.estimateKeySize = estimateKeySize;
    this.estimateValueSize = estimateValueSize;
    this.batchSize = batchSize;
    // Until a caller overrides it, operations use the class-wide default format.
    this.dataFormat = defaultDataFormat;
}
java
/**
 * Retrieves from the MatchSpace the consumer points, on the given destination
 * and consumer manager, that match the supplied message. Results are
 * accumulated into {@code searchResults}.
 *
 * @param destUuid      uuid of the destination
 * @param cmUuid        uuid of the consumer manager
 * @param msg           the message to match (its discriminator selects the topic branch)
 * @param searchResults receives the matching consumer points
 * @throws SIDiscriminatorSyntaxException when the message discriminator has invalid topic syntax
 */
public void retrieveMatchingConsumerPoints(
    SIBUuid12 destUuid,
    SIBUuid8 cmUuid,
    JsMessage msg,
    MessageProcessorSearchResults searchResults)
    throws SIDiscriminatorSyntaxException
{
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.entry(
            tc,
            "retrieveMatchingConsumerPoints",
            new Object[] { destUuid, cmUuid, msg, searchResults });

    // Form a branch for the MatchSpace from a combination of the destination uuid and
    // the CM uuid
    String mSpacePrefix = destUuid.toString() + MatchSpace.SUBTOPIC_SEPARATOR_CHAR + cmUuid.toString();

    //Retrieve the set of wrapped consumer points from the matchspace
    try
    {
        // Set up Results object to hold the results from the MatchSpace traversal
        searchResults.reset();

        // Set up an evaluation cache (need to keep one of these per thread. Newing up is expensive)
        EvalCache cache = _matching.createEvalCache();
        String discriminator = msg.getDiscriminator();
        if (discriminator != null)
        {
            try
            {
                // Validate the discriminator before building the full topic expression.
                _syntaxChecker.checkEventTopicSyntax(discriminator);
                String theTopic = buildSendTopicExpression(mSpacePrefix, discriminator);
                search(theTopic, // keyed on destination name
                       (MatchSpaceKey) msg,
                       cache,
                       searchResults);
            }
            catch (InvalidTopicSyntaxException e)
            {
                // No FFDC code needed
                if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                    SibTr.exit(tc, "retrieveMatchingConsumerPoints", e);
                // Surface the bad discriminator to the caller as a checked exception.
                throw new SIDiscriminatorSyntaxException(
                    nls.getFormattedMessage(
                        "INVALID_TOPIC_ERROR_CWSIP0372",
                        new Object[] { discriminator },
                        null));
            }
        }
        else
        {
            //no discriminator
            search(mSpacePrefix, // keyed on destination name/concumer manager combo
                   (MatchSpaceKey) msg,
                   cache,
                   searchResults);
        }
    }
    catch (BadMessageFormatMatchingException e)
    {
        // FFDC
        FFDCFilter.processException(
            e,
            "com.ibm.ws.sib.processor.matching.MessageProcessorMatching.retrieveMatchingConsumerPoints",
            "1:759:1.117.1.11",
            this);

        SibTr.exception(tc, e);

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "retrieveMatchingConsumerPoints", e);

        SibTr.error(tc,
            "INTERNAL_MESSAGING_ERROR_CWSIP0002",
            new Object[] {
                "com.ibm.ws.sib.processor.matching.MessageProcessorMatching",
                "1:770:1.117.1.11",
                e });

        // Internal error: wrap in an unchecked SIErrorException.
        throw new SIErrorException(
            nls.getFormattedMessage(
                "INTERNAL_MESSAGING_ERROR_CWSIP0002",
                new Object[] {
                    "com.ibm.ws.sib.processor.matching.MessageProcessorMatching",
                    "1:778:1.117.1.11",
                    e },
                null),
            e);
    }
    catch (MatchingException e)
    {
        // FFDC
        FFDCFilter.processException(
            e,
            "com.ibm.ws.sib.processor.matching.MessageProcessorMatching.retrieveMatchingConsumerPoints",
            "1:789:1.117.1.11",
            this);

        SibTr.exception(tc, e);

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "retrieveMatchingConsumerPoints", "SIErrorException");

        SibTr.error(tc,
            "INTERNAL_MESSAGING_ERROR_CWSIP0002",
            new Object[] {
                "com.ibm.ws.sib.processor.matching.MessageProcessorMatching",
                "1:800:1.117.1.11",
                e });

        throw new SIErrorException(
            nls.getFormattedMessage(
                "INTERNAL_MESSAGING_ERROR_CWSIP0002",
                new Object[] {
                    "com.ibm.ws.sib.processor.matching.MessageProcessorMatching",
                    "1:808:1.117.1.11",
                    e },
                null),
            e);
    }

    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.exit(tc, "retrieveMatchingConsumerPoints");
}
java
/**
 * Compares two objects for equality via reflection over their fields.
 *
 * <p>When one class is a subclass of the other, the comparison is performed
 * at the subclass level (so subclass-only fields participate); unrelated
 * classes are never equal. Field traversal walks up the hierarchy until
 * {@code reflectUpToClass} (exclusive) or {@code Object}.
 *
 * @param lhs            first object, may be null
 * @param rhs            second object, may be null
 * @param testTransients whether transient fields participate in the comparison
 * @param reflectUpToClass the highest superclass to reflect up to, may be null
 * @param excludeFields  field names to exclude from the comparison
 * @return true when the two objects are reflectively equal
 */
public static boolean reflectionEquals(final Object lhs, final Object rhs, final boolean testTransients, final Class<?> reflectUpToClass,
        final String... excludeFields) {
    if (lhs == rhs) {
        return true;
    }
    if (lhs == null || rhs == null) {
        return false;
    }
    // Find the leaf class since there may be transients in the leaf
    // class or in classes between the leaf and root.
    // If we are not testing transients or a subclass has no ivars,
    // then a subclass can test equals to a superclass.
    final Class<?> lhsClass = lhs.getClass();
    final Class<?> rhsClass = rhs.getClass();
    Class<?> testClass;
    if (lhsClass.isInstance(rhs)) {
        testClass = lhsClass;
        if (!rhsClass.isInstance(lhs)) {
            // rhsClass is a subclass of lhsClass
            testClass = rhsClass;
        }
    } else if (rhsClass.isInstance(lhs)) {
        testClass = rhsClass;
        if (!lhsClass.isInstance(rhs)) {
            // lhsClass is a subclass of rhsClass
            testClass = lhsClass;
        }
    } else {
        // The two classes are not related.
        return false;
    }
    final EqualsBuilder equalsBuilder = new EqualsBuilder();
    try {
        if (testClass.isArray()) {
            // Arrays delegate to the builder's array-aware append.
            equalsBuilder.append(lhs, rhs);
        } else {
            // Compare the leaf class, then each superclass up to the limit.
            reflectionAppend(lhs, rhs, testClass, equalsBuilder, testTransients, excludeFields);
            while (testClass.getSuperclass() != null && testClass != reflectUpToClass) {
                testClass = testClass.getSuperclass();
                reflectionAppend(lhs, rhs, testClass, equalsBuilder, testTransients, excludeFields);
            }
        }
    } catch (final IllegalArgumentException e) {
        // In this case, we tried to test a subclass vs. a superclass and
        // the subclass has ivars or the ivars are transient and
        // we are testing transients.
        // If a subclass has ivars that we are trying to test them, we get an
        // exception and we know that the objects are not equal.
        return false;
    }
    return equalsBuilder.isEquals();
}
java
/**
 * Handles a client-error exception: runs the error callback, logs a detailed
 * diagnostic block, and sends the error status unless the response was
 * already committed along the way.
 *
 * @param request  the current request
 * @param response the current response (may become committed in the callback)
 * @param cause    the client-error exception carrying title and status
 * @return the error title actually used (for logging by the caller)
 * @throws IOException only from container internals; sendError failures are handled here
 */
protected String handleClientError(HttpServletRequest request, HttpServletResponse response, RequestClientErrorException cause)
        throws IOException {
    final boolean beforeHandlingCommitted = response.isCommitted(); // basically false
    processClientErrorCallback(request, response, cause);
    final String title;
    if (response.isCommitted()) {
        // The callback (or something earlier) already wrote the response.
        title = response.getStatus() + " (thrown as " + cause.getTitle() + ")";
        if (beforeHandlingCommitted) { // basically no way but just in case
            showCliEx(cause, () -> {
                final StringBuilder sb = new StringBuilder();
                sb.append("*Cannot send error as '").append(title).append("' because of already committed:");
                sb.append(" path=").append(request.getRequestURI());
                return sb.toString();
            });
            return title; // cannot help it
        }
        // committed in callback process
    } else {
        title = cause.getTitle();
    }
    // Emit the full diagnostic block (request, headers, attributes, cause).
    showCliEx(cause, () -> {
        final StringBuilder sb = new StringBuilder();
        sb.append(LF).append("_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/_/");
        sb.append(LF).append("...Sending error as '").append(title).append("' manually");
        sb.append(" #").append(Integer.toHexString(cause.hashCode()));
        sb.append(LF).append(" Request: ").append(request.getRequestURI());
        final String queryString = request.getQueryString();
        if (queryString != null && !queryString.isEmpty()) {
            sb.append("?").append(queryString);
        }
        sb.append(LF);
        buildRequestHeaders(sb, request);
        buildRequestAttributes(sb, request, /*showErrorFlush*/true);
        buildSessionAttributes(sb, request, /*showErrorFlush*/true);
        sb.append(" Exception: ").append(cause.getClass().getName());
        sb.append(LF).append(" Message: ");
        final String causeMsg = cause.getMessage();
        if (causeMsg != null && causeMsg.contains(LF)) {
            // Multi-line messages get fenced for readability.
            sb.append(LF).append("/- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -");
            sb.append(LF).append(causeMsg);
            sb.append(LF).append("- - - - - - - - - -/");
        } else {
            sb.append(causeMsg);
        }
        sb.append(LF).append(" Stack Traces:");
        buildClientErrorStackTrace(cause, sb, 0);
        sb.append(LF);
        sb.append("_/_/_/_/_/_/_/_/_/_/");
        return sb.toString();
    });
    try {
        if (!response.isCommitted()) { // because may be committed in callback process
            response.sendError(cause.getErrorStatus());
        }
        return title;
    } catch (IOException sendEx) {
        final String msg = "Failed to send error as '" + title + "': " + sendEx.getMessage();
        if (errorLogging) {
            logger.error(msg);
        } else {
            showCliEx(cause, () -> msg);
        }
        return title; // cannot help it
    }
}
java
/**
 * Quietly closes a VFS {@code FileObject}. A null argument is a no-op;
 * close failures are only logged at debug level.
 *
 * @param fo the file object to close, may be null
 */
static public void close(FileObject fo) {
    if (fo == null) {
        return;
    }
    try {
        fo.close();
    } catch (FileSystemException e) {
        log.debug("Exception when closing FileObject: " + fo.getName(), e);
    }
}
python
def default(self, style_type):
    """
    Return the default style for *style_type* or |None| if no default is
    defined for that type (not common).
    """
    style = self._element.default_for(style_type)
    # Wrap the raw XML element in its proxy object only when one exists.
    return None if style is None else StyleFactory(style)
java
/**
 * Resolves a writable repository path for a well-known repository alias.
 *
 * @param repoAlias one of "user", "cache" or "local"
 * @return the directory backing the alias
 * @throws IllegalArgumentException for the read-only aliases "system" and
 *         "remote", and for any unrecognised alias
 */
File findRepoPath(final String repoAlias) {
    // NOTE(review): CeylonUtil.PATH_SEPARATOR is used as a *directory*
    // separator here -- confirm it holds the file separator ('/') and not
    // the path-list separator (':'), which would produce broken paths.
    if ("user".equals(repoAlias)) {
        return new File(System.getProperty("user.home") + CeylonUtil.PATH_SEPARATOR + ".ceylon/repo");
    } else if ("cache".equals(repoAlias)) {
        return new File(System.getProperty("user.home") + CeylonUtil.PATH_SEPARATOR + ".ceylon/cache");
    } else if ("system".equals(repoAlias)) {
        throw new IllegalArgumentException("Ceylon Repository 'system' should not be written to");
    } else if ("remote".equals(repoAlias)) {
        throw new IllegalArgumentException("Ceylon Repository 'remote' should use the ceylon:deploy Maven goal");
    } else if ("local".equals(repoAlias)) {
        return new File(project.getBasedir(), "modules");
    } else {
        // Fixed message grammar: "must one of" -> "must be one of".
        throw new IllegalArgumentException(
                "Property ceylonRepository must be one of 'user', 'cache' or 'local'. Defaults to 'user'");
    }
}
java
/**
 * Replaces the element at {@code index}, unboxing the boxed value and
 * delegating to the primitive overload.
 *
 * @param index   position to replace
 * @param element new value (must not be null)
 * @return the previous value at that position, boxed
 */
@Override
public Byte set(int index, Byte element) {
    final byte primitive = element.byteValue();
    return set(index, primitive);
}
python
def _all_number_groups_are_exactly_present(numobj, normalized_candidate, formatted_number_groups): """Returns True if the groups of digits found in our candidate phone number match our expectations. Arguments: numobj -- the original number we found when parsing normalized_candidate -- the candidate number, normalized to only contain ASCII digits, but with non-digits (spaces etc) retained expected_number_groups -- the groups of digits that we would expect to see if we formatted this number Returns True if expectations matched. """ candidate_groups = re.split(NON_DIGITS_PATTERN, normalized_candidate) # Set this to the last group, skipping it if the number has an extension. if numobj.extension is not None: candidate_number_group_index = len(candidate_groups) - 2 else: candidate_number_group_index = len(candidate_groups) - 1 # First we check if the national significant number is formatted as a # block. We use contains and not equals, since the national significant # number may be present with a prefix such as a national number prefix, or # the country code itself. if (len(candidate_groups) == 1 or candidate_groups[candidate_number_group_index].find(national_significant_number(numobj)) != -1): return True # Starting from the end, go through in reverse, excluding the first group, # and check the candidate and number groups are the same. formatted_number_group_index = len(formatted_number_groups) - 1 while (formatted_number_group_index > 0 and candidate_number_group_index >= 0): if (candidate_groups[candidate_number_group_index] != formatted_number_groups[formatted_number_group_index]): return False formatted_number_group_index -= 1 candidate_number_group_index -= 1 # Now check the first group. There may be a national prefix at the start, so we only check # that the candidate group ends with the formatted number group. return (candidate_number_group_index >= 0 and candidate_groups[candidate_number_group_index].endswith(formatted_number_groups[0]))
java
public <X> DataSet<X> runOperation(CustomUnaryOperation<T, X> operation) { Validate.notNull(operation, "The custom operator must not be null."); operation.setInput(this); return operation.createResult(); }
python
def bethe_fermi_ene(energy, quasipart, shift, hopping, beta): """product of the bethe lattice dos, fermi distribution an weighted by energy""" return energy * bethe_fermi(energy, quasipart, shift, hopping, beta)
python
def old(self): """Assess to the state value(s) at beginning of the time step, which has been processed most recently. When using *HydPy* in the normal manner. But it can be helpful for demonstration and debugging purposes. """ value = getattr(self.fastaccess_old, self.name, None) if value is None: raise RuntimeError( 'No value/values of sequence %s has/have ' 'not been defined so far.' % objecttools.elementphrase(self)) else: if self.NDIM: value = numpy.asarray(value) return value
java
public List<TaskSummary> doCriteriaQuery(String userId, UserGroupCallback userGroupCallback, QueryWhere queryWhere) { // 1. create builder and query instances CriteriaBuilder builder = getCriteriaBuilder(); CriteriaQuery<Tuple> criteriaQuery = builder.createTupleQuery(); // 2. query base Root<TaskImpl> taskRoot = criteriaQuery.from(TaskImpl.class); criteriaQuery = criteriaQuery.multiselect( taskRoot.get(TaskImpl_.id), // 0 taskRoot.get(TaskImpl_.name), // 1 taskRoot.get(TaskImpl_.subject), // 2 taskRoot.get(TaskImpl_.description), // 3 taskRoot.get(TaskImpl_.taskData).get(TaskDataImpl_.status), // 4 taskRoot.get(TaskImpl_.priority), // 5 taskRoot.get(TaskImpl_.taskData).get(TaskDataImpl_.skipable), // 6 taskRoot.get(TaskImpl_.taskData).get(TaskDataImpl_.actualOwner).get(UserImpl_.id), // 7 taskRoot.get(TaskImpl_.taskData).get(TaskDataImpl_.createdBy).get(UserImpl_.id), // 8 taskRoot.get(TaskImpl_.taskData).get(TaskDataImpl_.createdOn), // 9 taskRoot.get(TaskImpl_.taskData).get(TaskDataImpl_.activationTime), // 10 taskRoot.get(TaskImpl_.taskData).get(TaskDataImpl_.expirationTime), // 11 taskRoot.get(TaskImpl_.taskData).get(TaskDataImpl_.processId), // 12 taskRoot.get(TaskImpl_.taskData).get(TaskDataImpl_.processSessionId), // 13 taskRoot.get(TaskImpl_.taskData).get(TaskDataImpl_.processInstanceId), // 14 taskRoot.get(TaskImpl_.taskData).get(TaskDataImpl_.deploymentId), // 15 taskRoot.get(TaskImpl_.subTaskStrategy), // 16 taskRoot.get(TaskImpl_.taskData).get(TaskDataImpl_.parentId) // 17 ); taskRoot.join(TaskImpl_.taskData); // added for convienence sake, since other logic expects to find this join addUserGroupsViaCallBackToQueryWhere(userGroupCallback, queryWhere.getCriteria()); // 3. 
check to see if there's already a user(/security)-limitation in the search checkExistingCriteriaForUserBasedLimit(queryWhere, userId, userGroupCallback); for( QueryModificationService queryModificationService : queryModificationServiceLoader ) { queryModificationService.optimizeCriteria(queryWhere); } // 4. process query criteria fillCriteriaQuery(criteriaQuery, queryWhere, builder, TaskImpl.class); // 5. retrieve result (after also applying meta-criteria) useDistinctWhenLefOuterJoinsPresent(criteriaQuery); List<Tuple> result = createQueryAndCallApplyMetaCriteriaAndGetResult(queryWhere, criteriaQuery, builder); List<TaskSummary> taskSummaryList = new ArrayList<TaskSummary>(result.size()); for( Tuple tupleRow : result ) { int i = 0; //@formatter:off TaskSummaryImpl taskSummaryImpl = new TaskSummaryImpl( // id tupleRow.get(i++, Long.class), // name, subject, description tupleRow.get(i++, String.class), tupleRow.get(i++, String.class), tupleRow.get(i++, String.class), // status, prio, skippable tupleRow.get(i++, Status.class), tupleRow.get(i++, Integer.class), tupleRow.get(i++, Boolean.class), // actual owner, created by tupleRow.get(i++, String.class), tupleRow.get(i++, String.class), // created on, activation time, expiration time tupleRow.get(i++, Date.class), tupleRow.get(i++, Date.class), tupleRow.get(i++, Date.class), // process id, process session id, process inst id, deployment id tupleRow.get(i++, String.class), tupleRow.get(i++, Long.class), tupleRow.get(i++, Long.class), tupleRow.get(i++, String.class), tupleRow.get(i++, SubTasksStrategy.class), tupleRow.get(i++, Long.class) ); //@formatter:on taskSummaryList.add(taskSummaryImpl); } return taskSummaryList; }
python
def api_reference(root_url, service, version): """Generate URL for a Taskcluster api reference.""" root_url = root_url.rstrip('/') if root_url == OLD_ROOT_URL: return 'https://references.taskcluster.net/{}/{}/api.json'.format(service, version) else: return '{}/references/{}/{}/api.json'.format(root_url, service, version)
java
protected void scheduleResize() { if (m_updateSizeTimer != null) { m_updateSizeTimer.cancel(); } m_updateSizeTimer = new Timer() { @Override public void run() { updateContentSize(); } }; m_updateSizeTimer.schedule(300); }
java
static PrefsTransform getUtilTransform(TypeName type) { String typeName = type.toString(); // Integer.class.getCanonicalName().equals(typeName) if (Date.class.getCanonicalName().equals(typeName)) { return new DatePrefsTransform(); } if (Locale.class.getCanonicalName().equals(typeName)) { return new LocalePrefsTransform(); } if (Currency.class.getCanonicalName().equals(typeName)) { return new CurrencyPrefsTransform(); } if (Calendar.class.getCanonicalName().equals(typeName)) { return new CalendarPrefsTransform(); } if (TimeZone.class.getCanonicalName().equals(typeName)) { return new TimeZonePrefsTransform(); } return null; }
python
def tax_class_based_on(self, tax_class_based_on): """Sets the tax_class_based_on of this TaxSettings. :param tax_class_based_on: The tax_class_based_on of this TaxSettings. :type: str """ allowed_values = ["shippingAddress", "billingAddress"] # noqa: E501 if tax_class_based_on is not None and tax_class_based_on not in allowed_values: raise ValueError( "Invalid value for `tax_class_based_on` ({0}), must be one of {1}" # noqa: E501 .format(tax_class_based_on, allowed_values) ) self._tax_class_based_on = tax_class_based_on
python
def start_step(self, lineno, name="Unnamed step", timestamp=None): """Create a new step and update the state to reflect we're now in the middle of a step.""" self.state = self.STATES['step_in_progress'] self.stepnum += 1 self.steps.append({ "name": name, "started": timestamp, "started_linenumber": lineno, "errors": [], })
python
def remove_html_tag(input_str='', tag=None): """ Returns a string with the html tag and all its contents from a string """ result = input_str if tag is not None: pattern = re.compile('<{tag}[\s\S]+?/{tag}>'.format(tag=tag)) result = re.sub(pattern, '', str(input_str)) return result
python
def isubset(self, *keys): # type: (*Hashable) -> ww.g """Return key, self[key] as generator for key in keys. Raise KeyError if a key does not exist Args: keys: Iterable containing keys Example: >>> from ww import d >>> list(d({1: 1, 2: 2, 3: 3}).isubset(1, 3)) [(1, 1), (3, 3)] """ return ww.g((key, self[key]) for key in keys)
python
def is_one2one(fmt): """ Runs a check to evaluate if the format string has a one to one correspondence. I.e. that successive composing and parsing opperations will result in the original data. In other words, that input data maps to a string, which then maps back to the original data without any change or loss in information. Note: This test only applies to sensible usage of the format string. If string or numeric data is causes overflow, e.g. if composing "abcd" into {3s}, one to one correspondence will always be broken in such cases. This of course also applies to precision losses when using datetime data. """ data = _generate_data_for_format(fmt) if data is None: return False # run data forward once and back to data stri = compose(fmt, data) data2 = parse(fmt, stri) # check if data2 equal to original data if len(data) != len(data2): return False for key in data: if key not in data2: return False if data2[key] != data[key]: return False # all checks passed, so just return True return True
java
static int execute(Arg arguments, PrintStream stream, PrintStream errorStream) { if (arguments == null) { return 2; } if (arguments.checkBcryptHash != null) { // verify mode BCrypt.Result result = BCrypt.verifyer().verify(arguments.password, arguments.checkBcryptHash); if (!result.validFormat) { System.err.println("Invalid bcrypt format."); return 3; } if (result.verified) { stream.println("Hash verified."); } else { errorStream.println("Provided hash does not verify against given password."); return 1; } } else { // hash mode byte[] salt = arguments.salt == null ? Bytes.random(16).array() : arguments.salt; byte[] hash = BCrypt.withDefaults().hash(arguments.costFactor, salt, charArrayToByteArray(arguments.password, StandardCharsets.UTF_8)); stream.println(new String(hash, StandardCharsets.UTF_8)); } return 0; }
python
def match_dim_specs(specs1, specs2): """Matches dimension specs used to link axes. Axis dimension specs consists of a list of tuples corresponding to each dimension, each tuple spec has the form (name, label, unit). The name and label must match exactly while the unit only has to match if both specs define one. """ if (specs1 is None or specs2 is None) or (len(specs1) != len(specs2)): return False for spec1, spec2 in zip(specs1, specs2): for s1, s2 in zip(spec1, spec2): if s1 is None or s2 is None: continue if s1 != s2: return False return True
java
public final void setNewVertexValue(VV newValue) { if (setNewVertexValueCalled) { throw new IllegalStateException("setNewVertexValue should only be called at most once per updateVertex"); } setNewVertexValueCalled = true; outVertex.f1 = newValue; out.collect(Either.Left(outVertex)); }
java
public boolean containsKey(Object key, Transaction transaction) throws ObjectManagerException { try { for (Iterator iterator = entrySet().iterator();;) { Entry entry = (Entry) iterator.next(transaction); Object entryKey = entry.getKey(); if (key == entryKey || key.equals(entryKey)) { return true; } } } catch (java.util.NoSuchElementException exception) { // No FFDC code needed, just exited search. return false; } // try. }
java
@Deprecated public SingleOutputStreamOperator<T> assignTimestamps(TimestampExtractor<T> extractor) { // match parallelism to input, otherwise dop=1 sources could lead to some strange // behaviour: the watermark will creep along very slowly because the elements // from the source go to each extraction operator round robin. int inputParallelism = getTransformation().getParallelism(); ExtractTimestampsOperator<T> operator = new ExtractTimestampsOperator<>(clean(extractor)); return transform("ExtractTimestamps", getTransformation().getOutputType(), operator) .setParallelism(inputParallelism); }
python
def get_user_roles(self): """Returns a list of roles for the current user """ if self.is_anonymous_user(): return [] current_user = ploneapi.user.get_current() return ploneapi.user.get_roles(user=current_user)
python
def FormatProblem(self, d=None): """Return a text string describing the problem. Args: d: map returned by GetDictToFormat with with formatting added """ if not d: d = self.GetDictToFormat() output_error_text = self.__class__.ERROR_TEXT % d if ('reason' in d) and d['reason']: return '%s\n%s' % (output_error_text, d['reason']) else: return output_error_text
java
protected void addImagesToBook(final BuildData buildData, final String locale, final String imageFolder, boolean revertToDefaultLocale, boolean logErrors) { // Load the database constants final byte[] failpenguinPng = blobConstantProvider.getBlobConstant( buildData.getServerEntities().getFailPenguinBlobConstantId()).getValue(); // Download the image files that were identified in the processing stage float imageProgress = 0; final float imageTotal = buildData.getImageLocations().size(); final int showPercent = 10; int lastPercent = 0; for (final TopicImageData imageLocation : buildData.getImageLocations()) { // Check if the app should be shutdown if (isShuttingDown.get()) { return; } boolean success = false; final int extensionIndex = imageLocation.getImageName().lastIndexOf("."); final int pathIndex = imageLocation.getImageName().lastIndexOf("/"); final int hypenIndex = imageLocation.getImageName().lastIndexOf("-"); if ( /* characters were found */ extensionIndex != -1 && pathIndex != -1 /* the path character was found before the extension */ && extensionIndex > pathIndex) { try { /* * The file name minus the extension should be an integer that references an ImageFile record ID. */ final String imageID; if (hypenIndex != -1) { imageID = imageLocation.getImageName().substring(pathIndex + 1, Math.min(extensionIndex, hypenIndex)); } else { imageID = imageLocation.getImageName().substring(pathIndex + 1, extensionIndex); } /* * If the image is the failpenguin the that means that an error has already occurred most likely from not * specifying an image file at all. */ if (imageID.equals(BuilderConstants.FAILPENGUIN_PNG_NAME)) { success = false; buildData.getErrorDatabase().addError(imageLocation.getTopic(), ErrorType.INVALID_IMAGES, "No image filename specified. Must be in the format [ImageFileID].extension e.g. 
123.png, " + "" + "or images/321.jpg"); } else { final ImageWrapper imageFile = imageProvider.getImage(Integer.parseInt(imageID)); // TODO Uncomment this once Image Revisions are fixed. // if (imageLocation.getRevision() == null) { // imageFile = imageProvider.getImage(Integer.parseInt(imageID)); // } else { // imageFile = imageProvider.getImage(Integer.parseInt(imageID), imageLocation.getRevision()); // } // Find the image that matches this locale. If the locale isn't found then use the default locale LanguageImageWrapper languageImageFile = null; if (imageFile.getLanguageImages() != null && imageFile.getLanguageImages().getItems() != null) { final List<LanguageImageWrapper> languageImages = imageFile.getLanguageImages().getItems(); for (final LanguageImageWrapper image : languageImages) { if (image.getLocale().getValue().equals(locale)) { languageImageFile = image; break; } else if (revertToDefaultLocale && image.getLocale().getValue().equals( buildData.getDefaultLocale()) && languageImageFile == null) { languageImageFile = image; } } } if (languageImageFile != null && languageImageFile.getImageData() != null) { success = true; addToZip(buildData.getBookLocaleFolder() + imageLocation.getImageName(), languageImageFile.getImageData(), buildData); } else if (logErrors) { buildData.getErrorDatabase().addError(imageLocation.getTopic(), ErrorType.INVALID_IMAGES, "ImageFile ID " + imageID + " from image location " + imageLocation.getImageName() + " was not found!"); } } } catch (final NumberFormatException ex) { success = false; if (logErrors) { buildData.getErrorDatabase().addError(imageLocation.getTopic(), ErrorType.INVALID_IMAGES, imageLocation.getImageName() + " is not a valid image. Must be in the format [ImageFileID].extension e.g." 
+ " 123" + ".png, or images/321.jpg"); log.debug("", ex); } } catch (final Exception ex) { success = false; if (logErrors) { buildData.getErrorDatabase().addError(imageLocation.getTopic(), ErrorType.INVALID_IMAGES, imageLocation.getImageName() + " is not a valid image. Must be in the format [ImageFileID].extension e.g." + " 123" + ".png, or images/321.jpg"); log.debug("", ex); } } } // Put in a place holder in the image couldn't be found if (logErrors && !success) { buildData.getOutputFiles().put(imageFolder + imageLocation.getImageName(), failpenguinPng); } final int progress = Math.round(imageProgress / imageTotal * 100); if (progress - lastPercent >= showPercent) { lastPercent = progress; log.info("\tDownloading " + locale + " Images " + progress + "% done"); } ++imageProgress; } }
python
def parse_limits_list(path, limits): """Parse a structured list of flux limits as obtained from a YAML file Yields tuples of reaction ID, lower and upper flux bounds. Path can be given as a string or a context. """ context = FilePathContext(path) for limit_def in limits: if 'include' in limit_def: include_context = context.resolve(limit_def['include']) for limit in parse_limits_file(include_context): yield limit else: yield parse_limit(limit_def)
python
def receive_response(self, message, insecure=False): """Read the response for the given message. @type message: OmapiMessage @type insecure: bool @param insecure: avoid an OmapiError about a wrong authenticator @rtype: OmapiMessage @raises OmapiError: @raises socket.error: """ response = self.receive_message() if not response.is_response(message): raise OmapiError("received message is not the desired response") # signature already verified if response.authid != self.protocol.defauth and not insecure: raise OmapiError("received message is signed with wrong authenticator") return response
python
def show_cimreferences(server_name, server, org_vm): """ Show info about the CIM_ReferencedProfile instances. Goal. Clearly show what the various refs look like by using profile names for the antecedent and dependent rather than instances. """ try: ref_insts = server.conn.EnumerateInstances("CIM_ReferencedProfile", namespace=server.interop_ns) prof_insts = server.conn.EnumerateInstances("CIM_RegisteredProfile", namespace=server.interop_ns) except pywbem.Error as er: print('CIM_ReferencedProfile failed for conn=%s\nexception=%s' % (server, er)) raise # create dictionary of registered profiles with path as key profile_dict = NocaseDict() for inst in prof_insts: profile_dict[str(path_wo_ns(inst))] = inst # The output table should look like the following: # DEP prof name, CIM_Ref Subclass, ANT name rows = [] # TODO clean up the try blocks, etc. after we run the complete # set of servers. for inst in ref_insts: try: dep_path = inst.get('Dependent') ant_path = inst.get('Antecedent') except Exception as ex: # pylint: disable=broad-except print('Exception get properties %s in %s' % (ex, inst)) row = ["Unknown", inst.classname, "Unknown"] rows.append(row) continue try: dep = profile_dict[str(path_wo_ns(dep_path))] except Exception as ex: # pylint: disable=broad-except print('Exception %s get from profile_dict: ' 'Dependent reference in ReferencedProfiles "%s" does ' 'not match any registered profile instance.\nReference ' 'instance:\n%s' % (ex, str(path_wo_ns(dep_path)), inst.tomof())) row = ["Unknown", inst.classname, "Unknown"] rows.append(row) continue try: ant = profile_dict[str(path_wo_ns(ant_path))] except Exception as ex: # pylint: disable=broad-except print('Exception get from profile_dict %s. 
' 'Antecedent reference in ReferencedProfiles "%s" does ' 'not match any registered profile instance.\n' 'Reference instance:\n%s' % (ex, str(path_wo_ns(ant_path)), inst.tomof())) row = ["Unknown", inst.classname, "Unknown"] rows.append(row) continue try: row = [profile_name(org_vm, dep), inst.classname, profile_name(org_vm, ant)] except Exception as ex: # pylint: disable=broad-except print('Exception row create %s ' % ex) row = ["Unknown", inst.classname, "Unknown"] rows.append(row) title = '%s: Simplified table of CIM_References with ref class name ' \ 'and profile names' % server_name headers = ['Dependent\nProfile Name', 'CIM_Reference\nClass', 'Antecedent\nProfile Name'] print_table(title, headers, rows, sort_columns=1) # TODO: The following has become a duplicate of the above. # Probably drop this. if VERBOSE: rows = [] for inst in ref_insts: dep_path = inst.get('Dependent') ant_path = inst.get('Antecedent') dep = profile_dict[str(dep_path)] ant = profile_dict[str(ant_path)] row = [str(path_wo_ns(dep)), inst.classname, str(path_wo_ns(ant))] rows.append(row) title = '%s: Table of reference insts' % server_name headers = ['Dependent Profile', 'Ref Class', 'Antecedent Profile'] print_table(title, headers, rows, sort_columns=1)
java
@Override public void moduleMetaDataCreated(MetaDataEvent<ModuleMetaData> event) throws MetaDataException { if (!MetaDataUtils.copyModuleMetaDataSlot(event, mmdSlot)) { getModuleBindingMap(event.getMetaData()); } }
python
def timezone(self): """The name of the time zone for the location. A list of time zone names can be obtained from pytz. For example. >>> from pytz import all_timezones >>> for timezone in all_timezones: ... print(timezone) """ if not self._timezone_group and not self._timezone_location: return None if self._timezone_location != "": return "%s/%s" % (self._timezone_group, self._timezone_location) else: return self._timezone_group
python
def install_js(): ''' Copy built BokehJS files into the Python source tree. Returns: None ''' target_jsdir = join(SERVER, 'static', 'js') target_cssdir = join(SERVER, 'static', 'css') target_tslibdir = join(SERVER, 'static', 'lib') STATIC_ASSETS = [ join(JS, 'bokeh.js'), join(JS, 'bokeh.min.js'), join(CSS, 'bokeh.css'), join(CSS, 'bokeh.min.css'), ] if not all(exists(a) for a in STATIC_ASSETS): print(BOKEHJS_INSTALL_FAIL) sys.exit(1) if exists(target_jsdir): shutil.rmtree(target_jsdir) shutil.copytree(JS, target_jsdir) if exists(target_cssdir): shutil.rmtree(target_cssdir) shutil.copytree(CSS, target_cssdir) if exists(target_tslibdir): shutil.rmtree(target_tslibdir) if exists(TSLIB): # keep in sync with bokehjs/src/compiler/compile.ts lib = { "lib.es5.d.ts", "lib.dom.d.ts", "lib.es2015.core.d.ts", "lib.es2015.promise.d.ts", "lib.es2015.symbol.d.ts", "lib.es2015.iterable.d.ts", } shutil.copytree(TSLIB, target_tslibdir, ignore=lambda _, files: [ f for f in files if f not in lib ])
java
protected synchronized void clear() throws ObjectManagerException { if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled()) trace.entry(this, cclass , "clear" ); super.clear(); managedObjectsOnDisk = new java.util.HashSet(); managedObjectsToWrite = new ConcurrentHashMap(concurrency); tokensToDelete = new ConcurrentHashMap(concurrency); deleteDirectory(storeDirectoryName); // Make the directory to write the objects to. java.io.File storeDirectory = new java.io.File(storeDirectoryName); if (!(storeDirectory.isDirectory())) { storeDirectory.mkdirs(); } // if (!(storeDirectory.isDirectory())). // Sequence numbers 0-200 are reserved. sequenceNumber = initialSequenceNumber; writeHeader(); if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled()) trace.exit(this, cclass , "clear" ); }
java
public boolean showSize() { CmsListItemDetails details = getList().getMetadata().getItemDetailDefinition(LIST_DETAIL_SIZE); return (details != null) && details.isVisible(); }
python
def _freq_parser(self, freq): """ day, hour, min, sec, """ freq = freq.lower().strip() try: if "day" in freq: freq = freq.replace("day", "") return timedelta(days=int(freq)) elif "hour" in freq: freq = freq.replace("hour", "") return timedelta(hours=int(freq)) elif "min" in freq: freq = freq.replace("min", "") return timedelta(minutes=int(freq)) elif "sec" in freq: freq = freq.replace("sec", "") return timedelta(seconds=int(freq)) else: raise Exception("%s is invalid format. use day, hour, min, sec." % freq) except: raise Exception("%s is invalid format. use day, hour, min, sec." % freq)
java
public TVInfo getTVInfo(int tvID, String language, String... appendToResponse) throws MovieDbException { TmdbParameters parameters = new TmdbParameters(); parameters.add(Param.ID, tvID); parameters.add(Param.LANGUAGE, language); parameters.add(Param.APPEND, appendToResponse); URL url = new ApiUrl(apiKey, MethodBase.TV).buildUrl(parameters); String webpage = httpTools.getRequest(url); try { return MAPPER.readValue(webpage, TVInfo.class); } catch (IOException ex) { throw new MovieDbException(ApiExceptionType.MAPPING_FAILED, "Failed to get TV Info", url, ex); } }
java
public Property createProperty(final String name) { final PropertyFactory factory = getFactory(name); if (factory != null) { return factory.createProperty(); } else if (isExperimentalName(name)) { return new XProperty(name); } else if (allowIllegalNames()) { return new XProperty(name); } else { throw new IllegalArgumentException("Illegal property [" + name + "]"); } }
java
public static boolean isOverlappingBefore(final Interval timeRange, final Interval timeRangeToCheck) { return ((timeRange.getStart() != null && timeRange.getStart().isAfter(timeRangeToCheck.getStart())) && (timeRange.getEnd() != null && timeRange.getEnd().isAfter(timeRangeToCheck.getEnd()))); }
python
def setup(db_class, simple_object_cls, primary_keys): """A simple API to configure the metadata""" table_name = simple_object_cls.__name__ column_names = simple_object_cls.FIELDS metadata = MetaData() table = Table(table_name, metadata, *[Column(cname, _get_best_column_type(cname), primary_key=cname in primary_keys) for cname in column_names]) db_class.metadata = metadata db_class.mapper_class = simple_object_cls db_class.table = table mapper(simple_object_cls, table)
java
public void addMainKeyBehavior() { // Add the read keyed listener to all unique keys with one field Record record = this.getMainRecord(); if (record == null) return; int keyCount = record.getKeyAreaCount(); for (int keyNumber = DBConstants.MAIN_KEY_AREA; keyNumber < keyCount + DBConstants.MAIN_KEY_AREA; keyNumber++) { KeyArea keyAreaInfo = record.getKeyArea(keyNumber); if (((keyAreaInfo.getUniqueKeyCode() == DBConstants.UNIQUE) || (keyAreaInfo.getUniqueKeyCode() == DBConstants.SECONDARY_KEY)) & (keyAreaInfo.getKeyFields() == 1)) { BaseField mainField = keyAreaInfo.getField(DBConstants.MAIN_KEY_FIELD); MainFieldHandler readKeyed = new MainFieldHandler(record.getKeyArea(keyNumber).getKeyName()); mainField.addListener(readKeyed); } } }
java
protected List<PropertyData> getChildPropertiesData(final NodeData nodeData, boolean forcePersistentRead) throws RepositoryException { List<PropertyData> childProperties = null; if (!forcePersistentRead) { childProperties = getCachedCleanChildPropertiesData(nodeData); if (childProperties != null) { return childProperties; } } final DataRequest request = new DataRequest(nodeData.getIdentifier(), DataRequest.GET_PROPERTIES); try { request.start(); if (!forcePersistentRead) { // Try first to get the value from the cache since a // request could have been launched just before childProperties = getCachedCleanChildPropertiesData(nodeData); if (childProperties != null) { return childProperties; } } return executeAction(new PrivilegedExceptionAction<List<PropertyData>>() { public List<PropertyData> run() throws RepositoryException { List<PropertyData> childProperties = CacheableWorkspaceDataManager.super.getChildPropertiesData(nodeData); if (childProperties.size() > 0 && cache.isEnabled()) { cache.addChildProperties(nodeData, childProperties); } return childProperties; } }); } finally { request.done(); } }
java
public void refresh(ExtensionComponentSet delta) { boolean fireOnChangeListeners = false; synchronized (getLoadLock()) { if (extensions==null) return; // not yet loaded. when we load it, we'll load everything visible by then, so no work needed Collection<ExtensionComponent<T>> found = load(delta); if (!found.isEmpty()) { List<ExtensionComponent<T>> l = Lists.newArrayList(extensions); l.addAll(found); extensions = sort(l); fireOnChangeListeners = true; } } if (fireOnChangeListeners) { fireOnChangeListeners(); } }
java
public static Point QuadKeyToTileXY(final String quadKey, final Point reuse) { final Point out = reuse == null ? new Point() : reuse; if (quadKey == null || quadKey.length() == 0) { throw new IllegalArgumentException("Invalid QuadKey: " + quadKey); } int tileX = 0; int tileY = 0; final int zoom = quadKey.length(); for (int i = 0 ; i < zoom; i++) { final int value = 1 << i; switch (quadKey.charAt(zoom - i - 1)) { case '0': break; case '1': tileX += value; break; case '2': tileY += value; break; case '3': tileX += value; tileY += value; break; default: throw new IllegalArgumentException("Invalid QuadKey: " + quadKey); } } out.x = tileX; out.y = tileY; return out; }
java
@GwtIncompatible("incompatible method") @Nullable public static Properties splitArrayElementsIntoProperties(final String[] array, final String delimiter) { return StringUtils.splitArrayElementsIntoProperties(array, delimiter, null); }
java
String rrToString() { StringBuffer sb = new StringBuffer(); sb.append(priority + " "); sb.append(weight + " "); sb.append(port + " "); sb.append(target); return sb.toString(); }
java
public static Model readPom(String path, JarFile jar) throws AnalysisException { final ZipEntry entry = jar.getEntry(path); Model model = null; if (entry != null) { //should never be null //noinspection CaughtExceptionImmediatelyRethrown try { final PomParser parser = new PomParser(); model = parser.parse(jar.getInputStream(entry)); if (model == null) { throw new AnalysisException(String.format("Unable to parse pom '%s/%s'", jar.getName(), path)); } } catch (AnalysisException ex) { throw ex; } catch (SecurityException ex) { LOGGER.warn("Unable to parse pom '{}' in jar '{}'; invalid signature", path, jar.getName()); LOGGER.debug("", ex); throw new AnalysisException(ex); } catch (IOException ex) { LOGGER.warn("Unable to parse pom '{}' in jar '{}' (IO Exception)", path, jar.getName()); LOGGER.debug("", ex); throw new AnalysisException(ex); } catch (Throwable ex) { LOGGER.warn("Unexpected error during parsing of the pom '{}' in jar '{}'", path, jar.getName()); LOGGER.debug("", ex); throw new AnalysisException(ex); } } return model; }
java
private String stem(int del, String add, String affix) { int stem_length = yylength() - del; int i = 0; String result=yytext().substring(0,stem_length); if (option(change_case)) { result=result.toLowerCase(); } if (!(add.length()==0)) result+=add; if(option(print_affixes)) { result+=("+"+affix); } return result; }
java
/**
 * Validates that the query's filter clauses cover every field of the
 * compound (embedded-id) partition key.
 *
 * @param filterQueue the queue of filter clauses extracted from the query
 * @param metaModel   the metamodel used to resolve the embedded id fields
 * @param metadata    the entity metadata whose id attribute defines the key
 * @throws QueryHandlerException if any partition key field is missing from
 *                               the query's filters
 */
private static void isCompletePartitionKeyPresentInQuery(Queue filterQueue, MetamodelImpl metaModel,
        EntityMetadata metadata)
{
    // Collect the names of all fields that make up the compound partition key.
    final Set<String> partitionKeyFields = new HashSet<String>();
    populateEmbeddedIdFields(
            metaModel.embeddable(metadata.getIdAttribute().getBindableJavaType()).getAttributes(),
            metaModel, partitionKeyFields);

    // Collect every attribute name referenced by the query's filter clauses;
    // a dotted property path (e.g. "key.part") contributes each segment.
    final Set<String> queryAttributes = new HashSet<String>();
    for (Object clause : filterQueue)
    {
        if (clause instanceof FilterClause)
        {
            final String property = ((FilterClause) clause).getProperty();
            for (String segment : property.split("\\."))
            {
                queryAttributes.add(segment);
            }
        }
    }

    if (!queryAttributes.containsAll(partitionKeyFields))
    {
        throw new QueryHandlerException("Incomplete partition key fields in query");
    }
}
python
def runprofile(mainfunction, output, timeout = 0, calibrate=False):
    '''
    Run the functions profiler and save the result
    If timeout is greater than 0, the profile will automatically stops after timeout seconds

    :param mainfunction: code string handed to profile.run() (the entry point to profile)
    :param output: path of the file the profiler stats are saved to
    :param timeout: seconds to let the profiled code run before killing its thread
                    (0 = run until the function returns by itself)
    :param calibrate: when True, calibrate the profiler before running (rarely needed)
    :return: True on success, False when the profiler library is unavailable
    '''
    # `noprofiler` is a module-level flag, presumably set at import time when the
    # profile/pstats libraries could not be loaded — confirm against module header.
    if noprofiler == True:
        print('ERROR: profiler and/or pstats library missing ! Please install it (probably package named python-profile) before running a profiling !')
        return False
    # This is the main function for profiling
    def _profile():
        profile.run(mainfunction, output)
    print('=> RUNNING FUNCTIONS PROFILER\n\n'); sys.stdout.flush();
    # Calibrate the profiler (only use this if the profiler produces some funny stuff, but calibration can also produce even more funny stuff with the latest cProfile of Python v2.7! So you should only enable calibration if necessary)
    if calibrate:
        print('Calibrating the profiler...'); sys.stdout.flush();
        cval = calibrateprofile()
        print('Calibration found value : %s' % cval); sys.stdout.flush();
    print('Initializing the profiler...'); sys.stdout.flush();
    # Run in timeout mode (if the function cannot ends by itself, this is the best mode: the function must ends for the profile to be saved)
    if timeout > 0:
        # KThread is a killable thread wrapper; the profiled function runs inside
        # it so it can be terminated from the outside once the timeout elapses.
        pthread = KThread(target=_profile) # we open the function with the profiler, in a special killable thread (see below why)
        print('Will now run the profiling and terminate it in %s seconds. \nResults will be saved in %s' % (str(timeout), str(output))); sys.stdout.flush();
        print('\nCountdown:'); sys.stdout.flush();
        # 5-second visible countdown before the profiling actually starts.
        for i in range(0,5):
            print(str(5-i))
            sys.stdout.flush()
            time.sleep(1)
        print('0\nStarting to profile...'); sys.stdout.flush();
        pthread.start() # starting the thread
        time.sleep(float(timeout)) # after this amount of seconds, the thread gets killed and the profiler will end its job
        print('\n\nFinishing the profile and saving to the file %s' % str(output)); sys.stdout.flush();
        pthread.kill() # we must end the main function in order for the profiler to output its results (if we didn't launch a thread and just closed the process, it would have done no result)
    # Run in full length mode (we run the function until it ends)
    else:
        print("Running the profiler, please wait until the process terminates by itself (if you forcequit before, the profile won't be saved)")
        _profile()
    print('=> Functions Profile done !')
    return True
python
def CheckSameObj(obj0, obj1, LFields=None):
    """ Check if two variables are the same instance of a ToFu class

    Checks a list of attributes, provided by LFields

    Parameters
    ----------
    obj0 : tofu object
        A variable refering to a ToFu object of any class
    obj1 : tofu object
        A variable refering to a ToFu object of the same class as obj0
    LFields : None / str / list
        The criteria against which the two objects are evaluated:
            - None: True is returned
            - str or list: tests whether all listed attributes have the same value

    Returns
    -------
    A : bool
        True only if LFields is None or a list of attributes that all match
    """
    A = True
    # BUG FIX: the original tested the undefined name `LField`, which raised
    # a NameError on every call; the parameter is spelled `LFields`.
    if LFields is not None and obj0.__class__ == obj1.__class__:
        assert type(LFields) in [str, list]
        if type(LFields) is str:
            LFields = [LFields]
        assert all([type(s) is str for s in LFields])
        ind = [False for ii in range(0, len(LFields))]
        # Attributes may live either on the object itself or on its Id sub-object.
        Dir0 = dir(obj0.Id) + dir(obj0)
        Dir1 = dir(obj1.Id) + dir(obj1)
        for ii in range(0, len(LFields)):
            assert LFields[ii] in Dir0, LFields[ii] + " not in " + obj0.Id.Name
            assert LFields[ii] in Dir1, LFields[ii] + " not in " + obj1.Id.Name
            if hasattr(obj0, LFields[ii]):
                # np.all handles array-valued attributes (elementwise ==)
                ind[ii] = np.all(getattr(obj0, LFields[ii]) == getattr(obj1, LFields[ii]))
            else:
                ind[ii] = getattr(obj0.Id, LFields[ii]) == getattr(obj1.Id, LFields[ii])
        A = all(ind)
    return A
python
def _connect(contact_points=None, port=None, cql_user=None, cql_pass=None,
             protocol_version=None):
    '''
    Connect to a Cassandra cluster.

    :param contact_points: The Cassandra cluster addresses, can either be a string or a list of IPs.
    :type  contact_points: str or list of str
    :param cql_user: The Cassandra user if authentication is turned on.
    :type  cql_user: str
    :param cql_pass: The Cassandra user password if authentication is turned on.
    :type  cql_pass: str
    :param port: The Cassandra cluster port, defaults to None.
    :type  port: int
    :param protocol_version: Cassandra protocol version to use.
    :type  protocol_version: int
    :return: The session and cluster objects.
    :rtype: cluster object, session object
    '''
    # Lazy load the Cassandra cluster and session for this module by creating a
    # cluster and session when cql_query is called the first time. Get the
    # Cassandra cluster and session from this module's __context__ after it is
    # loaded the first time cql_query is called.
    #
    # TODO: Call cluster.shutdown() when the module is unloaded on
    # master/minion shutdown. Currently, Master.shutdown() and Minion.shutdown()
    # do nothing to allow loaded modules to gracefully handle resources stored
    # in __context__ (i.e. connection pools). This means that the connection
    # pool is orphaned and Salt relies on Cassandra to reclaim connections.
    # Perhaps if Master/Minion daemons could be enhanced to call an "__unload__"
    # function, or something similar for each loaded module, connection pools
    # and the like can be gracefully reclaimed/shutdown.
    if (__context__
            and 'cassandra_cql_returner_cluster' in __context__
            and 'cassandra_cql_returner_session' in __context__):
        # Reuse the cached connection created by a previous call.
        return __context__['cassandra_cql_returner_cluster'], __context__['cassandra_cql_returner_session']
    else:
        # Resolve each connection setting from the explicit argument or,
        # failing that, from salt configuration (with defaults where noted).
        contact_points = _load_properties(property_name=contact_points, config_option='cluster')
        contact_points = contact_points if isinstance(contact_points, list) else contact_points.split(',')
        port = _load_properties(property_name=port, config_option='port', set_default=True, default=9042)
        cql_user = _load_properties(property_name=cql_user, config_option='username', set_default=True, default="cassandra")
        cql_pass = _load_properties(property_name=cql_pass, config_option='password', set_default=True, default="cassandra")
        protocol_version = _load_properties(property_name=protocol_version, config_option='protocol_version', set_default=True, default=4)

        try:
            auth_provider = PlainTextAuthProvider(username=cql_user, password=cql_pass)
            ssl_opts = _get_ssl_opts()
            if ssl_opts:
                cluster = Cluster(contact_points,
                                  port=port,
                                  auth_provider=auth_provider,
                                  ssl_options=ssl_opts,
                                  protocol_version=protocol_version,
                                  compression=True)
            else:
                cluster = Cluster(contact_points, port=port,
                                  auth_provider=auth_provider,
                                  protocol_version=protocol_version,
                                  compression=True)
            # Retry the initial connect up to three times on timeouts; the
            # final timeout is re-raised so the caller sees the failure.
            for recontimes in range(1, 4):
                try:
                    session = cluster.connect()
                    break
                except OperationTimedOut:
                    log.warning('Cassandra cluster.connect timed out, try %s', recontimes)
                    if recontimes >= 3:
                        raise

            # TODO: Call cluster.shutdown() when the module is unloaded on shutdown.
            __context__['cassandra_cql_returner_cluster'] = cluster
            __context__['cassandra_cql_returner_session'] = session
            __context__['cassandra_cql_prepared'] = {}

            log.debug('Successfully connected to Cassandra cluster at %s', contact_points)
            return cluster, session
        except TypeError:
            # NOTE(review): TypeError is silently swallowed, which makes the
            # function fall through and return None — presumably tolerating a
            # malformed configuration value; confirm this is intentional.
            pass
        except (ConnectionException, ConnectionShutdown, NoHostAvailable):
            log.error('Could not connect to Cassandra cluster at %s', contact_points)
            raise CommandExecutionError('ERROR: Could not connect to Cassandra cluster.')
python
def _migrate0to1(previous: Mapping[str, Any]) -> SettingsMap:
    """ Migrate to version 1 of the feature flags file.

    Replaces old setting IDs with new IDs and normalizes values: a flag is
    kept only when it was explicitly True (under either its new or old ID);
    everything else — False, missing, or non-bool — becomes None.

    :param previous: the version-0 settings mapping being migrated
    :return: a new mapping keyed by the current setting IDs
    """
    # `settings` is the module-level registry of known settings, each carrying
    # its current `id` and legacy `old_id`.
    migrated: SettingsMap = {}
    for setting in settings:
        new_id = setting.id
        # Avoid shadowing the builtins `next`/`id` (the original did both),
        # and collapse the two identical True-branches into one `or` test.
        if previous.get(new_id) is True or previous.get(setting.old_id) is True:
            migrated[new_id] = True
        else:
            migrated[new_id] = None
    return migrated
java
/**
 * Reflectively fetches the suppressed exceptions of a throwable so the code
 * also runs on JVMs whose {@code Throwable} lacks {@code getSuppressed}
 * (pre-Java 7); on such JVMs an empty array is returned.
 *
 * @param throwable the throwable to query
 * @return the suppressed throwables, or an empty array when unsupported
 */
static Throwable[] getSuppressed(Throwable throwable) {
    try {
        final Method method = throwable.getClass().getMethod("getSuppressed");
        return (Throwable[]) method.invoke(throwable);
    } catch (NoSuchMethodException e) {
        // Pre-1.7 JVM: suppressed exceptions do not exist there.
        return new Throwable[0];
    } catch (IllegalAccessException e) {
        // Unexpected on a public method; surface as an unchecked failure.
        throw new RuntimeException(e);
    } catch (InvocationTargetException e) {
        throw new RuntimeException(e);
    }
}
python
def highlight_block(context, nodelist, lexer, **kwargs):
    """
    Renders ``nodelist`` in ``context`` and returns it as highlighted code.

    The lexer is looked up by ``lexer`` name; any keyword arguments are passed
    straight to Pygments' ``HtmlFormatter``. When no highlighter is available,
    the rendered source is returned wrapped in a plain ``<pre>`` tag instead.

    Syntax::

        {% highlight_block [lexer name] [formatter options] %}
        ... source code ..
        {% endhighlight_block %}

    Example::

        {% highlight_block python linenos=true %}
            print '{{ request.path }}'
        {% endhighlight_block %}
    """
    rendered = nodelist.render(context) or ''
    if highlighter is None:
        return '<pre>%s</pre>' % str(rendered)
    return highlighter(rendered, get_lexer_by_name(lexer), HtmlFormatter(**kwargs))
python
async def _wait_for_new(self, entity_type, entity_id):
    """Wait for a new object to appear in the Model and return it.

    Waits for an object of type ``entity_type`` with id ``entity_id``
    to appear in the model.  This is similar to watching for the
    object using ``block_until``, but uses the watcher rather than
    polling.
    """
    # Fast path: the entity may already be present in the model.
    live_entities = self.state._live_entity_map(entity_type)
    if entity_id in live_entities:
        return live_entities[entity_id]
    # Otherwise defer to the watcher-based wait (no extra predicate).
    return await self._wait(entity_type, entity_id, None)