language
stringclasses
2 values
func_code_string
stringlengths
63
466k
python
def map_colors(arr, crange, cmap, hex=True):
    """
    Maps an array of values to RGB hex strings, given a color range and
    colormap.

    Parameters
    ----------
    arr : np.ndarray
        Values to be mapped to colors.
    crange : np.ndarray, tuple or None
        Either an array of bin edges (values are replaced by the index
        of their bin), a (min, max) tuple, or None to normalize by the
        finite data range of ``arr``.
    cmap : callable
        Colormap applied to the binned/normalized values.
    hex : bool
        When True the RGBA output is converted to hex strings via
        ``rgb2hex``; otherwise the raw colormap output is returned.
    """
    if isinstance(crange, np.ndarray):
        # crange holds bin edges: replace each value by its bin index.
        order = np.argsort(crange)
        positions = np.searchsorted(crange, arr)
        arr = order[positions]
    else:
        if isinstance(crange, tuple):
            cmin, cmax = crange
        else:
            cmin, cmax = np.nanmin(arr), np.nanmax(arr)
        # Normalize into [0, 1] and mask non-finite entries so the
        # colormap can treat them as "bad" values.
        arr = (arr - cmin) / (cmax - cmin)
        arr = np.ma.array(arr, mask=np.logical_not(np.isfinite(arr)))
    colors = cmap(arr)
    return rgb2hex(colors) if hex else colors
java
/**
 * Selects the eviction candidate with the highest eviction priority from the
 * given iterable. Starting from the second candidate, any candidate whose
 * evictable is already expired (per {@code isExpired}) is returned immediately;
 * otherwise the configured {@code evictionPolicyComparator} decides whether the
 * current candidate replaces the selection.
 * <p>
 * NOTE(review): the expiry short-circuit is only evaluated once a first
 * candidate has been selected, so the very first candidate's expiry is never
 * checked directly — confirm this asymmetry is intentional.
 *
 * @param evictionCandidates candidates to evaluate; may be empty
 * @return the selected candidate, or {@code null} when the iterable is empty
 */
public <C extends EvictionCandidate<A, E>> C evaluate(Iterable<C> evictionCandidates) { C selectedEvictionCandidate = null; long now = Clock.currentTimeMillis(); for (C currentEvictionCandidate : evictionCandidates) { if (selectedEvictionCandidate == null) { selectedEvictionCandidate = currentEvictionCandidate; } else { E evictable = currentEvictionCandidate.getEvictable(); if (isExpired(now, evictable)) { return currentEvictionCandidate; } int comparisonResult = evictionPolicyComparator.compare(selectedEvictionCandidate, currentEvictionCandidate); if (comparisonResult == EvictionPolicyComparator.SECOND_ENTRY_HAS_HIGHER_PRIORITY_TO_BE_EVICTED) { selectedEvictionCandidate = currentEvictionCandidate; } } } return selectedEvictionCandidate; }
java
/**
 * Builds the HTTP request from the template, executes it, and delegates
 * response handling back to the template.
 *
 * @return the handled response, or {@code null} when no response was received
 */
@Override
public Object invoke() {
    HttpRequestBase request = template.buildRequest(context);
    HttpResponse response = template.executeRequest(context, request);
    if (response == null) {
        return null;
    }
    return template.handleResponse(context, response);
}
java
/**
 * Queries the MySQL server for its {@code server_id} variable.
 * The result row is [variable_name, value], so the value lives at index 1.
 *
 * @param mysqlConnection connection used to issue the SHOW VARIABLES query
 * @return the server id reported by MySQL
 * @throws CanalParseException when the query fails or returns no rows
 *         (typically a missing SUPER/REPLICATION CLIENT privilege)
 */
private Long findServerId(MysqlConnection mysqlConnection) {
    try {
        ResultSetPacket packet = mysqlConnection.query("show variables like 'server_id'");
        List<String> fields = packet.getFieldValues();
        if (CollectionUtils.isEmpty(fields)) {
            throw new CanalParseException("command : show variables like 'server_id' has an error! pls check. you need (at least one of) the SUPER,REPLICATION CLIENT privilege(s) for this operation");
        }
        // fields = [name, value]; the value column is second.
        return Long.valueOf(fields.get(1));
    } catch (IOException e) {
        throw new CanalParseException("command : show variables like 'server_id' has an error!", e);
    }
}
python
def help_center_section_subscriptions(self, section_id, locale=None, **kwargs):
    """List the subscriptions of a Help Center section.

    https://developer.zendesk.com/rest_api/docs/help_center/subscriptions#list-section-subscriptions

    :param section_id: id of the section whose subscriptions are listed
    :param locale: optional locale; when given, the locale-scoped endpoint
        is used instead of the default one
    """
    if locale:
        api_path = "/api/v2/help_center/{locale}/sections/{section_id}/subscriptions.json".format(
            locale=locale, section_id=section_id)
    else:
        api_path = "/api/v2/help_center/sections/{section_id}/subscriptions.json".format(
            section_id=section_id)
    return self.call(api_path, **kwargs)
java
/**
 * Creates a new {@link OrderAwarePluginRegistry} containing no plugins.
 *
 * @return an empty registry, never {@code null}
 */
public static <S, T extends Plugin<S>> OrderAwarePluginRegistry<T, S> empty() {
    List<T> noPlugins = Collections.emptyList();
    return create(noPlugins);
}
java
/**
 * Returns the payment offset for the given fixing time. When no offset code is
 * configured, the fixed {@code paymentOffset} is returned. Otherwise the offset
 * is derived by converting the fixing time to a date, rolling it forward by the
 * configured offset code and business-day convention, and converting back to a
 * floating-point time; results are memoized in {@code paymentOffsets} per
 * fixing time.
 *
 * @param fixingTime the fixing time (floating point date relative to the reference date)
 * @return the payment offset (payment time minus fixing time)
 */
@Override public double getPaymentOffset(double fixingTime) { if(paymentOffsetCode == null) { return paymentOffset; } if(paymentOffsets.containsKey(fixingTime)) { return paymentOffsets.get(fixingTime); } else { /** * @TODO In case paymentDate is relevant for the index modeling, it should be checked * if the following derivation of paymentDate is accurate (e.g. wo we have a fixingOffset). * In such a case, this method may be overridden. */ LocalDate referenceDate = getReferenceDate(); LocalDate fixingDate = FloatingpointDate.getDateFromFloatingPointDate(referenceDate, fixingTime); LocalDate paymentDate = paymentBusinessdayCalendar.getAdjustedDate(fixingDate, paymentOffsetCode, paymentDateRollConvention); double paymentTime = FloatingpointDate.getFloatingPointDateFromDate(referenceDate, paymentDate); paymentOffsets.put(fixingTime, paymentTime-fixingTime); return paymentTime-fixingTime; } }
python
def _combine_series_frame(self, other, func, fill_value=None, axis=None, level=None):
    """
    Apply binary operator `func` to self, other using alignment and fill
    conventions determined by the fill_value, axis, and level kwargs.

    Parameters
    ----------
    self : DataFrame
    other : Series
    func : binary operator
    fill_value : object, default None
    axis : {0, 1, 'columns', 'index', None}, default None
    level : int or None, default None

    Returns
    -------
    result : DataFrame
    """
    if fill_value is not None:
        raise NotImplementedError("fill_value {fill} not supported."
                                  .format(fill=fill_value))

    if axis is not None:
        # An explicit axis picks the matching dimension directly.
        if self._get_axis_number(axis) == 0:
            return self._combine_match_index(other, func, level=level)
        return self._combine_match_columns(other, func, level=level)

    # No axis given: handle the degenerate empty cases first.
    if not len(other):
        return self * np.nan
    if not len(self):
        # Ambiguous case, use _series so works with DataFrame
        return self._constructor(data=self._series, index=self.index,
                                 columns=self.columns)
    # default axis is columns
    return self._combine_match_columns(other, func, level=level)
java
/**
 * Compiles a template from the given source string.
 * <p>
 * When {@code cache} is false the template is always rebuilt. Otherwise the
 * compiled template is cached under the MD5 of the content; in dev mode a
 * cached template is rebuilt whenever its source reports a modification.
 *
 * @param content the template source text
 * @param cache   whether to use the template cache
 * @return the compiled template
 */
public Template getTemplateByString(String content, boolean cache) {
    if (!cache) {
        return buildTemplateBySource(new StringSource(content, cache));
    }

    String cacheKey = HashKit.md5(content);
    Template template = templateCache.get(cacheKey);
    boolean rebuild = (template == null) || (devMode && template.isModified());
    if (rebuild) {
        template = buildTemplateBySource(new StringSource(content, cache));
        templateCache.put(cacheKey, template);
    }
    return template;
}
python
# Recursively rewrites every list of code points in `d` (a possibly nested dict)
# into a compact range string, e.g. [97, 98, 99, 101] -> "a-ce" in the chosen
# format (`bytesformat` for bytes tables, `uniformat` for Unicode tables).
# When `invert` is True, it also stores the complement of each set over the full
# range [0, MAXASCII] or [0, MAXUNICODE] under the inverted key: keys beginning
# with '^' get their complement stored without the '^' prefix, and vice versa.
# The v2/iv2 accumulators collect the normal and inverted range fragments while
# a single sorted pass tracks the current run (first..last) and the gap before
# it (ifirst..ilast).
def char2range(d, is_bytes=False, invert=True): """Convert the characters in the dict to a range in string form.""" fmt = bytesformat if is_bytes else uniformat maxrange = MAXASCII if is_bytes else MAXUNICODE for k1 in sorted(d.keys()): v1 = d[k1] if not isinstance(v1, list): char2range(v1, is_bytes=is_bytes, invert=invert) else: inverted = k1.startswith('^') v1.sort() last = None first = None ilast = None ifirst = None v2 = [] iv2 = [] if v1 and v1[0] != 0: ifirst = 0 for i in v1: if first is None: first = i last = i elif i == last + 1: last = i elif first is not None: if first == last: v2.append(fmt(first)) else: v2.append("%s-%s" % (fmt(first), fmt(last))) if invert and ifirst is not None: ilast = first - 1 if ifirst == ilast: iv2.append(fmt(ifirst)) else: iv2.append("%s-%s" % (fmt(ifirst), fmt(ilast))) ifirst = last + 1 first = i last = i if not v1: iv2 = ["%s-%s" % (fmt(0), fmt(maxrange))] elif first is not None: if first == last: v2.append(fmt(first)) else: v2.append("%s-%s" % (fmt(first), fmt(last))) if invert and ifirst is not None: ilast = first - 1 if ifirst == ilast: iv2.append(fmt(ifirst)) else: iv2.append("%s-%s" % (fmt(ifirst), fmt(ilast))) ifirst = last + 1 if invert and ifirst <= maxrange: ilast = maxrange if ifirst == ilast: iv2.append(fmt(ifirst)) else: iv2.append("%s-%s" % (fmt(ifirst), fmt(ilast))) d[k1] = ''.join(v2) if invert: d[k1[1:] if inverted else '^' + k1] = ''.join(iv2)
java
/**
 * Computes the scaled pixel dimensions of the supplied image.
 * Each dimension is rounded to the nearest integer and clamped at zero.
 *
 * @param scale the scale factor to apply
 * @param image the image whose size is scaled
 * @return the scaled (width, height) as a point
 */
public static Point getSize (float scale, Mirage image) {
    int scaledWidth = Math.max(0, Math.round(scale * image.getWidth()));
    int scaledHeight = Math.max(0, Math.round(scale * image.getHeight()));
    return new Point(scaledWidth, scaledHeight);
}
python
def as_graph(self) -> Graph:  # pragma: no cover
    """Returns a :class:`graphviz.Graph` representation of this bipartite graph."""
    if Graph is None:
        raise ImportError('The graphviz package is required to draw the graph.')
    graph = Graph()

    left_names = {}   # type: Dict[TLeft, str]
    right_names = {}  # type: Dict[TRight, str]
    counter = 0

    for (left, right), value in self._edges.items():
        # Lazily create a graphviz node the first time each endpoint is seen.
        if left not in left_names:
            name = 'node{:d}'.format(counter)
            left_names[left] = name
            graph.node(name, label=str(left))
            counter += 1
        if right not in right_names:
            name = 'node{:d}'.format(counter)
            right_names[right] = name
            graph.node(name, label=str(right))
            counter += 1
        # Edges carry their value as a label unless the value is literally True.
        edge_label = value is not True and str(value) or ''
        graph.edge(left_names[left], right_names[right], edge_label)
    return graph
java
/**
 * Sends the given content to the URI (POST or PUT, decided downstream) and
 * wraps the answer in a {@link JSONResource}.
 *
 * @param anUri          target URI
 * @param requestContent the body to send
 * @return the JSON resource built from the response
 * @throws IOException on communication failure
 */
public JSONResource json(URI anUri, AbstractContent requestContent) throws IOException {
    JSONResource resource = createJSONResource();
    return doPOSTOrPUT(anUri, requestContent, resource);
}
python
def build_footprint(node: ast.AST, first_line_no: int) -> Set[int]:
    """
    Generates a list of lines that the passed node covers, relative to the
    marked lines list - i.e. start of function is line 0.
    """
    start = get_first_token(node).start[0] - first_line_no
    # +1 because range() excludes its stop value and the last token's line
    # belongs to the footprint.
    stop = get_last_token(node).end[0] - first_line_no + 1
    return set(range(start, stop))
java
/**
 * Downloads, parses and renders the document at the given URL into the output
 * stream, either as a PNG bitmap or as SVG markup depending on {@code type}.
 * <p>
 * Pipeline: normalize the URL scheme (defaulting to http://), fetch and parse
 * the document, build a media specification from the configured media type and
 * window size, run the CSS analysis (agent style sheets plus author sheets),
 * then lay out a BrowserCanvas and serialize it in the requested format.
 *
 * @param urlstring the URL to render; a scheme is prepended when missing
 * @param out       destination stream for the PNG bytes or SVG text
 * @param type      output format (PNG or SVG)
 * @return always {@code true} on completion
 * @throws IOException  on network or output failure
 * @throws SAXException on document parse failure
 */
public boolean renderURL(String urlstring, OutputStream out, Type type) throws IOException, SAXException { if (!urlstring.startsWith("http:") && !urlstring.startsWith("https:") && !urlstring.startsWith("ftp:") && !urlstring.startsWith("file:")) urlstring = "http://" + urlstring; //Open the network connection DocumentSource docSource = new DefaultDocumentSource(urlstring); //Parse the input document DOMSource parser = new DefaultDOMSource(docSource); Document doc = parser.parse(); //create the media specification MediaSpec media = new MediaSpec(mediaType); media.setDimensions(windowSize.width, windowSize.height); media.setDeviceDimensions(windowSize.width, windowSize.height); //Create the CSS analyzer DOMAnalyzer da = new DOMAnalyzer(doc, docSource.getURL()); da.setMediaSpec(media); da.attributesToStyles(); //convert the HTML presentation attributes to inline styles da.addStyleSheet(null, CSSNorm.stdStyleSheet(), DOMAnalyzer.Origin.AGENT); //use the standard style sheet da.addStyleSheet(null, CSSNorm.userStyleSheet(), DOMAnalyzer.Origin.AGENT); //use the additional style sheet da.addStyleSheet(null, CSSNorm.formsStyleSheet(), DOMAnalyzer.Origin.AGENT); //render form fields using css da.getStyleSheets(); //load the author style sheets BrowserCanvas contentCanvas = new BrowserCanvas(da.getRoot(), da, docSource.getURL()); contentCanvas.setAutoMediaUpdate(false); //we have a correct media specification, do not update contentCanvas.getConfig().setClipViewport(cropWindow); contentCanvas.getConfig().setLoadImages(loadImages); contentCanvas.getConfig().setLoadBackgroundImages(loadBackgroundImages); if (type == Type.PNG) { contentCanvas.createLayout(windowSize); ImageIO.write(contentCanvas.getImage(), "png", out); } else if (type == Type.SVG) { setDefaultFonts(contentCanvas.getConfig()); contentCanvas.createLayout(windowSize); Writer w = new OutputStreamWriter(out, "utf-8"); writeSVG(contentCanvas.getViewport(), w); w.close(); } docSource.close(); return true; }
python
def _shape_repr(shape): """Return a platform independent reprensentation of an array shape Under Python 2, the `long` type introduces an 'L' suffix when using the default %r format for tuples of integers (typically used to store the shape of an array). Under Windows 64 bit (and Python 2), the `long` type is used by default in numpy shapes even when the integer dimensions are well below 32 bit. The platform specific type causes string messages or doctests to change from one platform to another which is not desirable. Under Python 3, there is no more `long` type so the `L` suffix is never introduced in string representation. >>> _shape_repr((1, 2)) '(1, 2)' >>> one = 2 ** 64 / 2 ** 64 # force an upcast to `long` under Python 2 >>> _shape_repr((one, 2 * one)) '(1, 2)' >>> _shape_repr((1,)) '(1,)' >>> _shape_repr(()) '()' """ if len(shape) == 0: return "()" joined = ", ".join("%d" % e for e in shape) if len(shape) == 1: # special notation for singleton tuples joined += ',' return "(%s)" % joined
python
# NOTE(review): the expression option is evaluated with eval() after the raw
# input payload is substituted into the "{X}" placeholder; this executes
# arbitrary code if the expression or payload is attacker-controlled — confirm
# both come only from trusted configuration.
def do_execute(self): """ The actual execution of the actor. :return: None if successful, otherwise error message :rtype: str """ expr = str(self.resolve_option("expression")) expr = expr.replace("{X}", str(self.input.payload)) self._output.append(Token(eval(expr))) return None
python
def p_suffix(self, length=None, elipsis=False):
    """Return the rest of the input.

    When `length` is given, at most that many characters are returned; if
    `elipsis` is true and the slice is exactly `length` characters long,
    "..." is appended to mark possible truncation.
    """
    if length is None:
        return self.input[self.pos:]
    rest = self.input[self.pos:self.pos + length]
    if elipsis and len(rest) == length:
        rest += "..."
    return rest
java
/**
 * Registers an aggregator class under the given name, replacing any previous
 * registration that used the same name.
 *
 * @param name       the lookup key for the aggregator
 * @param aggregator the aggregator class to register
 */
public void registerAggregator(String name, Class<? extends Aggregator<?>> aggregator) {
    this.aggregators.put(name, aggregator);
}
python
# Evaluate whether the configured condition holds for the value looked up via
# get_params(); `self.default` flips the polarity of the result.
# NOTE(review): the bare `except:` clauses (noqa e722) swallow all errors,
# including KeyboardInterrupt — confirm intentional.
# NOTE(review): when the watched variable is a bool, `bool(self.value)` is True
# for any non-empty string (e.g. "False") — confirm this coercion is intended.
# NOTE(review): when self.condition is none of "=", ">", "<", the method falls
# off the end and implicitly returns None (falsy) — confirm intended.
def _check_valid_condition(self, get_params): """ Check if the condition has been met. We need to make sure that we are of the correct type. """ try: variable = get_params(self.variable) except: # noqa e722 variable = None value = self.value # if None, return oppositely if variable is None: return not self.default # convert the value to a correct type if isinstance(variable, bool): value = bool(self.value) elif isinstance(variable, Number): try: value = int(self.value) except: # noqa e722 try: value = float(self.value) except: # noqa e722 # could not parse return not self.default # compare and return the result if self.condition == "=": return (variable == value) == self.default elif self.condition == ">": return (variable > value) == self.default elif self.condition == "<": return (variable < value) == self.default
java
/**
 * Generates a single hash code for the container by XOR-folding the sorted
 * per-atom (or per-part) hashes produced by the delegate generator. Duplicate
 * values in the sorted array would cancel out under XOR, so each repeated
 * value is first perturbed by feeding the previous rotated value through the
 * pseudorandom sequence; the {@code rotated} array records the value actually
 * folded at each position so consecutive duplicates keep diverging.
 *
 * @param container the molecule to hash
 * @return the combined hash, seeded with the Mersenne prime 2^31-1
 */
@Override public long generate(IAtomContainer container) { long[] hashes = generator.generate(container); long[] rotated = new long[hashes.length]; Arrays.sort(hashes); // seed with Mersenne prime 2^31-1 long hash = 2147483647L; for (int i = 0; i < hashes.length; i++) { // if non-unique, then get the next random number if (i > 0 && hashes[i] == hashes[i - 1]) { hash ^= rotated[i] = pseudorandom.next(rotated[i - 1]); } else { hash ^= rotated[i] = hashes[i]; } } return hash; }
python
def read_tcp_size(conn, size):
    """Read exactly `size` bytes from `conn`, retrying as needed.

    Returns the assembled bytes, or None if the peer closed the connection
    before `size` bytes arrived (a warning is logged for partial reads).
    """
    received = []
    total = 0
    while total < size:
        chunk = conn.recv(size - total)
        if not chunk:
            # Peer closed early; report a partial read and give up.
            if total > 0:
                logging.warning("Incomplete read: %s of %s.", total, size)
            return
        received.append(chunk)
        total += len(chunk)
    return b"".join(received)
java
/**
 * Determines the enabled cipher suites for the given SSL engine. If the config
 * explicitly lists ciphers (as a space-delimited String or a String[]), those
 * are used; otherwise the engine's supported suites are filtered down to the
 * configured security level (defaulting to HIGH when unset). An empty result
 * is traced as an event since such a connection cannot succeed.
 *
 * @param sslEngine engine supplying the supported cipher suites
 * @return the enabled cipher suites, possibly {@code null} or empty
 */
public String[] getEnabledCipherSuites(SSLEngine sslEngine) { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { Tr.entry(tc, "getEnabledCipherSuites"); } String ciphers[] = null; // First check the properties object for the ciphers. Object ciphersObject = this.myConfig.get(Constants.SSLPROP_ENABLED_CIPHERS); if (null == ciphersObject) { // Did not find the enabled ciphers. Need to determine them here. String securityLevel = this.myConfig.getProperty(Constants.SSLPROP_SECURITY_LEVEL); if (null == securityLevel) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "Defaulting to HIGH security level"); } securityLevel = Constants.SECURITY_LEVEL_HIGH; } // Found the security level. ciphers = Constants.adjustSupportedCiphersToSecurityLevel( sslEngine.getSupportedCipherSuites(), securityLevel); } else { // Found enabled cipher suites. Now we need to put them in the right kind of object. if (ciphersObject instanceof String) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "enabledCipherSuites is a String: " + ciphersObject); } // Quickly break the string up into an array based on space delimiters. 
ciphers = ((String) ciphersObject).split("\\s"); } else if (ciphersObject instanceof String[]) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "enabledCipherSuites is a String array"); } ciphers = (String[]) ciphersObject; } else { if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) { Tr.event(tc, "Invalid object for enabledCipherSuites: " + ciphersObject); } } } // check for when we're returning 0 ciphers as the connection will not // work and will be throwing errors later on if (null == ciphers || 0 == ciphers.length) { if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) { Tr.event(tc, "Unable to find any enabled ciphers"); } } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { Tr.exit(tc, "getEnabledCipherSuites"); } return ciphers; }
java
/**
 * Lazily resolves and caches the EJB home. The home interface class is loaded
 * first via the thread context class loader, falling back to
 * {@code Class.forName}; the home itself is looked up in JNDI using the
 * configured initial-context properties (falling back to a default
 * InitialContext, and as a last resort to the WebSphere naming factory when no
 * initial context is available) and narrowed via PortableRemoteObject.
 *
 * @return the (cached) EJB home
 * @throws RemoteException when the home cannot be found in JNDI
 *         (NoSuchObjectException with the NamingException as detail) or the
 *         home interface class cannot be loaded
 */
public EJBHome getEJBHome() throws RemoteException { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) Tr.entry(tc, "getEJBHome"); if (home == null) { try { Class homeClass = null; try { // // If we are running on the server side, then the thread // context loader would have been set appropriately by // the container. If running on a client, then check the // thread context class loader first // ClassLoader cl = Thread.currentThread().getContextClassLoader(); if (cl != null) { homeClass = cl.loadClass(homeInterface); } else { throw new ClassNotFoundException(); } } catch (ClassNotFoundException ex) { //FFDCFilter.processException(ex, CLASS_NAME + ".getEJBHome", "141", this); try { homeClass = Class.forName(homeInterface); } catch (ClassNotFoundException e) { //FFDCFilter.processException(e, CLASS_NAME + ".getEJBHome", // "148", this); throw new ClassNotFoundException(homeInterface); } } InitialContext ctx = null; try { // Locate the home //91851 begin if (this.initialContextProperties == null) { ctx = new InitialContext(); } else { try { ctx = new InitialContext(this.initialContextProperties); if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) // d144064 Tr.debug(tc, "Created an initial context with the " + "initialContextProperties, providerURL = " + (String) initialContextProperties.get("java.naming.provider.url") + " INITIAL_CONTEXT_FACTORY = " + (String) initialContextProperties.get(Context.INITIAL_CONTEXT_FACTORY)); } catch (NamingException ne) { //FFDCFilter.processException(ne, CLASS_NAME + ".getEJBHome", // "177", this); ctx = new InitialContext(); } } //91851 end home = (EJBHome) PortableRemoteObject. 
narrow(ctx.lookup(homeJNDIName), homeClass); } catch (NoInitialContextException e) { //FFDCFilter.processException(e, CLASS_NAME + ".getEJBHome", "188", this); java.util.Properties p = new java.util.Properties(); p.put(Context.INITIAL_CONTEXT_FACTORY, "com.ibm.websphere.naming.WsnInitialContextFactory"); ctx = new InitialContext(p); home = (EJBHome) PortableRemoteObject. narrow(ctx.lookup(homeJNDIName), homeClass); } } catch (NamingException e) { // Problem looking up the home //FFDCFilter.processException(e, CLASS_NAME + ".getEJBHome", "201", this); if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) Tr.exit(tc, "getEJBHome", e); RemoteException re = new NoSuchObjectException("Could not find home in JNDI"); re.detail = e; throw re; } catch (ClassNotFoundException e) { // We couldn't find the home interface's class //FFDCFilter.processException(e, CLASS_NAME + ".getEJBHome", "213", this); if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) Tr.exit(tc, "getEJBHome", e); throw new RemoteException("Could not load home interface", e); } } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) Tr.exit(tc, "getEJBHome"); return home; }
java
protected void handleRollback(LocalTransaction transaction) { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "handleRollback", transaction); // Roll back the transaction if we created it. if (transaction != null) { try { transaction.rollback(); } catch (SIException e) { // FFDC FFDCFilter.processException( e, "com.ibm.ws.sib.processor.impl.PtoPOutputHandler.handleRollback", "1:1644:1.241", this); SibTr.exception(tc, e); } } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "handleRollback"); }
python
def get_available_label_stores(self, usefield='tryall'):
    """
    Get the label store values that may be used for writing this
    annotation.

    Available store values include:
    - the undefined values in the standard wfdb labels
    - the store values not used in the current annotation object
    - the store values whose standard wfdb symbols/descriptions match
      those of the custom labels (if custom labels exist)

    Parameters
    ----------
    usefield : str, optional
        The attribute used to figure out available label stores. If set
        to 'tryall' (default), one of 'label_store', 'symbol', or
        'description' is chosen based on availability, in that order.

    Returns
    -------
    set
        The available label store values.

    Raises
    ------
    ValueError
        If 'tryall' is requested but no label field is defined.
    """
    # Figure out which field to use to get available label stores.
    if usefield == 'tryall':
        if self.label_store is not None:
            usefield = 'label_store'
        elif self.symbol is not None:
            usefield = 'symbol'
        elif self.description is not None:
            usefield = 'description'
        else:
            raise ValueError('No label fields are defined. At least one of the following is required: ', ann_label_fields)
        return self.get_available_label_stores(usefield=usefield)
    # Use the explicitly stated field to get available stores.
    else:
        # If usefield == 'label_store', there are slightly fewer/different
        # steps compared to if it were another option.
        contained_field = getattr(self, usefield)

        # Get the unused label_store values.
        if usefield == 'label_store':
            unused_label_stores = set(ann_label_table['label_store'].values) - contained_field
        else:
            # The label_store values from the standard wfdb annotation
            # labels whose symbols are not contained in this annotation.
            unused_field = set(ann_label_table[usefield].values) - contained_field
            # BUG FIX: `Series in set` is not a valid element-wise test in
            # pandas (it raised/misbehaved); use Series.isin to build the
            # boolean row mask.
            unused_label_stores = ann_label_table.loc[
                ann_label_table[usefield].isin(unused_field), 'label_store'].values

        # Get the standard wfdb label_store values overwritten by the
        # custom labels, if any.
        if self.custom_symbols is not None:
            custom_field = set(self.get_custom_label_attribute(usefield))
            if usefield == 'label_store':
                overwritten_label_stores = set(custom_field).intersection(set(ann_label_table['label_store']))
            else:
                overwritten_fields = set(custom_field).intersection(set(ann_label_table[usefield]))
                # BUG FIX: same `in` -> `.isin` correction as above.
                overwritten_label_stores = ann_label_table.loc[
                    ann_label_table[usefield].isin(overwritten_fields), 'label_store'].values
        else:
            overwritten_label_stores = set()

        # The undefined values in the standard wfdb labels.
        undefined_label_stores = self.get_undefined_label_stores()

        # Final available label stores = undefined + unused + overwritten.
        available_label_stores = set(undefined_label_stores).union(set(unused_label_stores)).union(overwritten_label_stores)

        return available_label_stores
java
/**
 * Wraps the given post-processor so that it is associated with the given
 * dialect.
 *
 * @param postProcessor the post-processor to wrap; may be {@code null}
 * @param dialect       the dialect to associate; never {@code null}
 * @return the wrapped post-processor, or {@code null} when none was given
 */
public static IPostProcessor wrap(final IPostProcessor postProcessor, final IProcessorDialect dialect) {
    Validate.notNull(dialect, "Dialect cannot be null");
    return (postProcessor == null) ? null : new PostProcessorWrapper(postProcessor, dialect);
}
python
def __check_right_side_conflict(x, y, dfs_data):
    """Checks to see if the frond xy will conflict with a frond on the right side of the embedding."""
    side = dfs_data['FG']['r']
    w, z = dfs_data['RF'][side]
    return __check_conflict_fronds(x, y, w, z, dfs_data)
python
def from_name(cls, fullname, soco, *args, **kwargs):
    """Instantiate a plugin by its full name.

    `fullname` is a dotted path whose final component is the class name
    and whose prefix is the module to import; extra args are forwarded to
    the plugin constructor along with `soco`.
    """
    _LOG.info('Loading plugin %s', fullname)
    modname, _, clsname = fullname.rpartition('.')
    mod = importlib.import_module(modname)
    class_ = getattr(mod, clsname)
    _LOG.info('Loaded class %s', class_)
    return class_(soco, *args, **kwargs)
python
def day_of_year(dt):
    # type: (datetime.date) -> int
    """Day index of year from 1 to 365 or 366.

    Parameters
    ----------
    dt : datetime.date or datetime.datetime
        The date whose ordinal day-of-year is wanted.

    Returns
    -------
    int
        1 for January 1st up to 365 (or 366 in leap years).
    """
    # The previous round-trip through time.mktime()/time.localtime() was
    # redundant — timetuple() already carries tm_yday — and could shift the
    # result near DST transitions. The type comment also wrongly said
    # `(int) -> int`; the function receives a date/datetime.
    return dt.timetuple().tm_yday
java
/**
 * Evicts every cached entry associated with the given class loader, both from
 * the bean-properties cache and from the class cache.
 *
 * @param classLoader the class loader whose entries are dropped
 */
public static void clearClassLoader(final ClassLoader classLoader) {
    BeanPropertiesCache.clear(classLoader);
    final Iterator<Map.Entry<CacheKey, String>> entries = CLASS_CACHE.entrySet().iterator();
    while (entries.hasNext()) {
        // Identity comparison: each loader owns its own cache entries.
        if (entries.next().getKey().loader == classLoader) {
            entries.remove();
        }
    }
}
python
# NOTE(review): in the `result is None` branch below (marked "no cover"),
# `cls` is not defined anywhere in this function's scope — if that branch is
# ever executed it raises NameError. Confirm whether `cls` should be a
# parameter or a module-level name.
def _post_create(atdepth, entry, result): """Finishes the entry logging if applicable. """ if not atdepth and entry is not None: if result is not None: #We need to get these results a UUID that will be saved so that any #instance methods applied to this object has a parent to refer to. retid = _tracker_str(result) entry["r"] = retid ekey = retid else: # pragma: no cover ekey = _tracker_str(cls) msg.info("{}: {}".format(ekey, entry), 1) record(ekey, entry)
java
/**
 * Truncates the given date down to the start of the given histogram interval
 * (e.g. to the top of the hour, start of the day, or Monday of the week).
 *
 * @param date     the date to truncate
 * @param interval the histogram interval granularity
 * @return the truncated date, milliseconds always zeroed
 */
protected static DateTime floor(DateTime date, HistogramIntervalType interval) {
    DateTime result = date.withMillisOfSecond(0);
    switch (interval) {
        case minute:
            result = result.withSecondOfMinute(0);
            break;
        case hour:
            result = result.withSecondOfMinute(0).withMinuteOfHour(0);
            break;
        case day:
            result = result.withSecondOfMinute(0).withMinuteOfHour(0).withHourOfDay(0);
            break;
        case week:
            result = result.withSecondOfMinute(0).withMinuteOfHour(0).withHourOfDay(0)
                    .withDayOfWeek(DateTimeConstants.MONDAY);
            break;
        case month:
            result = result.withSecondOfMinute(0).withMinuteOfHour(0).withHourOfDay(0)
                    .withDayOfMonth(1);
            break;
    }
    return result;
}
java
/**
 * Convenience overload: retrieves the comments transcript for the given
 * envelope with no additional options (delegates with {@code null} options).
 *
 * @param accountId  the account owning the envelope
 * @param envelopeId the envelope whose comments transcript is fetched
 * @return the transcript bytes
 * @throws ApiException on API failure
 */
public byte[] getCommentsTranscript(String accountId, String envelopeId) throws ApiException { return getCommentsTranscript(accountId, envelopeId, null); }
python
def median1d(self, name, return_errors=False):
    """Return median 1d marginalized parameters.

    Parameters
    ----------
    name: str
        The name of the parameter requested.
    return_errors: Optional, {bool, False}
        If true, return a second and third value that represent the lower
        and upper 90% error on the parameter.

    Returns
    -------
    param: float or tuple
        The requested parameter.
    """
    entry = self.data[name]
    if not return_errors:
        return entry['best']
    low, high = entry['err']
    return (entry['best'], low, high)
java
/**
 * JNDI object-factory entry point: resolves {@link Reference} instances and
 * returns {@code null} for anything else so other factories may be consulted.
 *
 * @param o     the object to resolve (only References are handled)
 * @param n     unused name
 * @param c     unused context
 * @param envmt environment passed through to {@code resolve}
 * @return the resolved object, or {@code null} when {@code o} is not a Reference
 */
@Override
public Object getObjectInstance(Object o, Name n, Context c, Hashtable<?, ?> envmt) throws Exception {
    if (!(o instanceof Reference)) {
        return null;
    }
    @SuppressWarnings("unchecked")
    Hashtable<String, Object> env = (Hashtable<String, Object>) envmt;
    return resolve((Reference) o, env);
}
python
def _convert(reddit_session, data):
    """Return a Redditor object from the data."""
    redditor = Redditor(reddit_session, data['name'], fetch=False)
    # The fullname is of the form "t2_<id>"; keep only the id portion.
    redditor.id = data['id'].split('_')[1]  # pylint: disable=C0103,W0201
    return redditor
java
/**
 * Marshalls the SageMaker machine-learning-model resource data into the
 * protocol format, emitting the destination path and job ARN fields.
 *
 * @throws SdkClientException if the data is {@code null} or marshalling fails
 */
public void marshall(SageMakerMachineLearningModelResourceData sageMakerMachineLearningModelResourceData, ProtocolMarshaller protocolMarshaller) {
    if (sageMakerMachineLearningModelResourceData == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }

    final SageMakerMachineLearningModelResourceData data = sageMakerMachineLearningModelResourceData;
    try {
        protocolMarshaller.marshall(data.getDestinationPath(), DESTINATIONPATH_BINDING);
        protocolMarshaller.marshall(data.getSageMakerJobArn(), SAGEMAKERJOBARN_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
/**
 * Registers the given service, replacing any previous registration under the
 * same key. A service key may only ever be bound to a single service type: if
 * it was previously registered under a different type, an
 * INVALID_REGISTRATION error is raised. The key-to-type and key-to-info maps
 * are updated under the lock; the replaced service (if any) is marked
 * unregistered and listeners are notified after the lock is released.
 *
 * @param service the service to register
 * @return a result holding the previous registration (possibly null) and the new one
 * @throws ServiceLocationException when the key is already bound to another service type
 */
public Result<T> put(T service) { check(service); ServiceType serviceType = service.resolveServiceType(); T previous = null; lock(); try { ServiceType existingServiceType = keysToServiceTypes.get(service.getKey()); if (existingServiceType != null && !existingServiceType.equals(serviceType)) throw new ServiceLocationException("Invalid registration of service " + service.getKey() + ": already registered under service type " + existingServiceType + ", cannot be registered also under service type " + serviceType, SLPError.INVALID_REGISTRATION); keysToServiceTypes.put(service.getKey(), serviceType); previous = keysToServiceInfos.put(service.getKey(), service); service.setRegistered(true); if (previous != null) previous.setRegistered(false); } finally { unlock(); } notifyServiceAdded(previous, service); return new Result<T>(previous, service); }
java
/**
 * Prepares a statement with auto-generated-keys support, using a
 * forward-only, read-only result set (delegates to
 * {@code internalPrepareStatement}).
 *
 * @param sql               the SQL to prepare
 * @param autoGeneratedKeys one of the {@code Statement} generated-keys flags
 * @return the prepared statement
 * @throws SQLException on preparation failure
 */
public PreparedStatement prepareStatement(final String sql, final int autoGeneratedKeys) throws SQLException { return internalPrepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, autoGeneratedKeys); }
java
private static PharmacophoreQuery processPharmacophoreElement(Element e, HashMap<String, String> global) throws CDKException { PharmacophoreQuery ret = new PharmacophoreQuery(); ret.setProperty("description", e.getAttributeValue("description")); ret.setTitle(e.getAttributeValue("name")); // first get any local group definitions HashMap<String, String> local = getGroupDefinitions(e); // now lets look at the constraints Elements children = e.getChildElements(); for (int i = 0; i < children.size(); i++) { Element child = children.get(i); if (child.getQualifiedName().equals("distanceConstraint")) { processDistanceConstraint(child, local, global, ret); } else if (child.getQualifiedName().equals("angleConstraint")) { processAngleConstraint(child, local, global, ret); } } return ret; }
python
def init_db():
    """Initialize a new database with the default tables for chill.

    Creates the following tables:
    Chill
    Node
    Node_Node
    Route
    Query
    Template
    """
    with current_app.app_context():
        for create_table_file in CHILL_CREATE_TABLE_FILES:
            sql = fetch_query_string(create_table_file)
            db.execute(text(sql))
python
def handle_url_build_error(self, error: Exception, endpoint: str, values: dict) -> str:
    """Handle a build error.

    Each registered handler is tried in order and the first non-None
    result is returned as the URL. If no handler resolves the error, the
    original error is re-raised.
    """
    for handler in self.url_build_error_handlers:
        candidate = handler(error, endpoint, values)
        if candidate is not None:
            return candidate
    raise error
java
/**
 * Converts radians to degrees with extended precision. The conversion factor
 * 180/PI is split into a high part (exactly representable) and a low
 * correction part, and the input is likewise split via {@code doubleHighPart};
 * the four partial products are summed smallest-first to minimize rounding
 * error. Infinite inputs and signed zeros are returned unchanged to preserve
 * their sign.
 * NOTE(review): the summation order of the partial products is significant for
 * the floating-point result — do not reorder.
 *
 * @param x angle in radians
 * @return angle in degrees
 */
public static double toDegrees(double x) { if (Double.isInfinite(x) || x == 0.0) { // Matches +/- 0.0; return correct sign return x; } // These are 180/PI split into high and low order bits final double facta = 57.2957763671875; final double factb = 3.145894820876798E-6; double xa = doubleHighPart(x); double xb = x - xa; return xb * factb + xb * facta + xa * factb + xa * facta; }
python
def local_error(self, originalValue, calculatedValue):
    """Calculates the error between the two given values.

    :param list originalValue: List containing the values of the original data.
    :param list calculatedValue: List containing the values of the calculated
        TimeSeries that corresponds to originalValue.

    :return: Returns the error measure of the two given values.
    :rtype: numeric
    """
    original = originalValue[0]
    calculated = calculatedValue[0]

    # Both values falsy (e.g. both zero): define the error as zero to
    # avoid dividing by a zero mean magnitude.
    if not original and not calculated:
        return 0.0

    mean_magnitude = (abs(original) + abs(calculated)) / 2
    return abs(calculated - original) / mean_magnitude * 100
java
/**
 * Receives messages from the queue described by the request. The request is
 * first run through the standard pre-execution pipeline (handlers,
 * validation) before the actual receive call is executed.
 *
 * @param request the receive-message request
 * @return the result containing any received messages
 */
@Override public ReceiveMessageResult receiveMessage(ReceiveMessageRequest request) { request = beforeClientExecution(request); return executeReceiveMessage(request); }
python
def parseFilename(filename):
    """
    Parse out filename from any specified extensions.
    Returns rootname and string version of extension name.
    """
    bracket = filename.find('[')
    if bracket > 0:
        # An extension was given as "rootname[extname]".
        root = filename[:bracket]
        extn = filename[bracket + 1:-1]
    else:
        root = filename
        extn = None
    return root, extn
python
def _check_servers(self):
    """Check the servers variable and convert in a valid tuple form"""
    new_servers = []

    def check_format(server):
        """Validate a parsed server URL and collect it into new_servers."""
        # BUG FIX: the original error message interpolated `_type`, a closure
        # variable that is unbound (or stale from a previous loop iteration)
        # when this is reached from the string branch; report the actual
        # scheme of the offending server instead.
        if server.scheme not in ["thrift", "http", "https"]:
            raise RuntimeError("Unable to recognize protocol: \"%s\"" % server.scheme)

        if server.scheme == "thrift":
            if not thrift_connect:
                raise RuntimeError("If you want to use thrift, please install thrift. \"pip install thrift\"")
            if server.port is None:
                raise RuntimeError("If you want to use thrift, please provide a port number")

        new_servers.append(server)

    for server in self.servers:
        if isinstance(server, (tuple, list)):
            if len(list(server)) != 3:
                raise RuntimeError("Invalid server definition: \"%s\"" % repr(server))
            _type, host, port = server
            server = urlparse('%s://%s:%s' % (_type, host, port))
            check_format(server)
        elif isinstance(server, six.string_types):
            if server.startswith(("thrift:", "http:", "https:")):
                server = urlparse(server)
                check_format(server)
                continue
            else:
                tokens = [t for t in server.split(":") if t.strip()]
                if len(tokens) == 2:
                    host = tokens[0]
                    try:
                        port = int(tokens[1])
                    except ValueError:
                        raise RuntimeError("Invalid port: \"%s\"" % tokens[1])

                    # Infer the protocol from the conventional Elasticsearch
                    # port ranges (9200-9299 HTTP, 9500-9599 thrift).
                    if 9200 <= port <= 9299:
                        _type = "http"
                    elif 9500 <= port <= 9599:
                        _type = "thrift"
                    else:
                        raise RuntimeError("Unable to recognize port-type: \"%s\"" % port)

                    server = urlparse('%s://%s:%s' % (_type, host, port))
                    check_format(server)

    self.servers = new_servers
java
/**
 * Builds a query that negates the given query using the {@code $not}
 * operator.
 *
 * @param query the query to negate
 * @return a new negating query
 */
public static Query not(Query query) {
    final Query negated = new Query(false);
    negated.add("$not", query.toJson());
    return negated;
}
python
def get_rate_limits(response):
    """Returns a list of rate limit information from a given response's headers.

    Each entry describes one rate-limit window: its period, request limit,
    remaining requests, reset time, and human-readable time-to-reset.
    Assumes the X-RateLimit-* headers carry parallel comma-separated
    lists -- TODO confirm against the API documentation.
    """
    periods = response.headers['X-RateLimit-Period']
    if not periods:
        return []

    rate_limits = []

    # Parallel comma-separated lists: index idx of each header describes
    # the same rate-limit window.
    periods = periods.split(',')
    limits = response.headers['X-RateLimit-Limit'].split(',')
    remaining = response.headers['X-RateLimit-Remaining'].split(',')
    reset = response.headers['X-RateLimit-Reset'].split(',')

    for idx, period in enumerate(periods):
        rate_limit = {}
        limit_period = get_readable_time_string(period)
        rate_limit["period"] = limit_period
        # NOTE(review): stored as the raw header string, not an int.
        rate_limit["period_seconds"] = period
        rate_limit["request_limit"] = limits[idx]
        rate_limit["requests_remaining"] = remaining[idx]

        reset_datetime = get_datetime_from_timestamp(reset[idx])
        rate_limit["reset"] = reset_datetime

        right_now = datetime.now()
        if (reset_datetime is not None) and (right_now < reset_datetime):
            # add 1 second because of rounding
            seconds_remaining = (reset_datetime - right_now).seconds + 1
        else:
            seconds_remaining = 0

        rate_limit["reset_in_seconds"] = seconds_remaining
        rate_limit["time_to_reset"] = get_readable_time_string(seconds_remaining)
        rate_limits.append(rate_limit)

    return rate_limits
java
/**
 * Creates a JAXB element wrapping the given {@link DecisionType} value
 * under the XACML 2.0 context {@code Decision} element name.
 */
@XmlElementDecl(namespace = "urn:oasis:names:tc:xacml:2.0:context:schema:os", name = "Decision")
public JAXBElement<DecisionType> createDecision(DecisionType value) {
    return new JAXBElement<>(_Decision_QNAME, DecisionType.class, null, value);
}
java
/**
 * Applies a canned ACL to the named bucket, attaching the supplied
 * request metric collector to the underlying request.
 */
public void setBucketAcl(String bucketName, CannedAccessControlList cannedAcl,
        RequestMetricCollector requestMetricCollector)
        throws SdkClientException, AmazonServiceException {
    final SetBucketAclRequest aclRequest =
            new SetBucketAclRequest(bucketName, cannedAcl)
                    .withRequestMetricCollector(requestMetricCollector);
    setBucketAcl(aclRequest);
}
python
def intersection(self, x):
    """Return a new ``DateTimeRange`` that is the overlap of ``x`` and
    the current time range; both endpoints are ``None`` when the two
    ranges do not overlap.

    :param DateTimeRange x:
        Value to compute intersection with the current time range.

    :Sample Code:
        .. code:: python

            from datetimerange import DateTimeRange
            dtr0 = DateTimeRange("2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900")
            dtr1 = DateTimeRange("2015-03-22T10:05:00+0900", "2015-03-22T10:15:00+0900")
            dtr0.intersection(dtr1)
    :Output:
        .. parsed-literal::

            2015-03-22T10:05:00+0900 - 2015-03-22T10:10:00+0900
    """
    self.validate_time_inversion()
    x.validate_time_inversion()

    # The ranges overlap iff either one's start falls inside the other.
    if x.start_datetime in self or self.start_datetime in x:
        new_start = max(self.start_datetime, x.start_datetime)
        new_end = min(self.end_datetime, x.end_datetime)
    else:
        new_start = new_end = None

    return DateTimeRange(
        start_datetime=new_start,
        end_datetime=new_end,
        start_time_format=self.start_time_format,
        end_time_format=self.end_time_format,
    )
java
/**
 * Resolves the string resource and appends it to the snackbar message.
 */
@NonNull
@SuppressWarnings("WeakerAccess")
public SnackbarWrapper appendMessage(@StringRes int message) {
    final String resolved = context.getString(message);
    return appendMessage(resolved);
}
java
/**
 * Builds a {@link PushProcessorPipeline}: resolves the endpoint
 * registration (creating and starting a local {@link ApiServer} when
 * none was supplied externally), creates the metric registry, and
 * constructs one {@link PushProcessor} per supplier.
 *
 * <p>On any failure, every resource created so far (API server,
 * registry, processors) is closed, with secondary failures attached as
 * suppressed exceptions before the original exception is rethrown.</p>
 *
 * @param processor_suppliers factories for the pipeline's processors
 * @return the assembled pipeline
 * @throws Exception if any component fails to build or start
 */
public PushProcessorPipeline build(List<PushProcessorSupplier> processor_suppliers) throws Exception {
    ApiServer api = null;
    PushMetricRegistryInstance registry = null;
    final List<PushProcessor> processors = new ArrayList<>(processor_suppliers.size());

    try {
        final EndpointRegistration epr;
        // No external registration supplied: create our own API server
        // (started only after everything else succeeds, below).
        if (epr_ == null)
            epr = api = new ApiServer(api_sockaddr_);
        else
            epr = epr_;

        registry = cfg_.create(PushMetricRegistryInstance::new, epr);
        for (PushProcessorSupplier pps : processor_suppliers)
            processors.add(pps.build(epr));
        if (history_ != null)
            registry.setHistory(history_);

        if (api != null) api.start();
        return new PushProcessorPipeline(registry, collect_interval_seconds_, processors);
    } catch (Exception ex) {
        // Unwind everything built so far; cleanup failures are attached
        // as suppressed exceptions so the root cause is preserved.
        try {
            if (api != null) api.close();
        } catch (Exception ex1) {
            ex.addSuppressed(ex1);
        }
        try {
            if (registry != null) registry.close();
        } catch (Exception ex1) {
            ex.addSuppressed(ex1);
        }
        for (PushProcessor pp : processors) {
            try {
                pp.close();
            } catch (Exception ex1) {
                ex.addSuppressed(ex1);
            }
        }
        throw ex;
    }
}
java
/**
 * Creates an {@link LFunction} by applying the supplied building
 * consumer to a fresh {@link LFunctionBuilder}.
 *
 * @param buildingFunction consumer that configures the builder
 * @return the function produced by the configured builder
 */
@Nonnull
public static <T, R> LFunction<T, R> functionFrom(Consumer<LFunctionBuilder<T, R>> buildingFunction) {
    // Use the generic builder type instead of the raw type so the result
    // is typed LFunction<T, R> without an unchecked conversion.
    LFunctionBuilder<T, R> builder = new LFunctionBuilder<>();
    buildingFunction.accept(builder);
    return builder.build();
}
java
/**
 * Collects the locale-to-page-name mapping declared by {@code @PageDoc}/
 * {@code @Page} annotations (including those nested inside
 * {@code @PageDocs}/{@code @Pages} containers).  Mixing a single-page
 * annotation with a container annotation at the same location is
 * rejected.
 *
 * @param annotations annotation bindings found at one program location
 * @return map from locale to the page name declared for that locale
 */
private static Map<Locale, String> getAllPageDocs(IAnnotationBinding[] annotations) {
    // all @PageDoc or @Page annotations including annotations contained in @PageDocs or @Page
    List<IAnnotationBinding> allPageAnnotations = new ArrayList<>(2);

    // Pass 1: direct single-page annotations.
    List<Class<?>> singlePageAnnotationClasses = new ArrayList<>(2);
    singlePageAnnotationClasses.add(PageDoc.class);
    singlePageAnnotationClasses.add(Page.class);
    for (Class<?> annotationClass : singlePageAnnotationClasses) {
        IAnnotationBinding annotation = getAnnotationBinding(annotations, annotationClass);
        if (annotation == null) {
            continue; // annotation is not found
        }
        if (allPageAnnotations.size() > 0) {
            // TODO throw IllegalTestScriptException
            throw new RuntimeException("don't use multiple page annoations at the same place");
        }
        allPageAnnotations.add(annotation);
    }

    // Pass 2: container annotations, whose "value" holds nested
    // single-page annotations.
    List<Class<?>> multiplePageAnnotationClasses = new ArrayList<>(2);
    multiplePageAnnotationClasses.add(PageDocs.class);
    multiplePageAnnotationClasses.add(Pages.class);
    for (Class<?> annotationClass : multiplePageAnnotationClasses) {
        IAnnotationBinding annotation = getAnnotationBinding(annotations, annotationClass);
        if (annotation == null) {
            continue; // annotation is not found
        }
        if (allPageAnnotations.size() > 0) {
            // a single-page annotation (or earlier container) was already found
            // TODO throw IllegalTestScriptException
            throw new RuntimeException("don't use multiple page annoations at the same place");
        }
        // get @PageDoc or @Page from @PageDocs or @Pages
        Object value = getAnnotationValue(annotation, "value");
        Object[] values = (Object[]) value;
        for (Object element : values) {
            allPageAnnotations.add((IAnnotationBinding) element);
        }
    }

    // get resultPageMap; a later annotation with the same locale
    // overwrites an earlier one.
    Map<Locale, String> resultPageMap = new HashMap<>(allPageAnnotations.size());
    for (IAnnotationBinding eachPageAnnotation : allPageAnnotations) {
        Object value = getAnnotationValue(eachPageAnnotation, "value");
        Locale locale = getAnnotationLocaleValue(eachPageAnnotation, "locale");
        resultPageMap.put(locale, (String) value);
    }
    return resultPageMap;
}
python
def create_session(self, options):
    """Create the session factory used by :meth:`create_scoped_session`.

    The factory **must** return an object that SQLAlchemy recognizes as a
    session, or registering session events may raise an exception.  Valid
    factories include a :class:`~sqlalchemy.orm.session.Session` class or
    a :class:`~sqlalchemy.orm.session.sessionmaker`.  This implementation
    builds a ``sessionmaker`` for :class:`SignallingSession`.

    :param options: dict of keyword arguments passed to session class
    """
    factory = orm.sessionmaker(class_=SignallingSession, db=self, **options)
    return factory
java
/**
 * Returns the names of this resource's children, or an empty iterator
 * when there is no usable root directory.
 */
@Override
public Iterator<String> getChildren() {
    // NOTE(review): the original comment also mentioned a null wrapped
    // file, but the only guard actually performed is that the root is an
    // existing directory.  We will not resolve resources (or traverse
    // parent/child) if we aren't associated with a root.
    if (!root.isDirectory())
        return ResourceUtils.EMPTY_STRING_LIST.iterator();

    return ResourceUtils.getChildren(this, root);
}
java
/**
 * First-round term vector collection over one index segment.
 *
 * <p>For each requested term vector (unless it asks for a full or
 * list-based collection) this walks the segment's terms that match the
 * prefix/regexp automaton, skips ignored terms, computes basic per-term
 * statistics over the document set, and registers them with the
 * vector's data collector.  Terms whose registration reports that full
 * statistics are needed are remembered and re-visited in a second pass
 * over the terms enum.</p>
 *
 * @param termVectorList term vector components to fill
 * @param positionsData  per-document position counts used for full stats
 * @param docSet         documents (segment-local ids) to include
 * @param t              the segment's terms for the relevant field (may be null)
 * @param r              segment reader
 * @param lrc            segment context
 * @throws IOException on index access failure or unsupported sort
 */
private static void createTermvectorFirstRound(
    List<ComponentTermVector> termVectorList,
    Map<Integer, Integer> positionsData, List<Integer> docSet, Terms t,
    LeafReader r, LeafReaderContext lrc) throws IOException {
  if (t != null) {
    BytesRef term;
    TermsEnum termsEnum;
    PostingsEnum postingsEnum = null;
    String segmentName = "segment" + lrc.ord;
    String[] mutableKey = new String[1];
    int segmentNumber = lrc.parent.leaves().size();
    // loop over termvectors
    for (ComponentTermVector termVector : termVectorList) {
      // Build the term-matching automaton: prefix only, or prefix+regexp.
      CompiledAutomaton compiledAutomaton;
      if ((termVector.regexp == null) || (termVector.regexp.isEmpty())) {
        RegExp re = new RegExp(
            termVector.prefix + MtasToken.DELIMITER + ".*");
        compiledAutomaton = new CompiledAutomaton(re.toAutomaton());
      } else {
        RegExp re = new RegExp(termVector.prefix + MtasToken.DELIMITER
            + termVector.regexp + "\u0000*");
        compiledAutomaton = new CompiledAutomaton(re.toAutomaton());
      }
      // Automata of terms to exclude (from ignoreRegexp and/or ignoreList).
      List<ByteRunAutomaton> ignoreByteRunAutomatonList = null;
      if ((termVector.ignoreRegexp != null)
          && (!termVector.ignoreRegexp.isEmpty())) {
        ignoreByteRunAutomatonList = new ArrayList<>();
        RegExp re = new RegExp(termVector.prefix + MtasToken.DELIMITER
            + termVector.ignoreRegexp + "\u0000*");
        ignoreByteRunAutomatonList
            .add(new ByteRunAutomaton(re.toAutomaton()));
      }
      if (termVector.ignoreList != null) {
        if (ignoreByteRunAutomatonList == null) {
          ignoreByteRunAutomatonList = new ArrayList<>();
        }
        Map<String, Automaton> list = MtasToken.createAutomatonMap(
            termVector.prefix, new ArrayList<String>(termVector.ignoreList),
            termVector.ignoreListRegexp ? false : true);
        for (Automaton automaton : list.values()) {
          ignoreByteRunAutomatonList.add(new ByteRunAutomaton(automaton));
        }
      }
      // Full or list-based collection is handled elsewhere.
      if (!termVector.full && termVector.list == null) {
        termsEnum = t.intersect(compiledAutomaton, null);
        int initSize = Math.min((int) t.size(), 1000);
        termVector.subComponentFunction.dataCollector.initNewList(initSize,
            segmentName, segmentNumber, termVector.boundary);
        if (termVector.functions != null) {
          for (SubComponentFunction function : termVector.functions) {
            function.dataCollector.initNewList(initSize);
          }
        }
        // only if documents
        if (!docSet.isEmpty()) {
          int termDocId;
          int termNumberMaximum = termVector.number;
          HashMap<BytesRef, RegisterStatus> computeFullList = new HashMap<>();
          RegisterStatus registerStatus;
          // basic, don't need full values
          if (termVector.subComponentFunction.sortType
              .equals(CodecUtil.SORT_TERM)
              || termVector.subComponentFunction.sortType
                  .equals(CodecUtil.STATS_TYPE_SUM)
              || termVector.subComponentFunction.sortType
                  .equals(CodecUtil.STATS_TYPE_N)) {
            int termCounter = 0;
            boolean continueAfterPreliminaryCheck;
            // A cheap preliminary check is only worthwhile when the doc
            // set is a strict subset of the segment's live docs.
            boolean preliminaryCheck = false;
            if (r.getLiveDocs() == null && (docSet.size() != r.numDocs())) {
              preliminaryCheck = true;
            }
            // loop over terms
            boolean acceptedTerm;
            while ((term = termsEnum.next()) != null) {
              if (validateTermWithStartValue(term, termVector)
                  && validateTermWithDistance(term, termVector)) {
                termDocId = -1;
                acceptedTerm = true;
                if (ignoreByteRunAutomatonList != null) {
                  for (ByteRunAutomaton ignoreByteRunAutomaton : ignoreByteRunAutomatonList) {
                    if (ignoreByteRunAutomaton.run(term.bytes, term.offset,
                        term.length)) {
                      acceptedTerm = false;
                      break;
                    }
                  }
                }
                if (acceptedTerm) {
                  continueAfterPreliminaryCheck = true;
                  mutableKey[0] = null;
                  if (preliminaryCheck) {
                    try {
                      TermvectorNumberBasic preliminaryNumberBasic = computeTermvectorNumberBasic(
                          termsEnum, r);
                      if (preliminaryNumberBasic.docNumber > 0) {
                        continueAfterPreliminaryCheck = preliminaryRegisterValue(
                            term, termVector, preliminaryNumberBasic,
                            termNumberMaximum, segmentNumber, mutableKey);
                      } else {
                        continueAfterPreliminaryCheck = false;
                      }
                    } catch (IOException e) {
                      // best-effort check; fall back to the full computation
                      log.debug(e);
                      continueAfterPreliminaryCheck = true;
                    }
                  }
                  if (continueAfterPreliminaryCheck) {
                    // compute numbers;
                    TermvectorNumberBasic numberBasic = computeTermvectorNumberBasic(
                        docSet, termDocId, termsEnum, r, lrc, postingsEnum);
                    // register
                    if (numberBasic.docNumber > 0) {
                      termCounter++;
                      registerStatus = registerValue(term, termVector,
                          numberBasic, termNumberMaximum, segmentNumber,
                          false, mutableKey);
                      if (registerStatus != null) {
                        // remember for the second (full-statistics) pass
                        computeFullList.put(BytesRef.deepCopyOf(term),
                            registerStatus);
                      }
                    }
                  }
                  // stop after termCounterMaximum
                  if (termVector.subComponentFunction.sortType
                      .equals(CodecUtil.SORT_TERM)
                      && termVector.subComponentFunction.sortDirection
                          .equals(CodecUtil.SORT_ASC)
                      && termCounter >= termNumberMaximum) {
                    break;
                  }
                }
              }
            }
            // rerun for full
            if (computeFullList.size() > 0) {
              termsEnum = t.intersect(compiledAutomaton, null);
              while ((term = termsEnum.next()) != null) {
                if (validateTermWithStartValue(term, termVector)
                    && validateTermWithDistance(term, termVector)) {
                  termDocId = -1;
                  mutableKey[0] = null;
                  // only if (probably) needed
                  if (computeFullList.containsKey(term)) {
                    registerStatus = computeFullList.get(term);
                    boolean doAdd;
                    doAdd = termVector.subComponentFunction.sortType
                        .equals(CodecUtil.SORT_TERM);
                    doAdd |= termVector.subComponentFunction.sortDirection
                        .equals(CodecUtil.SORT_ASC);
                    doAdd |= termVector.list != null;
                    doAdd |= termVector.boundaryRegistration;
                    doAdd |= registerStatus.force;
                    doAdd |= termVector.subComponentFunction.dataCollector
                        .validateSegmentBoundary(registerStatus.sortValue);
                    if (doAdd) {
                      TermvectorNumberFull numberFull = computeTermvectorNumberFull(
                          docSet, termDocId, termsEnum, lrc, postingsEnum,
                          positionsData);
                      if (numberFull.docNumber > 0) {
                        termCounter++;
                        registerValue(term, termVector, numberFull,
                            mutableKey);
                      }
                    }
                  }
                }
              }
              computeFullList.clear();
            }
          } else {
            throw new IOException(
                "sort '" + termVector.subComponentFunction.sortType + " "
                    + termVector.subComponentFunction.sortDirection
                    + "' not supported");
          }
          // finish if segments are used
          termVector.subComponentFunction.dataCollector
              .closeSegmentKeyValueRegistration();
          if (termVector.functions != null) {
            for (SubComponentFunction function : termVector.functions) {
              function.dataCollector.closeSegmentKeyValueRegistration();
            }
          }
        }
        termVector.subComponentFunction.dataCollector.closeNewList();
        if (termVector.functions != null) {
          for (SubComponentFunction function : termVector.functions) {
            function.dataCollector.closeNewList();
          }
        }
      }
    }
  }
}
java
/**
 * Returns the replication factor for a block of this file: the file's
 * own replication for source blocks, or the codec's parity replication
 * for parity blocks of a raid-stored file.
 *
 * @throws IllegalStateException if a parity block is found on a
 *         non-raid file
 */
public short getBlockReplication(BlockInfo block) {
    if (storage.isSourceBlock(block)) {
        return getReplication();
    }
    if (storage.getStorageType() != StorageType.RAID_STORAGE) {
        throw new IllegalStateException("parity block " + block
            + " belongs to a non-raid file");
    }
    return ((INodeRaidStorage) storage).getCodec().parityReplication;
}
python
def _gen_flood_wrap(self, data, rmsimg, innerclip, outerclip=None, domask=False):
    """
    Generator function.
    Segment an image into islands and return one island at a time.

    Needs to work for entire image, and also for components within an island.

    Parameters
    ----------
    data : 2d-array
        Image array.

    rmsimg : 2d-array
        Noise image.

    innerclip, outerclip :float
        Seed (inner) and flood (outer) clipping values.

    domask : bool
        If True then look for a region mask in globals, only return islands
        that are within the region.
        Default = False.

    Yields
    ------
    data_box : 2d-array
        A island of sources with subthreshold values masked.

    xmin, xmax, ymin, ymax : int
        The corners of the data_box within the initial data array.
    """

    if outerclip is None:
        outerclip = innerclip

    # compute SNR image (data has already been background subtracted)
    snr = abs(data) / rmsimg
    # mask of pixles that are above the outerclip
    a = snr >= outerclip
    # segmentation a la scipy: l labels connected components 1..n
    l, n = label(a)
    f = find_objects(l)

    if n == 0:
        self.log.debug("There are no pixels above the clipping limit")
        return
    self.log.debug("{1} Found {0} islands total above flood limit".format(n, data.shape))
    # Yield values as before, though they are not sorted by flux
    for i in range(n):
        # NOTE(review): axis 0 is called x and axis 1 y here (numpy
        # row/column order), matching the yielded corner order.
        xmin, xmax = f[i][0].start, f[i][0].stop
        ymin, ymax = f[i][1].start, f[i][1].stop
        # NOTE(review): the seed test uses a strict '>' while the flood
        # test above uses '>=' -- confirm this asymmetry is intended.
        if np.any(snr[xmin:xmax, ymin:ymax] > innerclip):  # obey inner clip constraint
            # self.log.info("{1} Island {0} is above the inner clip limit".format(i, data.shape))
            data_box = copy.copy(data[xmin:xmax, ymin:ymax])  # copy so that we don't blank the master data
            data_box[np.where(
                snr[xmin:xmax, ymin:ymax] < outerclip)] = np.nan  # blank pixels that are outside the outerclip
            data_box[np.where(l[xmin:xmax, ymin:ymax] != i + 1)] = np.nan  # blank out other summits
            # check if there are any pixels left unmasked
            if not np.any(np.isfinite(data_box)):
                # self.log.info("{1} Island {0} has no non-masked pixels".format(i,data.shape))
                continue
            if domask and (self.global_data.region is not None):
                y, x = np.where(snr[xmin:xmax, ymin:ymax] >= outerclip)
                # convert indices of this sub region to indices in the greater image
                yx = list(zip(y + ymin, x + xmin))
                ra, dec = self.global_data.wcshelper.wcs.wcs_pix2world(yx, 1).transpose()
                mask = self.global_data.region.sky_within(ra, dec, degin=True)
                # if there are no un-masked pixels within the region then we skip this island.
                if not np.any(mask):
                    continue
                self.log.debug("Mask {0}".format(mask))
            # self.log.info("{1} Island {0} will be fit".format(i, data.shape))
            yield data_box, xmin, xmax, ymin, ymax
python
def active_devices(self, active_devices):
    """
    Sets the active_devices of this ReportBillingData.

    :param active_devices: The active_devices of this ReportBillingData.
    :type: int
    """
    if active_devices is None:
        raise ValueError("Invalid value for `active_devices`, must not be `None`")
    # The value is known to be non-None here, so only the range check remains.
    if active_devices < 0:
        raise ValueError("Invalid value for `active_devices`, must be a value greater than or equal to `0`")

    self._active_devices = active_devices
java
private void processComponentJar(URL jarFileURL, WorkList workList, List<String> implicitClasspath) { LOG.debug("Processing {}", jarFileURL); if (!jarFileURL.toString().endsWith(".zip") && !jarFileURL.toString().endsWith(".jar")) { return; } try { URL manifestURL = new URL("jar:" + jarFileURL.toString() + "!/META-INF/MANIFEST.MF"); InputStream in = null; try { in = manifestURL.openStream(); Manifest manifest = new Manifest(in); Attributes mainAttrs = manifest.getMainAttributes(); String classPath = mainAttrs.getValue("Class-Path"); if (classPath != null) { String[] fileList = classPath.split("\\s+"); for (String jarFile : fileList) { URL referencedURL = workList.createRelativeURL(jarFileURL, jarFile); if (workList.add(new WorkListItem(referencedURL))) { implicitClasspath.add(referencedURL.toString()); LOG.debug("Implicit jar: {}", referencedURL); } } } } finally { if (in != null) { in.close(); } } } catch (IOException ignore) { // Ignore } }
python
def get_string_index_oid(self, s):
    """Turns a string into an oid format is length of name
    followed by name chars in ascii"""
    encoded = self.get_bytes(s)
    # Length-prefixed encoding: (len, byte0, byte1, ...).
    return (len(encoded),) + encoded
python
def _AddExtractionProcessStatusTableRow(self, process_status, table_view): """Adds an extraction process status table row. Args: process_status (ProcessStatus): processing status. table_view (CLITabularTableView): table view. """ used_memory = self._FormatSizeInUnitsOf1024(process_status.used_memory) sources = '' if (process_status.number_of_produced_sources is not None and process_status.number_of_produced_sources_delta is not None): sources = '{0:d} ({1:d})'.format( process_status.number_of_produced_sources, process_status.number_of_produced_sources_delta) events = '' if (process_status.number_of_produced_events is not None and process_status.number_of_produced_events_delta is not None): events = '{0:d} ({1:d})'.format( process_status.number_of_produced_events, process_status.number_of_produced_events_delta) # TODO: shorten display name to fit in 80 chars and show the filename. table_view.AddRow([ process_status.identifier, process_status.pid, process_status.status, used_memory, sources, events, process_status.display_name])
java
/**
 * Populates the {@code app} JSON field with the application's package
 * name and, when resolvable, its version code and version name.
 *
 * @param context Android context used to query the package manager
 * @throws JSONException if a value cannot be stored in the JSON object
 */
private void buildApplicationData(Context context) throws JSONException {
    app = new JSONObject();
    app.put("package", context.getPackageName());
    try {
        PackageInfo info = context.getPackageManager().getPackageInfo(context.getPackageName(), 0);
        app.put("versionCode", info.versionCode);
        app.put("versionName", info.versionName);
        // TODO firstInstallTime and lastUpdate
    } catch (NameNotFoundException e) {
        // Cannot happen as we're checking a know package.
    }
}
java
/**
 * Adds three or more values for the named parameter and returns this
 * wrapper so calls can be chained fluently.
 *
 * @param name parameter name
 * @param first first value
 * @param second second value
 * @param rest any further values
 * @return this wrapper, for chaining
 */
public SqlQueryParameterWrap addParameters(String name, Object first, Object second, Object... rest) {
    super.addParameters(name, first, second, rest);
    return this;
}
python
def register(name, func=None):
    """Register ``func`` as a recognized control command named ``name``.

    Usable both directly (``register('cmd', fn)``) and as a decorator
    (``@register('cmd')``).
    """

    def decorator(target):
        # Record the command with the daemon, then hand the callable back
        # unchanged so decorator usage keeps the original function.
        ControlDaemon._register(name, target)
        return target

    # Direct call registers immediately; otherwise act as a decorator.
    return decorator(func) if func else decorator
java
/**
 * Adds a new appfwjsoncontenttype resource on the target NetScaler by
 * copying the user-set fields into a fresh resource and issuing the add.
 */
public static base_response add(nitro_service client, appfwjsoncontenttype resource) throws Exception {
    final appfwjsoncontenttype toAdd = new appfwjsoncontenttype();
    toAdd.jsoncontenttypevalue = resource.jsoncontenttypevalue;
    toAdd.isregex = resource.isregex;
    return toAdd.add_resource(client);
}
java
/**
 * Generates the HTML for this big-icon tool button, or an empty string
 * when the underlying handler is not visible for the given workplace.
 */
public String buttonHtml(CmsWorkplace wp) {
    if (!m_handler.isVisible(wp)) {
        return "";
    }
    String link = CmsToolManager.linkForToolPath(
        wp.getJsp(),
        getHandler().getPath(),
        getHandler().getParameters(wp));
    String onClick = "openPage('" + link + "');";
    String helpText = m_handler.isEnabled(wp)
        ? m_handler.getHelpText()
        : m_handler.getDisabledHelpText();
    return A_CmsHtmlIconButton.defaultButtonHtml(
        CmsHtmlIconButtonStyleEnum.BIG_ICON_TEXT,
        getId(),
        m_handler.getShortName(),
        helpText,
        m_handler.isEnabled(wp),
        m_handler.getIconPath(),
        m_handler.getConfirmationMessage(),
        onClick);
}
java
/**
 * Performs an eviction pass when required.  A null checker means
 * eviction is unconditional; otherwise the checker decides whether an
 * eviction is needed at all.
 *
 * @return true if an entry was evicted
 */
public boolean evict(S evictableStore, EvictionPolicyEvaluator<A, E> evictionPolicyEvaluator,
                     EvictionChecker evictionChecker, EvictionListener<A, E> evictionListener) {
    final boolean evictionRequired =
            evictionChecker == null || evictionChecker.isEvictionRequired();
    return evictionRequired
            && evictInternal(evictableStore, evictionPolicyEvaluator, evictionListener);
}
java
/**
 * Computes the interval between two {@code System.nanoTime}-style
 * timestamps, converted into the requested unit.  The result is
 * negative when {@code laterTime} precedes {@code earlierTime}.
 */
public static long systemTimeIntervalBetween(
        long earlierTime, long laterTime, TimeUnit systemTimeIntervalUnit) {
    final long elapsedNanos = laterTime - earlierTime;
    return systemTimeIntervalUnit.convert(elapsedNanos, TimeUnit.NANOSECONDS);
}
java
/**
 * Retrieves the value of the given parameter for the node at
 * {@code index}, raising an {@link EpanetException} when the native
 * call reports an error.
 */
public float ENgetnodevalue(int index, NodeParameters code) throws EpanetException {
    final float[] result = new float[1];
    final int errorCode = epanet.ENgetnodevalue(index, code.getCode(), result);
    checkError(errorCode);
    return result[0];
}
python
def read_validating_webhook_configuration(self, name, **kwargs):
    """read the specified ValidatingWebhookConfiguration

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.read_validating_webhook_configuration(name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: name of the ValidatingWebhookConfiguration (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param bool exact: Should the export be exact.  Exact export maintains cluster-specific fields like 'Namespace'. Deprecated. Planned for removal in 1.18.
    :param bool export: Should this value be exported.  Export strips fields that a user can not specify. Deprecated. Planned for removal in 1.18.
    :return: V1beta1ValidatingWebhookConfiguration
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Async callers get the request thread; sync callers get the data.
    if kwargs.get('async_req'):
        return self.read_validating_webhook_configuration_with_http_info(name, **kwargs)
    data = self.read_validating_webhook_configuration_with_http_info(name, **kwargs)
    return data
python
def _no_slots_copy(dct): """Internal helper: copy class __dict__ and clean slots class variables. (They will be re-created if necessary by normal class machinery.) """ dict_copy = dict(dct) if '__slots__' in dict_copy: for slot in dict_copy['__slots__']: dict_copy.pop(slot, None) return dict_copy
python
def get_object(self):
    """
    Get the object for previewing.

    Raises a http404 error if the object is not found.
    """
    obj = super(PreviewWrapper, self).get_object()
    if obj:
        return obj
    raise http.Http404
java
/**
 * Returns a mutable copy of all {@code @supports} rules contained in
 * this rule collection, in declaration order.
 */
@Nonnull
@ReturnsMutableCopy
public ICommonsList <CSSSupportsRule> getAllSupportsRules ()
{
  return m_aRules.getAllMapped (CSSSupportsRule.class::isInstance,
                                CSSSupportsRule.class::cast);
}
java
/**
 * Waits up to {@code timeout} milliseconds for a web element matching
 * {@code by}, optionally scrolling, and reports whether one appeared.
 */
public boolean waitForWebElement(By by, int timeout, boolean scroll) {
    if (config.commandLogging) {
        Log.d(config.commandLoggingTag,
                "waitForWebElement("+by+", "+timeout+", "+scroll+")");
    }
    final boolean found = waiter.waitForWebElement(by, 0, timeout, scroll) != null;
    return found;
}
python
def _eta_from_phi(self):
    """Update `eta` using current `phi`.

    Inverts the stick-breaking-style relation between the two parameter
    vectors: eta[w] = 1 - phi[w] / prod_{v<w} eta[v], with `etaprod`
    accumulating the running product of the eta values computed so far.
    """
    # NOTE(review): scipy.ndarray allocates an *uninitialized* array;
    # safe here because every entry is overwritten in the loop below.
    self.eta = scipy.ndarray(N_NT - 1, dtype='float')
    etaprod = 1.0
    for w in range(N_NT - 1):
        self.eta[w] = 1.0 - self.phi[w] / etaprod
        etaprod *= self.eta[w]
    # Validate the recomputed parameter against its allowed limits/types.
    _checkParam('eta', self.eta, self.PARAMLIMITS, self.PARAMTYPES)
java
/**
 * Synchronous access to the raw response body buffers.  Sets the raw
 * body flag before delegating to the shared body-buffer accessor --
 * presumably so the accessor returns the body undecoded; confirm
 * against {@code setRawBody}/{@code getResponseBodyBuffers}.
 *
 * @return the response body buffers
 * @throws IOException on a read failure
 * @throws IllegalHttpBodyException if the body is malformed
 */
@Override
public WsByteBuffer[] getRawResponseBodyBuffers() throws IOException, IllegalHttpBodyException {
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
        Tr.entry(tc, "getRawResponseBodyBuffers(sync)");
    }

    setRawBody(true);
    WsByteBuffer[] list = getResponseBodyBuffers();

    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
        Tr.exit(tc, "getRawResponseBodyBuffers(sync): " + list);
    }
    return list;
}
python
def create_value(cls, prop_name, val, model=None): # @NoSelf
    """This is used to create a value to be assigned to a
    property. Depending on the type of the value, different values
    are created and returned. For example, for a list, a
    ListWrapper is created to wrap it, and returned for the
    assignment. model is different from None when the value is
    changed (a model exists). Otherwise, during property creation
    model is None

    Wrapping rules (first match wins):
      * 3-tuple ``(class, instance, methods)`` where ``instance`` is an
        instance of ``class`` and ``methods`` is a tuple/list ->
        ObsUserClassWrapper
      * list -> ObsListWrapper
      * set -> ObsSetWrapper
      * dict -> ObsMapWrapper
      * anything else is returned unchanged.
    """
    if isinstance(val, tuple):
        # this might be a class instance to be wrapped
        # (thanks to Tobias Weber for
        # providing a bug fix to avoid TypeError (in 1.99.1)
        if len(val) == 3:
            # try/except/else: the isinstance checks themselves can raise
            # TypeError when val[0] is not a class; in that case the tuple
            # is treated as an ordinary (unwrapped) value.
            try:
                wrap_instance = isinstance(val[1], val[0]) and \
                    (isinstance(val[2], tuple) or
                     isinstance(val[2], list))
            except TypeError:
                pass # not recognized, it must be another type of tuple
            else:
                if wrap_instance:
                    res = wrappers.ObsUserClassWrapper(val[1], val[2])
                    # register the owning model only when one exists
                    if model:
                        res.__add_model__(model, prop_name)
                    return res

    elif isinstance(val, list):
        res = wrappers.ObsListWrapper(val)
        if model:
            res.__add_model__(model, prop_name)
        return res

    elif isinstance(val, set):
        res = wrappers.ObsSetWrapper(val)
        if model:
            res.__add_model__(model, prop_name)
        return res

    elif isinstance(val, dict):
        res = wrappers.ObsMapWrapper(val)
        if model:
            res.__add_model__(model, prop_name)
        return res

    return val
java
/**
 * Executes LRANGE for a binary key, returning the list elements between
 * {@code start} and {@code stop}.
 */
@Override
public List<byte[]> lrange(final byte[] key, final long start, final long stop) {
    checkIsInMultiOrPipeline();
    client.lrange(key, start, stop);
    final List<byte[]> reply = client.getBinaryMultiBulkReply();
    return reply;
}
python
def imagearm(sdmfile, scan, segment, npix=512, res=50, **kwargs):
    """ Function to do end-to-end 1d, arm-based imaging.

    Splits the VLA antennas into the three arms (west, east, north),
    grids the visibilities of each arm's internal baselines onto a 1d
    uv grid, and returns the per-arm inverse FFT of each grid.

    Parameters
    ----------
    sdmfile : str
        Path to the SDM data set.
    scan : int
        Scan number to process.
    segment : int
        Data segment to reproduce and image.
    npix : int
        Number of 1d grid pixels (default 512).
    res : float
        uv-grid resolution used to scale the uv coordinates (default 50).
    **kwargs
        Passed through to ``set_pipeline``.

    Returns
    -------
    list of ndarray
        One (nint, npix) float array per arm, in (west, east, north) order.
    """
    import sdmpy

    sdm = sdmpy.SDM(sdmfile)
    ants = {ant.stationId: ant.name for ant in sdm['Antenna']}
    stations = {st.stationId: st.name for st in sdm['Station'] if 'X' not in str(st.name)}
    # Antenna numbers per arm, derived from the station name ('W9', 'E3', ...).
    west = [int(str(ants[st]).lstrip('ea')) for st in stations if 'W' in str(stations[st])]
    east = [int(str(ants[st]).lstrip('ea')) for st in stations if 'E' in str(stations[st])]
    north = [int(str(ants[st]).lstrip('ea')) for st in stations if 'N' in str(stations[st])]

    d = set_pipeline(sdmfile, scan, **kwargs)
    blarr = rtlib.calc_blarr(d)
    # Baselines fully contained within a single arm.
    selwest = [i for i in range(len(blarr)) if all(b in west for b in blarr[i])]
    seleast = [i for i in range(len(blarr)) if all(b in east for b in blarr[i])]
    selnorth = [i for i in range(len(blarr)) if all(b in north for b in blarr[i])]
    u, v, w = ps.get_uvw_segment(d, segment=segment)
    data = pipeline_reproduce(d, segment=segment, product='data')
    # Average over the trailing two axes for each arm's baselines.
    dataw = data[:, selwest].mean(axis=3).mean(axis=2)
    datae = data[:, seleast].mean(axis=3).mean(axis=2)
    datan = data[:, selnorth].mean(axis=3).mean(axis=2)
    uw = u[selwest]
    ue = u[seleast]
    un = u[selnorth]
    vw = v[selwest]
    ve = v[seleast]
    vn = v[selnorth]

    datalist = []
    for (uu, vv, dd) in [(uw, vw, dataw), (ue, ve, datae), (un, vn, datan)]:
        # BUG FIX: allocate a fresh grid per arm.  The original reused a
        # single grid across all three arms, so cells not overwritten
        # still held the previous arm's visibilities.
        grid = n.zeros((len(data), npix), dtype='complex64')
        uu = n.mod(uu / res, npix)
        vv = n.mod(vv / res, npix)
        uv = n.round(n.sqrt(uu ** 2 + vv ** 2)).astype(int)
        for i in range(len(uv)):
            # BUG FIX: bound by npix rather than the hard-coded 512 so
            # non-default image sizes neither drop samples nor overflow.
            if uv[i] < npix:
                grid[:, uv[i]] = dd[:, i]
        datalist.append(n.fft.ifft(grid, axis=1).real)

    return datalist
python
def first_order_markov_process(t, variance, time_scale, rseed=None):
    """Draw a correlated (red) noise vector at the times ``t``.

    Samples a zero-mean multivariate normal whose covariance is
    Sigma_ij = s * exp(-|t_i - t_j| / l), with ``s = variance`` and
    ``l = time_scale`` -- the covariance expected from a first-order
    Markov process.  The associated power spectral density is
    S(f) = 2*l*s/(4*pi^2*f^2*l^2 + 1), i.e. a red (~1/f^2) spectrum
    at high frequency.

    Parameters
    ----------
    t: ndarray
        A time vector for which the red noise vector will be sampled
    variance: positive float
        variance of the resulting red noise vector
    time_scale: positive float
        Parameter of the covariance matrix
    rseed: int, optional
        Seed passed to ``np.random.seed`` for reproducibility

    Returns
    -------
    red_noise: ndarray
        Vector containing the red noise realizations

    See also
    --------
    power_law_noise
    """
    if variance < 0.0:
        raise ValueError("Variance must be positive")
    if time_scale < 0.0:
        raise ValueError("Time scale must be positive")
    np.random.seed(rseed)

    n_samples = len(t)
    mean = np.zeros(shape=(n_samples,))
    if variance == 0.0:
        return mean

    # Pairwise |t_i - t_j| lag matrix, shape (n_samples, n_samples).
    tiled = np.repeat(np.reshape(t, (1, -1)), n_samples, axis=0)
    lags = np.absolute(tiled - tiled.T)
    covariance = variance * np.exp(-np.absolute(lags) / time_scale)
    return np.random.multivariate_normal(mean, covariance)
java
/**
 * Forwards the event to the parent appender (which writes the formatted
 * text into {@code writer}), then copies the captured text into the
 * operation log of the thread that produced the event.  Events from
 * threads without a registered operation log are dropped (debug-logged).
 */
@Override
protected void subAppend(LoggingEvent event) {
    super.subAppend(event);
    // That should've gone into our writer. Notify the LogContext.
    String logOutput = writer.toString();
    // Reset so the next event starts from an empty buffer.
    writer.reset();

    OperationLog log = operationManager.getOperationLogByThread();
    if (log == null) {
        LOG.debug(" ---+++=== Dropped log event from thread " + event.getThreadName());
        return;
    }
    log.writeOperationLog(logOutput);
}
java
/**
 * Returns the configured endpoint, lazily creating (and caching) it
 * from the endpoint URI when only the URI is set.
 *
 * @throws CitrusRuntimeException when neither an endpoint nor a URI is available
 */
public Endpoint getOrCreateEndpoint(TestContext context) {
    if (endpoint != null) {
        return endpoint;
    }
    if (StringUtils.hasText(endpointUri)) {
        endpoint = context.getEndpointFactory().create(endpointUri, context);
        return endpoint;
    }
    throw new CitrusRuntimeException("Neither endpoint nor endpoint uri is set properly!");
}
java
/**
 * Converts the timestamp of the latest material modification that
 * triggered the given pipeline instance into display form.
 */
private TimeConverter.ConvertedTime getModificationDate(PipelineInstanceModel item) {
    final Date latestModification =
            item.getBuildCause().getMaterialRevisions().getDateOfLatestModification();
    return timeConverter.getConvertedTime(latestModification);
}
java
/**
 * Inserts the finder object into this object's child map (keyed by its
 * string) and registers it as an attribute.
 */
@Override
public void extraInsert(final FinderObject gob) {
    final GedObject ged = (GedObject) gob;
    objects.put(gob.getString(), ged);
    addAttribute(ged);
}
python
def update_campaign_destroy(self, campaign_id, **kwargs):
    """Delete a campaign

    Delete an update campaign.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass asynchronous=True
    >>> thread = api.update_campaign_destroy(campaign_id, asynchronous=True)
    >>> result = thread.get()

    :param asynchronous bool
    :param str campaign_id: The ID of the update campaign (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Async callers get the request thread; sync callers get the data.
    if kwargs.get('asynchronous'):
        return self.update_campaign_destroy_with_http_info(campaign_id, **kwargs)
    data = self.update_campaign_destroy_with_http_info(campaign_id, **kwargs)
    return data
python
def off_datastream(self, datastream):
    """
    To turn off datastream

    :param datastream: string
    """
    endpoint = '/datastream/' + str(datastream) + '/off'
    # POST with an empty body; the response is returned to the caller.
    return self.http.post(endpoint, "")
java
private void _addActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event__addActionPerformed JFileChooser fc = new JFileChooser(); fc.setMultiSelectionEnabled(true); fc.setFileSelectionMode(JFileChooser.FILES_ONLY); fc.setFileFilter(new FileFilter() { @Override public boolean accept(File f) { return f.isDirectory() || f.getName().endsWith(".R") || f.getName().endsWith(".Rdata"); } @Override public String getDescription() { return "R object file"; } }); if (fc.showOpenDialog(this) == JFileChooser.APPROVE_OPTION && fc.getSelectedFiles() != null) { File[] files = fc.getSelectedFiles(); for (File file : files) { if (file.getName().endsWith(".R")) { if (R != null) { R.source(file); } } else if (file.getName().endsWith(".Rdata")) { if (R != null) { R.load(file); } } else { Log.Out.println("Not loading/sourcing " + file.getName()); } } } update(); }
java
public static final String toHex(final char separator, final byte... bin) { if (bin == null || bin.length == 0) return ""; char[] buffer = new char[(bin.length * 3) - 1]; int end = bin.length - 1; int base = 0; // Store the index of buffer we're inserting into for (int i = 0; i < bin.length; i++) { byte b = bin[i]; buffer[base++] = hex[(b >> 4) & 0x0F]; buffer[base++] = hex[b & 0x0F]; if (i != end) buffer[base++] = separator; } return new String(buffer); }
python
def nest2ring(nside, ipix):
    """Drop-in replacement for healpy `~healpy.pixelfunc.nest2ring`."""
    # Promote scalars to a 1-d array; astype(copy=False) avoids a copy when the
    # input is already int64.
    pixels = np.atleast_1d(ipix)
    pixels = pixels.astype(np.int64, copy=False)
    return nested_to_ring(pixels, nside)
java
/**
 * Renders tracked feature locations in the global (stitched) image frame.
 * All detected features are drawn in red first, then the inlier subset is
 * drawn on top in blue so inliers remain visible.
 *
 * @param scale display scale applied after mapping into the global frame
 * @param offsetX horizontal display offset in pixels
 * @param offsetY vertical display offset in pixels
 * @param all every detected feature in the current frame
 * @param inliers subset of features consistent with the motion model
 * @param currToGlobal homography from the current frame into the global frame
 * @param g2 graphics context to draw into
 */
protected void drawFeatures( float scale , int offsetX , int offsetY ,
                             FastQueue<Point2D_F64> all,
                             FastQueue<Point2D_F64> inliers,
                             Homography2D_F64 currToGlobal, Graphics2D g2 ) {
    drawTransformedPoints(scale, offsetX, offsetY, all, currToGlobal, g2, Color.RED);
    drawTransformedPoints(scale, offsetX, offsetY, inliers, currToGlobal, g2, Color.BLUE);
}

/** Maps each point into the global frame, applies scale/offset, and draws it in the given color. */
private static void drawTransformedPoints( float scale, int offsetX, int offsetY,
                                           FastQueue<Point2D_F64> points,
                                           Homography2D_F64 currToGlobal,
                                           Graphics2D g2, Color color ) {
    Point2D_F64 distPt = new Point2D_F64();
    for (int i = 0; i < points.size; i++) {
        HomographyPointOps_F64.transform(currToGlobal, points.get(i), distPt);
        distPt.x = offsetX + distPt.x * scale;
        distPt.y = offsetY + distPt.y * scale;
        VisualizeFeatures.drawPoint(g2, (int) distPt.x, (int) distPt.y, color);
    }
}
python
def firstId(self) -> BaseReference:
    """ First child's id of current TextualNode """
    # A node that has no child-id collection at all cannot answer the question.
    if self.childIds is None:
        raise NotImplementedError
    # An empty collection means there is no first child.
    return self.childIds[0] if len(self.childIds) > 0 else None
python
def confirm_user_avatar(self, user, cropping_properties):
    """Confirm the temporary avatar image previously uploaded with the specified cropping.

    After a successful registry with :py:meth:`create_temp_user_avatar`, use this
    method to confirm the avatar for use. The final avatar can be a subarea of the
    uploaded image, which is customized with the ``cropping_properties``: the return
    value of :py:meth:`create_temp_user_avatar` should be used for this argument.

    :param user: the user to confirm the avatar for
    :type user: str
    :param cropping_properties: a dict of cropping properties from :py:meth:`create_temp_user_avatar`
    :type cropping_properties: Dict[str,Any]
    """
    url = self._get_url('user/avatar')
    # The cropping dict is posted verbatim as the JSON request body.
    response = self._session.post(
        url, params={'username': user}, data=json.dumps(cropping_properties))
    return json_loads(response)
java
/** Returns the existing {@code before} child element, creating it lazily if absent. */
public OrderingOrderingType<OrderingType<T>> getOrCreateBefore() {
    return new OrderingOrderingTypeImpl<OrderingType<T>>(
            this, "before", childNode, childNode.getOrCreate("before"));
}
java
/** Fetches all filteraction resources configured on the NetScaler appliance. */
public static filteraction[] get(nitro_service service) throws Exception{
    filteraction resource = new filteraction();
    return (filteraction[]) resource.get_resources(service);
}
java
@SuppressWarnings("unchecked") @Override public void eSet(int featureID, Object newValue) { switch (featureID) { case BpsimPackage.BP_SIM_DATA_TYPE__GROUP: ((FeatureMap.Internal)getGroup()).set(newValue); return; case BpsimPackage.BP_SIM_DATA_TYPE__SCENARIO: getScenario().clear(); getScenario().addAll((Collection<? extends Scenario>)newValue); return; } super.eSet(featureID, newValue); }