language: string (2 values)
func_code_string: string (lengths 63 to 466k)
python
def refresh_leader_status(self, instance):
    """
    Calls kubeutil.refresh_leader and compares the resulting leader status
    with the previous one. If it changed, update the event collection logic.
    """
    if not self.leader_candidate:
        return

    leader_status = self.kubeutil.is_leader
    self.kubeutil.refresh_leader()

    # nothing changed, no-op
    if leader_status == self.kubeutil.is_leader:
        return
    # else, reset the event collection config
    else:
        self.log.info("Leader status changed, updating event collection config...")
        self._configure_event_collection(instance)
java
@SuppressWarnings("rawtypes") @Override public EntityManagerFactory createEntityManagerFactory(final String unit, final Map map) { initJpaCounter(); final PersistenceProvider persistenceProvider = findDelegate(map); final ClassLoader tccl = tccl(); final ClassLoader hack = AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() { // pour findbugs /** {@inheritDoc} */ @Override public ClassLoader run() { return new JpaOverridePersistenceXmlClassLoader(tccl, persistenceProvider.getClass().getName()); } }); Thread.currentThread().setContextClassLoader(hack); try { final EntityManagerFactory entityManagerFactory = persistenceProvider .createEntityManagerFactory(unit, map); if (entityManagerFactory == null) { return null; } return JpaWrapper.createEntityManagerFactoryProxy(entityManagerFactory); } finally { Thread.currentThread().setContextClassLoader(tccl); } }
python
def replace_namespaced_service_status(self, name, namespace, body, **kwargs):  # noqa: E501
    """replace_namespaced_service_status  # noqa: E501

    replace status of the specified Service  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.replace_namespaced_service_status(name, namespace, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: name of the Service (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1Service body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str dry_run: When present, indicates that modifications should not be
        persisted. An invalid or unrecognized dryRun directive will result in an
        error response and no further processing of the request.
        Valid values are: - All: all dry run stages will be processed
    :return: V1Service
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.replace_namespaced_service_status_with_http_info(name, namespace, body, **kwargs)  # noqa: E501
    else:
        (data) = self.replace_namespaced_service_status_with_http_info(name, namespace, body, **kwargs)  # noqa: E501
        return data
python
def temp_output_file(prefix="tmp", suffix="", dir=None, make_parents=False, always_clean=False):
    """
    A context manager for convenience in creating a temporary file,
    which is deleted when exiting the context.

    Usage:
      with temp_output_file() as (fd, path):
        ...
    """
    return _temp_output(False, prefix=prefix, suffix=suffix, dir=dir,
                        make_parents=make_parents, always_clean=always_clean)
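A minimal usage sketch for the context manager above; the module name tmpfiles is hypothetical, and _temp_output is assumed to yield an OS-level file descriptor plus its path:

import os
from tmpfiles import temp_output_file  # hypothetical module name

with temp_output_file(prefix="scratch-", suffix=".txt") as (fd, path):
    os.write(fd, b"scratch data")  # fd is assumed to be an OS-level descriptor
    os.close(fd)
    print(path)  # the file still exists here
# on exiting the context, the file has been deleted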
java
public Packer putBytesF(final byte[] value) {
    putInt(value.length);
    ensureCapacity(bufPosition + value.length);
    System.arraycopy(value, 0, buf, bufPosition, value.length);
    bufPosition += value.length;
    return this;
}
java
void addToRelockList(final DeviceProxy dev, final int validity) throws DevFailed {
    // Check if it is the first relock
    if (relockMap == null) {
        // Create hash table for admin devices object
        relockMap = new Hashtable<String, LockedDeviceAmin>();
        // Create a thread to unlock all devices at exit
        Runtime.getRuntime().addShutdownHook(new ShutdownThread());
        // Create a thread to call the garbage collector periodically
        new GarbageThread().start();
    }
    // Check if admin device already exists.
    String adm;
    try {
        adm = dev.adm_name();
    } catch (final DevFailed e) {
        // Give up
        return;
    }
    final LockedDevice ld = new LockedDevice(dev.get_name(), validity);
    LockedDeviceAmin lda;
    if (!relockMap.containsKey(adm)) {
        // if not, create and add to the list
        lda = new LockedDeviceAmin(adm, ld);
        lda.start();
        relockMap.put(adm, lda);
    } else {
        lda = relockMap.get(adm);
        lda.add(ld);
    }
}
python
def blockcode(self, text, lang):
    """ Pass a code fence through pygments """
    if lang and self._config.get('highlight_syntax', 'True'):
        try:
            lexer = pygments.lexers.get_lexer_by_name(lang, stripall=True)
        except pygments.lexers.ClassNotFound:
            lexer = None
        if lexer:
            formatter = pygments.formatters.HtmlFormatter()  # pylint: disable=no-member
            return pygments.highlight(text, lexer, formatter)
    return '\n<div class="highlight"><pre>{}</pre></div>\n'.format(
        flask.escape(text.strip()))
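The pygments calls above can be exercised on their own; a small sketch against the real pygments package (only the input snippet is made up):

import pygments
import pygments.formatters
import pygments.lexers

lexer = pygments.lexers.get_lexer_by_name("python", stripall=True)
formatter = pygments.formatters.HtmlFormatter()
html = pygments.highlight("print('hi')", lexer, formatter)
print(html)  # e.g. <div class="highlight"><pre>...</pre></div>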
java
public T save() {
    beforeAll();
    if (isNew()) {
        beforeInsert();
        crudService.insert(entity);
        afterInsert();
    } else {
        beforeUpdate();
        entity = crudService.update(entity);
        afterUpdate();
    }
    afterAll();
    return entity;
}
python
def make_stalecheck_middleware(
        allowable_delay,
        skip_stalecheck_for_methods=SKIP_STALECHECK_FOR_METHODS):
    """
    Use to require that a function will run only if the blockchain is recently updated.

    This middleware takes an argument, so unlike other middleware,
    you must make the middleware with a method call.
    For example: `make_stalecheck_middleware(60*5)`

    If the latest block in the chain is older than 5 minutes in this example,
    then the middleware will raise a StaleBlockchain exception.
    """
    if allowable_delay <= 0:
        raise ValueError("You must set a positive allowable_delay in seconds for this middleware")

    def stalecheck_middleware(make_request, web3):
        cache = {'latest': None}

        def middleware(method, params):
            if method not in skip_stalecheck_for_methods:
                if _isfresh(cache['latest'], allowable_delay):
                    pass
                else:
                    latest = web3.eth.getBlock('latest')
                    if _isfresh(latest, allowable_delay):
                        cache['latest'] = latest
                    else:
                        raise StaleBlockchain(latest, allowable_delay)
            return make_request(method, params)
        return middleware
    return stalecheck_middleware
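A sketch of attaching such a middleware in web3.py; the registration attribute (middleware_onion here) varies across web3.py versions, so treat it as an assumption:

from web3 import Web3

w3 = Web3(Web3.HTTPProvider("http://localhost:8545"))  # assumed local node
# raise StaleBlockchain if the latest block is older than 5 minutes
w3.middleware_onion.add(make_stalecheck_middleware(60 * 5))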
java
public Method getMethod(String returnType, String name, String... paramTypeNames) {
    final Map<ParamNameList, Map<String, Method>> nameMap = methodsByTypeName.get(name);
    if (nameMap == null) {
        return null;
    }
    final Map<String, Method> paramsMap = nameMap.get(createParamNameList(paramTypeNames));
    if (paramsMap == null) {
        return null;
    }
    return paramsMap.get(returnType);
}
java
public SIMPIterator getRemoteSubscriptions() {
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.entry(tc, "getRemoteSubscriptions");
    List durableConsumers = new LinkedList();
    if (_anycastInputHandler != null) {
        // we have a durable consumer
        durableConsumers.add(_anycastInputHandler);
    }
    AttachedRemoteSubscriberIterator remoteSubscriptionItr =
        new AttachedRemoteSubscriberIterator(durableConsumers);
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.exit(tc, "getRemoteSubscriptions", remoteSubscriptionItr);
    return remoteSubscriptionItr;
}
java
public static void sortNode(TreeNode node, Comparator comparator) {
    TreeNodeList children = (TreeNodeList) node.getChildren();
    if (children != null && !children.isEmpty()) {
        Object[] childrenArray = children.toArray();
        Arrays.sort(childrenArray, comparator);
        for (int i = 0; i < childrenArray.length; i++) {
            children.setSibling(i, (TreeNode) childrenArray[i]);
        }
        for (int i = 0; i < children.size(); i++) {
            sortNode(children.get(i), comparator);
        }
    }
}
python
def update_correlated(self, z, R=None, H=None):
    """ Add a new measurement (z) to the Kalman filter assuming that
    process noise and measurement noise are correlated as defined in
    the `self.M` matrix.

    If z is None, nothing is changed.

    Parameters
    ----------
    z : (dim_z, 1): array_like
        measurement for this update. z can be a scalar if dim_z is 1,
        otherwise it must be convertible to a column vector.

    R : np.array, scalar, or None
        Optionally provide R to override the measurement noise for this
        one call, otherwise self.R will be used.

    H : np.array, or None
        Optionally provide H to override the measurement function for this
        one call, otherwise self.H will be used.
    """
    # set to None to force recompute
    self._log_likelihood = None
    self._likelihood = None
    self._mahalanobis = None

    if z is None:
        self.z = np.array([[None]*self.dim_z]).T
        self.x_post = self.x.copy()
        self.P_post = self.P.copy()
        self.y = zeros((self.dim_z, 1))
        return

    z = reshape_z(z, self.dim_z, self.x.ndim)

    if R is None:
        R = self.R
    elif isscalar(R):
        R = eye(self.dim_z) * R

    # rename for readability and a tiny extra bit of speed
    if H is None:
        H = self.H

    # handle special case: if z is in form [[z]] but x is not a column
    # vector dimensions will not match
    if self.x.ndim == 1 and shape(z) == (1, 1):
        z = z[0]

    if shape(z) == ():  # is it scalar, e.g. z=3 or z=np.array(3)
        z = np.asarray([z])

    # y = z - Hx
    # error (residual) between measurement and prediction
    self.y = z - dot(H, self.x)

    # common subexpression for speed
    PHT = dot(self.P, H.T)

    # project system uncertainty into measurement space
    self.S = dot(H, PHT) + dot(H, self.M) + dot(self.M.T, H.T) + R
    self.SI = self.inv(self.S)

    # K = PH'inv(S)
    # map system uncertainty into kalman gain
    self.K = dot(PHT + self.M, self.SI)

    # x = x + Ky
    # predict new x with residual scaled by the kalman gain
    self.x = self.x + dot(self.K, self.y)
    self.P = self.P - dot(self.K, dot(H, self.P) + self.M.T)

    self.z = deepcopy(z)
    self.x_post = self.x.copy()
    self.P_post = self.P.copy()
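A sketch of driving the correlated update through filterpy's KalmanFilter; the M cross-covariance values are invented for illustration, and its (dim_x, dim_z) shape is inferred from the S and K expressions above:

import numpy as np
from filterpy.kalman import KalmanFilter

kf = KalmanFilter(dim_x=2, dim_z=1)
kf.x = np.array([[0.], [0.]])          # state: position, velocity
kf.F = np.array([[1., 1.], [0., 1.]])  # constant-velocity transition
kf.H = np.array([[1., 0.]])            # we measure position only
kf.R = np.array([[2.]])
kf.M = np.array([[0.5], [0.]])         # assumed cross-covariance, shape (dim_x, dim_z)

kf.predict()
kf.update_correlated(np.array([[1.2]]))
print(kf.x)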
java
static Predicate<DateValue> weekIntervalFilter(final int interval, final DayOfWeek weekStart, final DateValue dtStart) {
    return new Predicate<DateValue>() {
        private static final long serialVersionUID = 7059994888520369846L;

        //the latest day with day of week weekStart on or before dtStart
        DateValue wkStart;
        {
            DTBuilder wkStartB = new DTBuilder(dtStart);
            wkStartB.day -= (7 + TimeUtils.dayOfWeek(dtStart).getCalendarConstant() - weekStart.getCalendarConstant()) % 7;
            wkStart = wkStartB.toDate();
        }

        public boolean apply(DateValue date) {
            int daysBetween = TimeUtils.daysBetween(date, wkStart);
            if (daysBetween < 0) {
                //date must be before dtStart. Shouldn't occur in practice.
                daysBetween += (interval * 7 * (1 + daysBetween / (-7 * interval)));
            }
            int off = (daysBetween / 7) % interval;
            return off == 0;
        }
    };
}
python
def OnMoreSquareToggle(self, event):
    """Toggle the more-square view (better looking, but more likely to filter records)"""
    self.squareMap.square_style = not self.squareMap.square_style
    self.squareMap.Refresh()
    self.moreSquareViewItem.Check(self.squareMap.square_style)
python
def ParseLocalEntryRow(self, parser_mediator, query, row, cache=None,
                       database=None, **unused_kwargs):
    """Parses a local entry row.

    Args:
        parser_mediator (ParserMediator): mediates interactions between parsers
            and other components, such as storage and dfvfs.
        query (str): query that created the row.
        row (sqlite3.Row): row.
        cache (Optional[SQLiteCache]): cache.
        database (Optional[SQLiteDatabase]): database.
    """
    query_hash = hash(query)

    inode_number = self._GetRowValue(query_hash, row, 'inode_number')
    local_path = self.GetLocalPath(inode_number, cache, database)

    event_data = GoogleDriveSnapshotLocalEntryEventData()
    event_data.path = local_path
    event_data.query = query
    event_data.size = self._GetRowValue(query_hash, row, 'size')

    timestamp = self._GetRowValue(query_hash, row, 'modified')
    date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)
python
def rotation_coefs(self):
    """ get the rotation coefficients in radians

    Returns
    -------
    rotation_coefs : list
        the rotation coefficients implied by Vario2d.bearing
    """
    return [np.cos(self.bearing_rads),
            np.sin(self.bearing_rads),
            -1.0 * np.sin(self.bearing_rads),
            np.cos(self.bearing_rads)]
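The four coefficients are the row-major entries of a 2-D rotation matrix; a small numpy illustration with a made-up bearing of 30 degrees:

import numpy as np

bearing_rads = np.deg2rad(30.0)  # stand-in for self.bearing_rads
coefs = [np.cos(bearing_rads), np.sin(bearing_rads),
         -1.0 * np.sin(bearing_rads), np.cos(bearing_rads)]
R = np.array(coefs).reshape(2, 2)
print(R @ np.array([1.0, 0.0]))  # the x unit vector rotated by the bearing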
python
def toLily(self):
    '''
    Method which converts the object instance, its attributes and children
    to a string of lilypond code

    :return: str of lilypond code
    '''
    lilystring = ""
    if self.item is not None:
        if not isinstance(self.GetChild(0), NoteNode):
            if hasattr(self.item, "chord") and self.item.chord:
                self.item.chord = "stop"
        if isinstance(self.GetChild(0), NoteNode):
            if not hasattr(self.item, "chord") or not self.item.chord:
                self.item.chord = "start"
        lilystring += self.item.toLily()
    children = self.GetChildrenIndexes()
    written = False
    for child in children:
        if self.GetChild(child) is not None:
            if isinstance(self.GetChild(child), NoteNode):
                lilystring += " "
            return_val = self.GetChild(child).toLily()
            if isinstance(return_val, str):
                lilystring += return_val
            else:
                lilystring = return_val[0] + lilystring + return_val[1]
            if isinstance(child, OtherNodes.ExpressionNode):
                written = True
                lilystring += self.item.GetClosingNotationLilies()
    if len(children) == 0 or not written:
        lilystring += self.item.GetClosingNotationLilies()
    return lilystring
java
public Map<String, Object> entityProps(ManagedObjectReference entityMor, String[] props)
        throws InvalidPropertyFaultMsg, RuntimeFaultFaultMsg {
    final HashMap<String, Object> retVal = new HashMap<>();
    // Create PropertyFilterSpec using the PropertySpec and ObjectSpec
    PropertyFilterSpec[] propertyFilterSpecs = {new PropertyFilterSpecBuilder().propSet(
            // Create Property Spec
            new PropertySpecBuilder().all(false).type(entityMor.getType()).pathSet(props))
            .objectSet(
                    // Now create Object Spec
                    new ObjectSpecBuilder().obj(entityMor))};
    List<ObjectContent> objCont = vimPort.retrievePropertiesEx(serviceContent.getPropertyCollector(),
            Arrays.asList(propertyFilterSpecs), new RetrieveOptions()).getObjects();
    if (objCont != null) {
        for (ObjectContent oc : objCont) {
            List<DynamicProperty> dps = oc.getPropSet();
            for (DynamicProperty dp : dps) {
                retVal.put(dp.getName(), dp.getVal());
            }
        }
    }
    return retVal;
}
java
public final void setColor(Color color, int x, int y) {
    int[] pixels = ((DataBufferInt) img.getRaster().getDataBuffer()).getData();
    int red = (int) (color.getRed() * 255.0);
    int green = (int) (color.getGreen() * 255.0);
    int blue = (int) (color.getBlue() * 255.0);
    int alpha = (int) (color.getAlpha() * 255.0);
    pixels[x + y * width] = alpha << 24 | red << 16 | green << 8 | blue;
}
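The last line above is plain ARGB byte packing; the same arithmetic in Python, assuming channels already scaled to 0-255:

def pack_argb(alpha, red, green, blue):
    # one byte per channel, laid out as 0xAARRGGBB
    return alpha << 24 | red << 16 | green << 8 | blue

assert pack_argb(0xFF, 0x12, 0x34, 0x56) == 0xFF123456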
python
def copy(self):
    """Create a copy of the Vector"""
    cpy_vec = Vector4()
    cpy_vec.x = self.x
    cpy_vec.y = self.y
    cpy_vec.z = self.z
    cpy_vec.w = self.w
    return cpy_vec
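Why an explicit copy matters: plain assignment only aliases the object. A demonstration with a minimal stand-in Vector4 (the real class presumably has more to it):

class Vector4:
    def __init__(self, x=0.0, y=0.0, z=0.0, w=0.0):
        self.x, self.y, self.z, self.w = x, y, z, w

v = Vector4(1.0, 2.0, 3.0, 4.0)
alias = v                          # same object; writes through alias affect v
cpy = Vector4(v.x, v.y, v.z, v.w)  # field-by-field copy, as in the method above
cpy.x = 9.0
assert v.x == 1.0                  # the original is untouched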
python
def _full_axis_reduce(self, axis, func, alternate_index=None):
    """Applies a function that reduces the Manager to a Series but requires
    knowledge of the full axis.

    Args:
        func: Function to reduce the Manager by. This function takes in a Manager.
        axis: Axis to apply the function to.
        alternate_index: If the resulting series should have an index
            different from the current query_compiler's index or columns.

    Returns:
        Pandas series containing the reduced data.
    """
    result = self.data.map_across_full_axis(axis, func)
    if axis == 0:
        columns = alternate_index if alternate_index is not None else self.columns
        return self.__constructor__(result, index=["__reduced__"], columns=columns)
    else:
        index = alternate_index if alternate_index is not None else self.index
        return self.__constructor__(result, index=index, columns=["__reduced__"])
java
private Type parseReferenceType(EnclosingScope scope) {
    int start = index;
    match(Ampersand);
    // Try to parse an annotated lifetime
    int backtrack = index;
    Identifier lifetimeIdentifier = parseOptionalLifetimeIdentifier(scope, false);
    if (lifetimeIdentifier != null) {
        // We cannot allow a newline after the colon, as it would
        // unintentionally match a return type that happens to be reference
        // type without lifetime annotation (return type in method signature
        // is always followed by colon and newline).
        if (tryAndMatch(true, Colon) != null && !isAtEOL()) {
            // Now we know that there is an annotated lifetime
            scope.mustBeLifetime(lifetimeIdentifier);
            Type element = parseArrayType(scope);
            Type type = new Type.Reference(element, lifetimeIdentifier);
            return annotateSourceLocation(type, start);
        }
    }
    index = backtrack;
    Type element = parseArrayType(scope);
    Type type = new Type.Reference(element);
    return annotateSourceLocation(type, start);
}
python
def addToNetwork(grph, nds, count, weighted, nodeType, nodeInfo, fullInfo, coreCitesDict, coreValues, detailedValues, addCR, recordToCite=True, headNd=None):
    """Adds the citations _nds_ to _grph_, according to the rules given by _nodeType_, _fullInfo_, etc.

    _headNd_ is the citation of the Record
    """
    if headNd is not None:
        hID = makeID(headNd, nodeType)
        if nodeType == 'full' or nodeType == 'original':
            hYear = getattr(headNd, "year")
        if hID not in grph:
            nodeName, nodeDat = makeNodeTuple(headNd, hID, nodeInfo, fullInfo, nodeType, count, coreCitesDict, coreValues, detailedValues, addCR)
            grph.add_node(nodeName, **nodeDat)
    else:
        hID = None
    idList = []
    yearList = []
    for n in nds:
        nID = makeID(n, nodeType)
        if nodeType == 'full' or nodeType == 'original':
            try:
                nYear = getattr(n, "year")
            except:
                nYear = None
            yearList.append(nYear)
        if nID not in grph:
            nodeName, nodeDat = makeNodeTuple(n, nID, nodeInfo, fullInfo, nodeType, count, coreCitesDict, coreValues, detailedValues, addCR)
            grph.add_node(nodeName, **nodeDat)
        elif count:
            grph.node[nID]['count'] += 1
        idList.append(nID)
    addedEdges = []
    if hID:
        for i in range(len(idList)):
            nID = idList[i]
            if nodeType == 'full' or nodeType == 'original':
                nYear = yearList[i]
                try:
                    yearDiff = abs(hYear - nYear)
                except:
                    yearDiff = None
                if weighted:
                    try:
                        if recordToCite:
                            grph[hID][nID]['weight'] += 1
                        else:
                            grph[nID][hID]['weight'] += 1
                    except KeyError:
                        if recordToCite:
                            grph.add_edge(hID, nID, weight=1, yearDiff=yearDiff)
                        else:
                            grph.add_edge(nID, hID, weight=1, yearDiff=yearDiff)
                elif nID not in grph[hID]:
                    addedEdges.append((hID, nID))
            elif weighted:
                try:
                    if recordToCite:
                        grph[hID][nID]['weight'] += 1
                    else:
                        grph[nID][hID]['weight'] += 1
                except KeyError:
                    if recordToCite:
                        grph.add_edge(hID, nID, weight=1)
                    else:
                        grph.add_edge(nID, hID, weight=1)
            elif nID not in grph[hID]:
                addedEdges.append((hID, nID, {'yearDiff': yearDiff}))
    elif len(idList) > 1:
        for i, outerID in enumerate(idList):
            for innerID in idList[i + 1:]:
                if weighted:
                    try:
                        grph[outerID][innerID]['weight'] += 1
                    except KeyError:
                        grph.add_edge(outerID, innerID, weight=1)
                elif innerID not in grph[outerID]:
                    addedEdges.append((outerID, innerID))
    grph.add_edges_from(addedEdges)
java
@Override
public RowQuery<K, C> getKey(final K rowKey) {
    return new AbstractRowQueryImpl<K, C>(columnFamily.getColumnSerializer()) {
        private boolean firstPage = true;

        @Override
        public ColumnQuery<C> getColumn(final C column) {
            return new ColumnQuery<C>() {
                @Override
                public OperationResult<Column<C>> execute() throws ConnectionException {
                    return connectionPool.executeWithFailover(new AbstractKeyspaceOperationImpl<Column<C>>(
                            tracerFactory.newTracer(CassandraOperationType.GET_COLUMN, columnFamily),
                            pinnedHost, keyspace.getKeyspaceName()) {
                        @Override
                        public Column<C> internalExecute(Client client, ConnectionContext context) throws Exception {
                            ColumnOrSuperColumn cosc = client.get(
                                    columnFamily.getKeySerializer().toByteBuffer(rowKey),
                                    new org.apache.cassandra.thrift.ColumnPath().setColumn_family(
                                            columnFamily.getName()).setColumn(
                                            columnFamily.getColumnSerializer().toByteBuffer(column)),
                                    ThriftConverter.ToThriftConsistencyLevel(consistencyLevel));
                            if (cosc.isSetColumn()) {
                                org.apache.cassandra.thrift.Column c = cosc.getColumn();
                                return new ThriftColumnImpl<C>(columnFamily.getColumnSerializer().fromBytes(c.getName()), c);
                            } else if (cosc.isSetSuper_column()) {
                                // TODO: Super columns should be deprecated
                                SuperColumn sc = cosc.getSuper_column();
                                return new ThriftSuperColumnImpl<C>(columnFamily.getColumnSerializer().fromBytes(sc.getName()), sc);
                            } else if (cosc.isSetCounter_column()) {
                                org.apache.cassandra.thrift.CounterColumn c = cosc.getCounter_column();
                                return new ThriftCounterColumnImpl<C>(columnFamily.getColumnSerializer().fromBytes(c.getName()), c);
                            } else if (cosc.isSetCounter_super_column()) {
                                // TODO: Super columns should be deprecated
                                CounterSuperColumn sc = cosc.getCounter_super_column();
                                return new ThriftCounterSuperColumnImpl<C>(columnFamily.getColumnSerializer().fromBytes(sc.getName()), sc);
                            } else {
                                throw new RuntimeException("Unknown column type in response");
                            }
                        }

                        @Override
                        public ByteBuffer getRowKey() {
                            return columnFamily.getKeySerializer().toByteBuffer(rowKey);
                        }
                    }, retry);
                }

                @Override
                public ListenableFuture<OperationResult<Column<C>>> executeAsync() throws ConnectionException {
                    return executor.submit(new Callable<OperationResult<Column<C>>>() {
                        @Override
                        public OperationResult<Column<C>> call() throws Exception {
                            return execute();
                        }
                    });
                }
            };
        }

        @Override
        public OperationResult<ColumnList<C>> execute() throws ConnectionException {
            return connectionPool.executeWithFailover(
                    new AbstractKeyspaceOperationImpl<ColumnList<C>>(tracerFactory.newTracer(
                            CassandraOperationType.GET_ROW, columnFamily), pinnedHost, keyspace.getKeyspaceName()) {
                        @Override
                        public ColumnList<C> execute(Client client, ConnectionContext context) throws ConnectionException {
                            if (isPaginating && paginateNoMore) {
                                return new EmptyColumnList<C>();
                            }
                            return super.execute(client, context);
                        }

                        @Override
                        public ColumnList<C> internalExecute(Client client, ConnectionContext context) throws Exception {
                            List<ColumnOrSuperColumn> columnList = client.get_slice(
                                    columnFamily.getKeySerializer().toByteBuffer(rowKey),
                                    new ColumnParent().setColumn_family(columnFamily.getName()),
                                    predicate, ThriftConverter.ToThriftConsistencyLevel(consistencyLevel));
                            // Special handling for pagination
                            if (isPaginating && predicate.isSetSlice_range()) {
                                // Did we reach the end of the query?
                                if (columnList.size() != predicate.getSlice_range().getCount()) {
                                    paginateNoMore = true;
                                }
                                // If this is the first page then adjust the count
                                // so we fetch one extra column that will later be dropped
                                if (firstPage) {
                                    firstPage = false;
                                    if (predicate.getSlice_range().getCount() != Integer.MAX_VALUE)
                                        predicate.getSlice_range().setCount(predicate.getSlice_range().getCount() + 1);
                                } else {
                                    if (!columnList.isEmpty())
                                        columnList.remove(0);
                                }
                                // Set the start column for the next page to the last
                                // column of this page. We will discard this column later.
                                if (!columnList.isEmpty()) {
                                    ColumnOrSuperColumn last = Iterables.getLast(columnList);
                                    if (last.isSetColumn()) {
                                        predicate.getSlice_range().setStart(last.getColumn().getName());
                                    } else if (last.isSetCounter_column()) {
                                        predicate.getSlice_range().setStart(last.getCounter_column().getName());
                                    } else if (last.isSetSuper_column()) {
                                        // TODO: Super columns should be deprecated
                                        predicate.getSlice_range().setStart(last.getSuper_column().getName());
                                    } else if (last.isSetCounter_super_column()) {
                                        // TODO: Super columns should be deprecated
                                        predicate.getSlice_range().setStart(last.getCounter_super_column().getName());
                                    }
                                }
                            }
                            ColumnList<C> result = new ThriftColumnOrSuperColumnListImpl<C>(columnList, columnFamily.getColumnSerializer());
                            return result;
                        }

                        @Override
                        public ByteBuffer getRowKey() {
                            return columnFamily.getKeySerializer().toByteBuffer(rowKey);
                        }
                    }, retry);
        }

        @Override
        public ColumnCountQuery getCount() {
            return new ColumnCountQuery() {
                @Override
                public OperationResult<Integer> execute() throws ConnectionException {
                    return connectionPool.executeWithFailover(new AbstractKeyspaceOperationImpl<Integer>(
                            tracerFactory.newTracer(CassandraOperationType.GET_COLUMN_COUNT, columnFamily),
                            pinnedHost, keyspace.getKeyspaceName()) {
                        @Override
                        public Integer internalExecute(Client client, ConnectionContext context) throws Exception {
                            return client.get_count(columnFamily.getKeySerializer().toByteBuffer(rowKey),
                                    new ColumnParent().setColumn_family(columnFamily.getName()),
                                    predicate, ThriftConverter.ToThriftConsistencyLevel(consistencyLevel));
                        }

                        @Override
                        public ByteBuffer getRowKey() {
                            return columnFamily.getKeySerializer().toByteBuffer(rowKey);
                        }
                    }, retry);
                }

                @Override
                public ListenableFuture<OperationResult<Integer>> executeAsync() throws ConnectionException {
                    return executor.submit(new Callable<OperationResult<Integer>>() {
                        @Override
                        public OperationResult<Integer> call() throws Exception {
                            return execute();
                        }
                    });
                }
            };
        }

        @Override
        public ListenableFuture<OperationResult<ColumnList<C>>> executeAsync() throws ConnectionException {
            return executor.submit(new Callable<OperationResult<ColumnList<C>>>() {
                @Override
                public OperationResult<ColumnList<C>> call() throws Exception {
                    return execute();
                }
            });
        }

        @Override
        public RowCopier<K, C> copyTo(final ColumnFamily<K, C> otherColumnFamily, final K otherRowKey) {
            return new RowCopier<K, C>() {
                private boolean useOriginalTimestamp = true;

                @Override
                public OperationResult<Void> execute() throws ConnectionException {
                    return connectionPool.executeWithFailover(
                            new AbstractKeyspaceOperationImpl<Void>(tracerFactory.newTracer(
                                    CassandraOperationType.COPY_TO, columnFamily), pinnedHost,
                                    keyspace.getKeyspaceName()) {
                                @Override
                                public Void internalExecute(Client client, ConnectionContext context) throws Exception {
                                    long currentTime = keyspace.getConfig().getClock().getCurrentTime();
                                    List<ColumnOrSuperColumn> columnList = client.get_slice(
                                            columnFamily.getKeySerializer().toByteBuffer(rowKey),
                                            new ColumnParent().setColumn_family(columnFamily.getName()),
                                            predicate, ThriftConverter.ToThriftConsistencyLevel(consistencyLevel));
                                    // Create mutation list from columns in the response
                                    List<Mutation> mutationList = new ArrayList<Mutation>();
                                    for (ColumnOrSuperColumn sosc : columnList) {
                                        ColumnOrSuperColumn cosc;
                                        if (sosc.isSetColumn()) {
                                            cosc = new ColumnOrSuperColumn().setColumn(sosc.getColumn());
                                            if (!useOriginalTimestamp)
                                                cosc.getColumn().setTimestamp(currentTime);
                                        } else if (sosc.isSetSuper_column()) {
                                            cosc = new ColumnOrSuperColumn().setSuper_column(sosc.getSuper_column());
                                            if (!useOriginalTimestamp) {
                                                for (org.apache.cassandra.thrift.Column subColumn : sosc.getSuper_column().getColumns()) {
                                                    subColumn.setTimestamp(currentTime);
                                                }
                                            }
                                        } else if (sosc.isSetCounter_column()) {
                                            cosc = new ColumnOrSuperColumn().setCounter_column(sosc.getCounter_column());
                                        } else if (sosc.isSetCounter_super_column()) {
                                            cosc = new ColumnOrSuperColumn().setCounter_super_column(sosc.getCounter_super_column());
                                        } else {
                                            continue;
                                        }
                                        mutationList.add(new Mutation().setColumn_or_supercolumn(cosc));
                                    }
                                    // Create mutation map
                                    Map<ByteBuffer, Map<String, List<Mutation>>> mutationMap = new HashMap<ByteBuffer, Map<String, List<Mutation>>>();
                                    HashMap<String, List<Mutation>> cfmap = new HashMap<String, List<Mutation>>();
                                    cfmap.put(otherColumnFamily.getName(), mutationList);
                                    mutationMap.put(columnFamily.getKeySerializer().toByteBuffer(otherRowKey), cfmap);
                                    // Execute the mutation
                                    client.batch_mutate(mutationMap, ThriftConverter.ToThriftConsistencyLevel(consistencyLevel));
                                    return null;
                                }
                            }, retry);
                }

                @Override
                public ListenableFuture<OperationResult<Void>> executeAsync() throws ConnectionException {
                    return executor.submit(new Callable<OperationResult<Void>>() {
                        @Override
                        public OperationResult<Void> call() throws Exception {
                            return execute();
                        }
                    });
                }

                @Override
                public RowCopier<K, C> withOriginalTimestamp(boolean useOriginalTimestamp) {
                    this.useOriginalTimestamp = useOriginalTimestamp;
                    return this;
                }
            };
        }
    };
}
java
public void setMethodDefaults(String method) {
    String defaultMethod = m_properties.getProperty(OutputKeys.METHOD);

    if ((null == defaultMethod) || !defaultMethod.equals(method)
        // bjm - add the next condition as a hack
        // but it is because both output_xml.properties and
        // output_unknown.properties have the same method=xml
        // for their default. Otherwise we end up with
        // a ToUnknownStream wrapping a ToXMLStream even
        // when the user says method="xml"
        // || defaultMethod.equals("xml")
        ) {
        Properties savedProps = m_properties;
        Properties newDefaults = OutputPropertiesFactory.getDefaultMethodProperties(method);
        m_properties = new Properties(newDefaults);
        copyFrom(savedProps, false);
    }
}
java
@NullSafe
public static boolean isDigits(String value) {
    for (char chr : toCharArray(value)) {
        if (!Character.isDigit(chr)) {
            return false;
        }
    }
    return hasText(value);
}
python
def page(self, category=values.unset, start_date=values.unset,
         end_date=values.unset, include_subaccounts=values.unset,
         page_token=values.unset, page_number=values.unset,
         page_size=values.unset):
    """
    Retrieve a single page of DailyInstance records from the API.
    Request is executed immediately

    :param DailyInstance.Category category: The usage category of the UsageRecord resources to read
    :param date start_date: Only include usage that has occurred on or after this date
    :param date end_date: Only include usage that occurred on or before this date
    :param bool include_subaccounts: Whether to include usage from the master account and all its subaccounts
    :param str page_token: PageToken provided by the API
    :param int page_number: Page Number, this value is simply for client state
    :param int page_size: Number of records to return, defaults to 50

    :returns: Page of DailyInstance
    :rtype: twilio.rest.api.v2010.account.usage.record.daily.DailyPage
    """
    params = values.of({
        'Category': category,
        'StartDate': serialize.iso8601_date(start_date),
        'EndDate': serialize.iso8601_date(end_date),
        'IncludeSubaccounts': include_subaccounts,
        'PageToken': page_token,
        'Page': page_number,
        'PageSize': page_size,
    })

    response = self._version.page(
        'GET',
        self._uri,
        params=params,
    )

    return DailyPage(self._version, response, self._solution)
python
def change_ssh_port():
    """
    For security, woven changes the default ssh port.
    """
    host = normalize(env.host_string)[1]

    after = env.port
    before = str(env.DEFAULT_SSH_PORT)

    host_string = join_host_strings(env.user, host, before)
    with settings(host_string=host_string, user=env.user):
        if env.verbosity:
            print env.host, "CHANGING SSH PORT TO: " + str(after)
        sed('/etc/ssh/sshd_config', 'Port ' + str(before), 'Port ' + str(after), use_sudo=True)
        if env.verbosity:
            print env.host, "RESTARTING SSH on", after
        sudo('/etc/init.d/ssh restart')
        return True
python
def updateIncomeProcess(self):
    '''
    An alternative method for constructing the income process in the infinite horizon model.

    Parameters
    ----------
    none

    Returns
    -------
    none
    '''
    if self.cycles == 0:
        tax_rate = (self.IncUnemp*self.UnempPrb)/((1.0-self.UnempPrb)*self.IndL)
        TranShkDstn = deepcopy(approxMeanOneLognormal(self.TranShkCount, sigma=self.TranShkStd[0], tail_N=0))
        TranShkDstn[0] = np.insert(TranShkDstn[0]*(1.0-self.UnempPrb), 0, self.UnempPrb)
        TranShkDstn[1] = np.insert(TranShkDstn[1]*(1.0-tax_rate)*self.IndL, 0, self.IncUnemp)
        PermShkDstn = approxMeanOneLognormal(self.PermShkCount, sigma=self.PermShkStd[0], tail_N=0)
        self.IncomeDstn = [combineIndepDstns(PermShkDstn, TranShkDstn)]
        self.TranShkDstn = TranShkDstn
        self.PermShkDstn = PermShkDstn
        self.addToTimeVary('IncomeDstn')
    else:
        # Do the usual method if this is the lifecycle model
        EstimationAgentClass.updateIncomeProcess(self)
java
public String getString(Object node, String expression) {
    return (String) evalXPath(expression, node, XPathConstants.STRING);
}
python
def make_url(url, *paths):
    """Joins individual URL strings together, and returns a single string.
    """
    for path in paths:
        # count=1 guards against the extra empty match at the end of the
        # string that re.sub also replaces on Python 3.7+
        url = re.sub(r'/?$', re.sub(r'^/?', '/', path), url, count=1)
    return url
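Expected behavior of the joiner, normalizing exactly one slash at each seam:

print(make_url('http://example.com', 'a', 'b'))      # http://example.com/a/b
print(make_url('http://example.com/', '/a/', '/b'))  # http://example.com/a/b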
java
public static <S extends Solution<?>> double[][] distanceMatrix(List<S> solutionSet) {
    double[][] distance = new double[solutionSet.size()][solutionSet.size()];
    for (int i = 0; i < solutionSet.size(); i++) {
        distance[i][i] = 0.0;
        for (int j = i + 1; j < solutionSet.size(); j++) {
            distance[i][j] = SolutionUtils.distanceBetweenObjectives(solutionSet.get(i), solutionSet.get(j));
            distance[j][i] = distance[i][j];
        }
    }
    return distance;
}
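The same symmetric fill pattern in Python, with a Euclidean distance standing in for SolutionUtils.distanceBetweenObjectives:

import numpy as np

def distance_matrix(objs):
    # objs: (n, m) array, one row of objective values per solution
    n = len(objs)
    d = np.zeros((n, n))
    for i in range(n):
        for j in range(i + 1, n):
            d[i, j] = d[j, i] = np.linalg.norm(objs[i] - objs[j])
    return d

print(distance_matrix(np.array([[0.0, 0.0], [3.0, 4.0]])))  # off-diagonals are 5.0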
java
public static Map<String, Object> buildEventAttributeMap(final Principal principal,
        final Optional<RegisteredService> service,
        final MultifactorAuthenticationProvider provider) {
    val map = new HashMap<String, Object>();
    map.put(Principal.class.getName(), principal);
    service.ifPresent(svc -> map.put(RegisteredService.class.getName(), svc));
    map.put(MultifactorAuthenticationProvider.class.getName(), provider);
    return map;
}
java
@UiHandler("m_submitButton") public void onClickSubmit(ClickEvent event) { String separator = ","; String selectedValue = m_separator.getText().trim(); if (!CmsStringUtil.isEmptyOrWhitespaceOnly(selectedValue)) { separator = selectedValue; } m_separatorField.setValue(separator); m_formPanel.submit(); }
java
protected Content getFramesJavaScript() {
    HtmlTree scriptTree = HtmlTree.SCRIPT();
    String scriptCode = "\n" +
            "    tmpTargetPage = \"\" + window.location.search;\n" +
            "    if (tmpTargetPage != \"\" && tmpTargetPage != \"undefined\")\n" +
            "        tmpTargetPage = tmpTargetPage.substring(1);\n" +
            "    if (tmpTargetPage.indexOf(\":\") != -1 || (tmpTargetPage != \"\" && !validURL(tmpTargetPage)))\n" +
            "        tmpTargetPage = \"undefined\";\n" +
            "    targetPage = tmpTargetPage;\n" +
            "    function validURL(url) {\n" +
            "        try {\n" +
            "            url = decodeURIComponent(url);\n" +
            "        }\n" +
            "        catch (error) {\n" +
            "            return false;\n" +
            "        }\n" +
            "        var pos = url.indexOf(\".html\");\n" +
            "        if (pos == -1 || pos != url.length - 5)\n" +
            "            return false;\n" +
            "        var allowNumber = false;\n" +
            "        var allowSep = false;\n" +
            "        var seenDot = false;\n" +
            "        for (var i = 0; i < url.length - 5; i++) {\n" +
            "            var ch = url.charAt(i);\n" +
            "            if ('a' <= ch && ch <= 'z' ||\n" +
            "                    'A' <= ch && ch <= 'Z' ||\n" +
            "                    ch == '$' ||\n" +
            "                    ch == '_' ||\n" +
            "                    ch.charCodeAt(0) > 127) {\n" +
            "                allowNumber = true;\n" +
            "                allowSep = true;\n" +
            "            } else if ('0' <= ch && ch <= '9'\n" +
            "                    || ch == '-') {\n" +
            "                if (!allowNumber)\n" +
            "                    return false;\n" +
            "            } else if (ch == '/' || ch == '.') {\n" +
            "                if (!allowSep)\n" +
            "                    return false;\n" +
            "                allowNumber = false;\n" +
            "                allowSep = false;\n" +
            "                if (ch == '.')\n" +
            "                    seenDot = true;\n" +
            "                if (ch == '/' && seenDot)\n" +
            "                    return false;\n" +
            "            } else {\n" +
            "                return false;\n" +
            "            }\n" +
            "        }\n" +
            "        return true;\n" +
            "    }\n" +
            "    function loadFrames() {\n" +
            "        if (targetPage != \"\" && targetPage != \"undefined\")\n" +
            "            top.classFrame.location = top.targetPage;\n" +
            "    }\n";
    RawHtml scriptContent = new RawHtml(scriptCode.replace("\n", DocletConstants.NL));
    scriptTree.addContent(scriptContent);
    return scriptTree;
}
java
public void endTiming(String name, float elapsed) {
    Counter counter = get(name, CounterType.Interval);
    calculateStats(counter, elapsed);
    update();
}
python
def closeEvent(self, event: QCloseEvent):
    """
    This function is automatically called when the window is closed using the
    close [X] button in the window decorations or by right clicking in the
    system window list and using the close action, or similar ways to close
    the window. Just ignore this event and simulate that the user used the
    action_close_window instead.

    To quote the Qt5 QCloseEvent documentation: If you do not want your
    widget to be hidden, or want some special handling, you should
    reimplement the event handler and ignore() the event.
    """
    event.ignore()
    # Be safe and emit this signal, because it might be connected to multiple slots.
    self.action_close_window.triggered.emit(True)
python
def display_xdata(self) -> DataAndMetadata.DataAndMetadata:
    """Return the extended data of this data item display.

    Display data will always be 1d or 2d and either int, float, or RGB data type.

    .. versionadded:: 1.0

    Scriptable: Yes
    """
    display_data_channel = self.__display_item.display_data_channel
    return display_data_channel.get_calculated_display_values(True).display_data_and_metadata
java
public ServiceFuture<List<EntityRole>> getCustomPrebuiltEntityRolesAsync(UUID appId, String versionId, UUID entityId, final ServiceCallback<List<EntityRole>> serviceCallback) {
    return ServiceFuture.fromResponse(getCustomPrebuiltEntityRolesWithServiceResponseAsync(appId, versionId, entityId), serviceCallback);
}
java
public String convertMappingOptionMapValueToString(EDataType eDataType, Object instanceValue) {
    return instanceValue == null ? null : instanceValue.toString();
}
python
def pipe_strreplace(context=None, _INPUT=None, conf=None, **kwargs):
    """A string module that replaces text. Loopable.

    Parameters
    ----------
    context : pipe2py.Context object
    _INPUT : iterable of items or strings
    conf : {
        'RULE': [
            {
                'param': {'value': <match type: 1=first, 2=last, 3=every>},
                'find': {'value': <text to find>},
                'replace': {'value': <replacement>}
            }
        ]
    }

    Returns
    -------
    _OUTPUT : generator of replaced strings
    """
    splits = get_splits(_INPUT, conf['RULE'], **kwargs)
    parsed = utils.dispatch(splits, *get_dispatch_funcs())
    _OUTPUT = starmap(parse_result, parsed)
    return _OUTPUT
java
@Nonnull
private JsonToken nextSymbol() {
    lastChar = 0;
    return new JsonToken(JsonToken.Type.SYMBOL, buffer, bufferOffset, 1, lineNo, linePos);
}
python
def setTimeStart(self, timeStart):
    """
    Sets the start time for this item. This will automatically push the
    end time to match the length for this item. So if the item starts
    at 11a and ends on 1p, and the start time is changed to 12p, the end
    time will change to 2p. To affect the length of the item, use either
    setLength, or setTimeEnd.

    :param timeStart | <QTime>
    """
    timeStart = QTime(timeStart)
    length = self.length()  # in minutes
    self._timeStart = timeStart
    self._timeEnd = timeStart.addSecs(length * 60)
    self.markForRebuild()
python
def xray_botocore_api_call(wrapped, instance, args, kwargs):
    """Wrapper around botocore's base client API call method."""
    return generic_xray_wrapper(
        wrapped, instance, args, kwargs,
        name=get_service_name,
        namespace='aws',
        metadata_extractor=extract_aws_metadata,
        error_handling_type=ERROR_HANDLING_BOTOCORE,
    )
java
public static double[] getOutRectangle(List<GeoPoint> geoPoints) {
    double nwLat = geoPoints.get(0).getLatitude();
    double nwLon = geoPoints.get(0).getLongitude();
    double seLat = geoPoints.get(0).getLatitude();
    double seLon = geoPoints.get(0).getLongitude();
    double minLon = 0, maxLon = 0, lon = 0;

    for (int i = 1; i < geoPoints.size(); i++) {
        if (geoPoints.get(i).getLatitude() > nwLat) {
            nwLat = geoPoints.get(i).getLatitude();
        }
        if (geoPoints.get(i).getLatitude() < seLat) {
            seLat = geoPoints.get(i).getLatitude();
        }
        double deltaLon = geoPoints.get(i).getLongitude() - geoPoints.get(i - 1).getLongitude();
        if (deltaLon < 0 && deltaLon > -180 || deltaLon > 270) {
            if (deltaLon > 270)
                deltaLon -= 360;
            lon += deltaLon;
            if (lon < minLon)
                minLon = lon;
        } else if (deltaLon > 0 && deltaLon <= 180 || deltaLon <= -270) {
            if (deltaLon <= -270)
                deltaLon += 360;
            lon += deltaLon;
            if (lon > maxLon)
                maxLon = lon;
        }
    }
    nwLon += minLon;
    seLon += maxLon;
    if (seLon - nwLon >= 360) {
        seLon = 180;
        nwLon = -180;
    } else {
        seLon = updateDegree(seLon);
        nwLon = updateDegree(nwLon);
    }
    return new double[] { nwLat, nwLon, seLat, seLon };
}
java
public static RRSIGRecord sign(RRset rrset, DNSKEYRecord key, PrivateKey privkey,
        Date inception, Date expiration, String provider) throws DNSSECException {
    int alg = key.getAlgorithm();
    checkAlgorithm(privkey, alg);
    RRSIGRecord rrsig = new RRSIGRecord(rrset.getName(), rrset.getDClass(),
            rrset.getTTL(), rrset.getType(), alg, rrset.getTTL(),
            expiration, inception, key.getFootprint(), key.getName(), null);
    rrsig.setSignature(sign(privkey, key.getPublicKey(), alg, digestRRset(rrsig, rrset), provider));
    return rrsig;
}
python
def most_visited_pages_stats():
    """
    Get stats for most visited pages.

    Returns:
        dict: more_than_10 and less_than_10: list of dict (bound + url list).
    """
    stats = {'more_than_10': [], 'less_than_10': {}}
    counter = Counter(list(RequestLog.objects.values_list('url', flat=True)))
    most_visited_pages = counter.most_common()
    bounds = (10000, 1000, 100, 10)
    subsets = [[] for _ in bounds]
    for u, c in most_visited_pages:
        if url_is_ignored(u):
            continue
        if c >= bounds[0]:
            subsets[0].append([u, c])
        elif c < bounds[-1]:
            subsets[-1].append([u, c])
        else:
            for i, bound in enumerate(bounds[:-1]):
                if bound > c >= bounds[i+1]:
                    subsets[i+1].append([u, c])
                    break
    stats['more_than_10'] = [
        {'bound': bound, 'subset': subset}
        for bound, subset in zip(bounds[:-1], subsets[:-1])]
    for subset in subsets[:-1]:
        for uc in subset:
            if url_is_project(uc[0]):
                if url_is_asset(uc[0]):
                    uc.append(ASSET)
                else:
                    uc.append(PROJECT)
            else:
                if url_is_asset(uc[0]):
                    uc.append(OLD_ASSET)
                elif url_is_common_asset(uc[0]):
                    uc.append(COMMON_ASSET)
                elif url_is_old_project(uc[0]):
                    uc.append(OLD_PROJECT)
                elif url_is_false_negative(uc[0]):
                    uc.append(FALSE_NEGATIVE)
                else:
                    uc.append(SUSPICIOUS)
    occurrences = {name: {'distinct': 0, 'total': 0}
                   for name in set(URL_TYPE.keys()) - {IGNORED}}
    for u, c in subsets[-1]:
        if url_is_project(u):
            if url_is_asset(u):
                occurrences[ASSET]['distinct'] += 1
                occurrences[ASSET]['total'] += c
            else:
                occurrences[PROJECT]['distinct'] += 1
                occurrences[PROJECT]['total'] += c
        else:
            if url_is_asset(u):
                occurrences[OLD_ASSET]['distinct'] += 1
                occurrences[OLD_ASSET]['total'] += c
            elif url_is_common_asset(u):
                occurrences[COMMON_ASSET]['distinct'] += 1
                occurrences[COMMON_ASSET]['total'] += c
            elif url_is_old_project(u):
                occurrences[OLD_PROJECT]['distinct'] += 1
                occurrences[OLD_PROJECT]['total'] += c
            elif url_is_false_negative(u):
                occurrences[FALSE_NEGATIVE]['distinct'] += 1
                occurrences[FALSE_NEGATIVE]['total'] += c
            else:
                occurrences[SUSPICIOUS]['distinct'] += 1
                occurrences[SUSPICIOUS]['total'] += c
    stats['less_than_10'] = occurrences
    return stats
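The bounds bucketing in the middle of the function can be isolated into a self-contained sketch (URLs and counts invented):

from collections import Counter

bounds = (10000, 1000, 100, 10)
counter = Counter({'/home': 12000, '/about': 500, '/rare': 3})
subsets = [[] for _ in bounds]
for url, c in counter.most_common():
    if c >= bounds[0]:
        subsets[0].append([url, c])
    elif c < bounds[-1]:
        subsets[-1].append([url, c])
    else:
        for i, bound in enumerate(bounds[:-1]):
            if bound > c >= bounds[i + 1]:
                subsets[i + 1].append([url, c])
                break
print(subsets)  # [[['/home', 12000]], [], [['/about', 500]], [['/rare', 3]]]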
python
def display(self, codes=[], fg=None, bg=None):
    """Displays the codes using ANSI escapes
    """
    codes, fg, bg = Magic.displayformat(codes, fg, bg)
    self.stream.write(Magic.display(codes, fg, bg))
    self.flush()
java
@Nonnull
public static <T> LTieFltConsumer<T> tieFltConsumerFrom(Consumer<LTieFltConsumerBuilder<T>> buildingFunction) {
    LTieFltConsumerBuilder builder = new LTieFltConsumerBuilder();
    buildingFunction.accept(builder);
    return builder.build();
}
python
def tooltip_queries(self, item, x_coord, y_coord, key_mode, tooltip, text):
    """
    This function is used for setting the tooltip on menus and submenus.
    """
    tooltip.set_text(text)
    return True
java
public void doValidRecord(boolean bDisplayOption) {
    if (m_recPackagesExclude == null) {
        RecordOwner recordOwner = this.getOwner().findRecordOwner();
        m_recPackagesExclude = new Packages(recordOwner);
        if (recordOwner != null)
            recordOwner.removeRecord(m_recPackagesExclude);
    }
    StringBuffer sb = new StringBuffer();
    sb.append("<excludes>\n");
    this.scanTreeForExcludes(sb, this.getOwner().getField(Packages.ID).toString());
    sb.append("</excludes>");
    m_fldExcludePackages.setString(sb.toString());
    super.doValidRecord(bDisplayOption);
}
python
def loadstack(self):
    print("Loading stack from: %s" % self.stack_fn)
    data = np.load(self.stack_fn, encoding='latin1')
    #self.fn_list = list([i.decode("utf-8") for i in data['fn_list']])
    self.fn_list = data['fn_list']
    #Load flags originally used for stack creation
    #self.flags = data['flags']
    #{'datestack':self.datestack, 'stats':self.stats, 'med':self.med, 'trend':self.trend, 'sort':self.sort, 'save':self.save}
    if 'source' in data:
        self.source = list(data['source'])
    else:
        self.source = ['None' for i in self.fn_list]
    if 'error' in data:
        self.error = np.ma.fix_invalid(data['error'], fill_value=-9999)
    else:
        self.error = np.ma.zeros(len(self.fn_list))
    #if 'error_dict_list' in data:
    #    self.error_dict_list = data['error_dict_list'][()]
    #else:
    self.error_dict_list = [None for i in self.fn_list]
    #This is a shortcut, should load from the data['date_list'] arrays
    if 'date_list_o' in data:
        from pygeotools.lib import timelib
        from datetime import datetime
        self.date_list_o = np.ma.fix_invalid(data['date_list_o'], fill_value=1.0)
        #This is a hack - need universal timelib time zone support or stripping
        self.date_list = np.ma.masked_equal([i.replace(tzinfo=None) for i in timelib.o2dt(self.date_list_o)], datetime(1,1,1))
    else:
        self.get_date_list()
    print("Loading ma stack")
    self.ma_stack = np.ma.fix_invalid(data['ma_stack_full']).astype(self.dtype)
    #Note: the str is an intermediate fix - all new stacks should have str written
    self.proj = str(data['proj'])
    #If we don't have gt, we're in trouble - can't recompute res/extent
    if 'gt' in data:
        self.gt = data['gt']
    else:
        print("No geotransform found in stack")
        #Check if res and extent are defined - can reconstruct
        #Should throw error
    #Note: Once we have gt, could just run get_res() and get_extent() to avoid the following
    #Or could check to make sure consistent
    #Some stacks in Oct 2015 and Nov 2015 did not have res/extent saved properly
    """
    if 'res' in data:
        if data['res'] != 'None':
            #self.res = float(data['res'])
            self.res = float(np.atleast_1d(data['res'])[0])
        else:
            self.get_res()
    else:
        self.get_res()
    if 'extent' in data:
        if data['extent'] != 'None':
            #self.extent = list(data['extent'])
            #self.extent = list(np.atleast_1d(data['extent'])[0])
            extent = np.atleast_1d(data['extent'])[0]
            if isinstance(extent, str):
                self.extent = [float(x) for x in extent.split()]
            else:
                self.extent = list(extent)
        else:
            self.get_extent()
    else:
        self.get_extent()
    """
    #Just do this to be safe, if gt is bad, no point in proceeding
    self.get_res()
    self.get_extent()
    saveflag = False
    if self.datestack:
        #statlist = ['dt_stack', 'dt_mean', 'dt_ptp', 'dt_min', 'dt_max', 'dt_center']
        statlist = ['dt_ptp', 'dt_min', 'dt_max', 'dt_center']
        if all([s in data for s in statlist]):
            print("Loading datestack")
            #self.dt_stack = np.ma.fix_invalid(data['dt_stack']).astype(self.dtype)
            #self.dt_stack_mean = np.ma.fix_invalid(data['dt_mean'], fill_value=-9999).astype(self.dtype)
            self.dt_stack_ptp = np.ma.fix_invalid(data['dt_ptp'], fill_value=-9999).astype(self.dtype)
            self.dt_stack_min = np.ma.fix_invalid(data['dt_min'], fill_value=-9999).astype(self.dtype)
            self.dt_stack_max = np.ma.fix_invalid(data['dt_max'], fill_value=-9999).astype(self.dtype)
            self.dt_stack_center = np.ma.fix_invalid(data['dt_center'], fill_value=-9999).astype(self.dtype)
        else:
            if self.date_list_o.count() > 1:
                #self.make_datestack()
                self.compute_dt_stats()
                self.write_datestack()
                saveflag = True
    if self.stats:
        #Could do this individually to save time
        statlist = ['count', 'mean', 'std', 'min', 'max']
        if self.med:
            statlist.append('med')
            statlist.append('nmad')
        if all([s in data for s in statlist]):
            print("Loading stats")
            self.stack_count = np.ma.masked_equal(data['count'], 0).astype(np.uint16)
            self.stack_mean = np.ma.fix_invalid(data['mean'], fill_value=-9999).astype(self.dtype)
            self.stack_std = np.ma.fix_invalid(data['std'], fill_value=-9999).astype(self.dtype)
            self.stack_min = np.ma.fix_invalid(data['min'], fill_value=-9999).astype(self.dtype)
            self.stack_max = np.ma.fix_invalid(data['max'], fill_value=-9999).astype(self.dtype)
            if self.med:
                self.stack_med = np.ma.fix_invalid(data['med'], fill_value=-9999).astype(self.dtype)
                self.stack_nmad = np.ma.fix_invalid(data['nmad'], fill_value=-9999).astype(self.dtype)
        else:
            if self.ma_stack.shape[0] > 1:
                self.compute_stats()
                self.write_stats()
                saveflag = True
    if self.trend:
        if 'n_thresh' in data:
            self.n_thresh = data['n_thresh']
        if 'min_dt_ptp' in data:
            self.min_dt_ptp = data['min_dt_ptp']
        if 'robust' in data:
            self.robust = data['robust']
        #statlist = ['trend', 'intercept', 'detrended_std', 'rsquared']
        statlist = ['trend', 'intercept', 'detrended_std']
        if all([s in data for s in statlist]):
            print("Loading trend")
            self.stack_trend = np.ma.fix_invalid(data['trend'], fill_value=-9999).astype(self.dtype)
            self.stack_intercept = np.ma.fix_invalid(data['intercept'], fill_value=-9999).astype(self.dtype)
            self.stack_detrended_std = np.ma.fix_invalid(data['detrended_std'], fill_value=-9999).astype(self.dtype)
            #self.stack_rsquared = np.ma.fix_invalid(data['rsquared'], fill_value=-9999).astype(self.dtype)
        else:
            if self.ma_stack.shape[0] >= self.n_thresh:
                self.compute_trend()
                self.write_trend()
                saveflag = True
    if saveflag:
        self.savestack()
    data.close()
java
public void destroy() {
    if (mContentView != null) {
        if (mGroupBasicAdapter != null) {
            mGroupBasicAdapter.destroy();
        }
        mContentView.setAdapter(null);
        mContentView = null;
    }
    TimerSupport timerSupport = getService(TimerSupport.class);
    if (timerSupport != null) {
        timerSupport.clear();
    }
    SimpleClickSupport simpleClickSupport = getService(SimpleClickSupport.class);
    if (simpleClickSupport != null) {
        simpleClickSupport.destroy();
    }
    ExposureSupport exposureSupport = getService(ExposureSupport.class);
    if (exposureSupport != null) {
        exposureSupport.destroy();
    }
    BusSupport busSupport = getService(BusSupport.class);
    if (busSupport != null) {
        busSupport.shutdown();
    }
    BannerSupport bannerSupport = getService(BannerSupport.class);
    if (bannerSupport != null) {
        bannerSupport.destroy();
    }
    MVHelper mvHelper = getService(MVHelper.class);
    if (mvHelper != null) {
        mvHelper.renderManager().destroyRenderService();
    }
}
python
def set_snap_server_variables(host, port, snap_extension='.xml', path=None):
    """Dynamically change the port and host variables in Snap! XML project files."""
    localdir = os.getcwd()
    if path is None:
        os.chdir(os.path.dirname(os.path.realpath(__file__)))
    else:
        os.chdir(path)

    xml_files = [f for f in os.listdir('.') if f.endswith(snap_extension)]
    for filename in xml_files:
        with open(filename, 'r') as xf:
            xml = xf.read()
        # Change host variable
        xml = re.sub(r'''<variable name="host"><l>[\s\S]*?<\/l><\/variable>''',
                     '''<variable name="host"><l>{}</l></variable>'''.format(host), xml)
        # Change host argument of "set $robot host"
        xml = re.sub(r'''<custom-block s="set \$robot host to \%s"><l>[\s\S]*?<\/l>''',
                     '''<custom-block s="set $robot host to %s"><l>{}</l>'''.format(host), xml)
        # Change port variable
        xml = re.sub(r'''<variable name="port"><l>[\s\S]*?<\/l><\/variable>''',
                     '''<variable name="port"><l>{}</l></variable>'''.format(port), xml)
        with open(filename, 'w') as xf:
            xf.write(xml)
    os.chdir(localdir)
python
def do_encode(cls, obj):
    # type: (Any) -> Any
    """Encodes the passed object into json"""
    if isinstance(obj, ConjureBeanType):
        return cls.encode_conjure_bean_type(obj)
    elif isinstance(obj, ConjureUnionType):
        return cls.encode_conjure_union_type(obj)
    elif isinstance(obj, ConjureEnumType):
        return obj.value
    elif isinstance(obj, list):
        return list(map(cls.do_encode, obj))
    elif isinstance(obj, dict):
        return {cls.do_encode(key): cls.do_encode(value)
                for key, value in obj.items()}
    else:
        return cls.encode_primitive(obj)
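A stripped-down, self-contained version of just the container recursion above, to show the dispatch shape (the Conjure branches are omitted):

def do_encode(obj):
    # simplified stand-in: handles only lists, dicts, and primitives
    if isinstance(obj, list):
        return [do_encode(x) for x in obj]
    if isinstance(obj, dict):
        return {do_encode(k): do_encode(v) for k, v in obj.items()}
    return obj  # primitives pass through unchanged

assert do_encode({"ids": [1, 2], "ok": True}) == {"ids": [1, 2], "ok": True}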
python
def create_new_client(self, give_focus=True, filename='', is_cython=False,
                      is_pylab=False, is_sympy=False, given_name=None):
    """Create a new client"""
    self.master_clients += 1
    client_id = dict(int_id=to_text_string(self.master_clients),
                     str_id='A')
    cf = self._new_connection_file()
    show_elapsed_time = self.get_option('show_elapsed_time')
    reset_warning = self.get_option('show_reset_namespace_warning')
    ask_before_restart = self.get_option('ask_before_restart')
    client = ClientWidget(self, id_=client_id,
                          history_filename=get_conf_path('history.py'),
                          config_options=self.config_options(),
                          additional_options=self.additional_options(
                              is_pylab=is_pylab,
                              is_sympy=is_sympy),
                          interpreter_versions=self.interpreter_versions(),
                          connection_file=cf,
                          menu_actions=self.menu_actions,
                          options_button=self.options_button,
                          show_elapsed_time=show_elapsed_time,
                          reset_warning=reset_warning,
                          given_name=given_name,
                          ask_before_restart=ask_before_restart,
                          css_path=self.css_path)

    # Change stderr_dir if requested
    if self.test_dir is not None:
        client.stderr_dir = self.test_dir

    self.add_tab(client, name=client.get_name(), filename=filename)

    if cf is None:
        error_msg = self.permission_error_msg.format(jupyter_runtime_dir())
        client.show_kernel_error(error_msg)
        return

    # Check if ipykernel is present in the external interpreter.
    # Else we won't be able to create a client
    if not CONF.get('main_interpreter', 'default'):
        pyexec = CONF.get('main_interpreter', 'executable')
        has_spyder_kernels = programs.is_module_installed(
            'spyder_kernels',
            interpreter=pyexec,
            version='>=1.0.0')
        if not has_spyder_kernels:
            client.show_kernel_error(
                _("Your Python environment or installation doesn't "
                  "have the <tt>spyder-kernels</tt> module or the "
                  "right version of it installed. "
                  "Without this module is not possible for "
                  "Spyder to create a console for you.<br><br>"
                  "You can install it by running in a system terminal:"
                  "<br><br>"
                  "<tt>conda install spyder-kernels</tt>"
                  "<br><br>or<br><br>"
                  "<tt>pip install spyder-kernels</tt>"))
            return

    self.connect_client_to_kernel(client, is_cython=is_cython,
                                  is_pylab=is_pylab, is_sympy=is_sympy)
    if client.shellwidget.kernel_manager is None:
        return
    self.register_client(client)
java
private boolean tryExecutingCommand(Command cmd)
        throws IOException, FTPReplyParseException, ServerException {
    Reply reply = controlChannel.exchange(cmd);
    return Reply.isPositiveCompletion(reply);
}
python
def order_duplicate_volume(self, origin_volume_id, origin_snapshot_id=None,
                           duplicate_size=None, duplicate_iops=None,
                           duplicate_tier_level=None,
                           duplicate_snapshot_size=None,
                           hourly_billing_flag=False):
    """Places an order for a duplicate block volume.

    :param origin_volume_id: The ID of the origin volume to be duplicated
    :param origin_snapshot_id: Origin snapshot ID to use for duplication
    :param duplicate_size: Size/capacity for the duplicate volume
    :param duplicate_iops: The IOPS per GB for the duplicate volume
    :param duplicate_tier_level: Tier level for the duplicate volume
    :param duplicate_snapshot_size: Snapshot space size for the duplicate
    :param hourly_billing_flag: Billing type, monthly (False) or hourly (True),
        default to monthly.
    :return: Returns a SoftLayer_Container_Product_Order_Receipt
    """
    block_mask = 'id,billingItem[location,hourlyFlag],snapshotCapacityGb,'\
                 'storageType[keyName],capacityGb,originalVolumeSize,'\
                 'provisionedIops,storageTierLevel,osType[keyName],'\
                 'staasVersion,hasEncryptionAtRest'
    origin_volume = self.get_block_volume_details(origin_volume_id,
                                                  mask=block_mask)

    if isinstance(utils.lookup(origin_volume, 'osType', 'keyName'), str):
        os_type = origin_volume['osType']['keyName']
    else:
        raise exceptions.SoftLayerError(
            "Cannot find origin volume's os-type")

    order = storage_utils.prepare_duplicate_order_object(
        self, origin_volume, duplicate_iops, duplicate_tier_level,
        duplicate_size, duplicate_snapshot_size, 'block',
        hourly_billing_flag
    )
    order['osFormatType'] = {'keyName': os_type}

    if origin_snapshot_id is not None:
        order['duplicateOriginSnapshotId'] = origin_snapshot_id

    return self.client.call('Product_Order', 'placeOrder', order)
java
public DeleteSessionResponse deleteSession(DeleteSessionRequest request) {
    checkNotNull(request, "The parameter request should NOT be null.");
    checkStringNotEmpty(request.getSessionId(),
            "The parameter sessionId should NOT be null or empty string.");
    InternalRequest internalRequest = createRequest(HttpMethodName.DELETE,
            request, LIVE_SESSION, request.getSessionId());
    return invokeHttpClient(internalRequest, DeleteSessionResponse.class);
}
python
def bfs(self, root=None, display=None):
    '''
    API: bfs(self, root=None, display=None)
    Description:
        Searches tree starting from node named root using breadth-first
        strategy if root argument is provided. Starts search from root
        node of the tree otherwise.
    Pre:
        Node indicated by root argument should exist.
    Input:
        root: Starting node name.
        display: Display argument.
    '''
    if root is None:
        root = self.root
    if display is None:
        display = self.attr['display']
    self.traverse(root, display, Queue())
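The method above delegates to traverse() with a FIFO queue; the classic pattern it relies on, as a self-contained sketch over an adjacency dict:

from collections import deque

def bfs_order(adj, root):
    # visit nodes level by level, earliest-discovered first
    seen, order, queue = {root}, [], deque([root])
    while queue:
        node = queue.popleft()
        order.append(node)
        for nbr in adj.get(node, []):
            if nbr not in seen:
                seen.add(nbr)
                queue.append(nbr)
    return order

print(bfs_order({'r': ['a', 'b'], 'a': ['c']}, 'r'))  # ['r', 'a', 'b', 'c']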
java
@Override
public void cleanupSession(long sessionId) {
    Set<Long> blockIds;
    try (LockResource lr = new LockResource(mLock)) {
        blockIds = mSessionIdToBlockIds.get(sessionId);
        if (blockIds == null) {
            return;
        }
    }
    // Note that, there can be a race condition that blockIds can be stale when we release the
    // access. The race condition only has a minimal negative consequence (printing extra logging
    // message), and is expected very rare to trigger.
    for (Long blockId : blockIds) {
        try {
            // Note that we don't need to explicitly call abortBlock to cleanup the temp block
            // in Local block store because they will be cleanup by the session cleaner in the
            // Local block store.
            closeReaderOrWriter(sessionId, blockId);
            releaseAccess(sessionId, blockId);
        } catch (Exception e) {
            LOG.warn("Failed to cleanup UFS block {}, session {}.", blockId, sessionId);
        }
    }
}
java
public int addNodeInDocOrder(Node node, XPathContext support) {
    if (!m_mutable)
        throw new RuntimeException(XSLMessages.createXPATHMessage(
                XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!"
    return addNodeInDocOrder(node, true, support);
}
java
public void declare(TypeId<?> type, String sourceFile, int flags,
        TypeId<?> supertype, TypeId<?>... interfaces) {
    TypeDeclaration declaration = getTypeDeclaration(type);
    int supportedFlags = Modifier.PUBLIC | Modifier.FINAL | Modifier.ABSTRACT
            | AccessFlags.ACC_SYNTHETIC;
    if ((flags & ~supportedFlags) != 0) {
        throw new IllegalArgumentException("Unexpected flag: " + Integer.toHexString(flags));
    }
    if (declaration.declared) {
        throw new IllegalStateException("already declared: " + type);
    }
    declaration.declared = true;
    declaration.flags = flags;
    declaration.supertype = supertype;
    declaration.sourceFile = sourceFile;
    declaration.interfaces = new TypeList(interfaces);
}
java
public IVersionRange merge(IVersionRange... ranges) {
    if (ranges.length < 2) {
        return ranges[0];
    }

    // Check the type of the first range
    if (!(ranges[0] instanceof VersionRange)) {
        if (!(ranges[0] instanceof OrRange)) {
            throw new UnsupportedOperationException("Incorrect type for ranges[0]");
        }
        if (((OrRange) ranges[0]).size() != 2) {
            throw new UnsupportedOperationException("Incorrect size for ranges[0]");
        }
    } else {
        if (!((VersionRange) ranges[0]).isUnbounded()) {
            throw new UnsupportedOperationException("ranges[0] should be unbounded (> or >=)");
        }
    }

    int lastIndex = ranges.length - 1;

    // Check the type of the last range
    if (!(ranges[lastIndex] instanceof VersionRange)) {
        if (!(ranges[lastIndex] instanceof OrRange)) {
            throw new UnsupportedOperationException("Incorrect type for ranges[last]");
        }
        if (((OrRange) ranges[lastIndex]).size() != 2) {
            throw new UnsupportedOperationException("Incorrect size for ranges[last]");
        }
    } else {
        if (((VersionRange) ranges[lastIndex]).isUnbounded()) {
            throw new UnsupportedOperationException("ranges[last] should be bounded (< or <=)");
        }
    }

    // Check the rest of the types
    for (int i = 1; i < lastIndex; i++) {
        if (!(ranges[i] instanceof OrRange)) {
            throw new UnsupportedOperationException("Incorrect type for ranges[" + i + "]");
        }
        if (((OrRange) ranges[i]).size() != 2) {
            throw new UnsupportedOperationException("Incorrect size for ranges[" + i + "]");
        }
    }

    List<IVersionRange> results = new LinkedList<IVersionRange>();
    IVersionRange last = null;
    for (int i = 0; i < ranges.length; i++) {
        IVersionRange range = ranges[i];
        if (last == null) {
            if (range instanceof VersionRange) {
                last = range;
            } else {
                OrRange orange = (OrRange) range;
                results.add(orange.first());
                last = orange.last();
            }
        } else {
            if (range instanceof VersionRange) {
                AndRange arange = new AndRange(last, range);
                results.add(arange);
                last = null;
            } else {
                OrRange orange = (OrRange) range;
                AndRange arange = new AndRange(last, orange.first());
                results.add(arange);
                last = orange.last();
            }
        }
    }
    if (last != null) {
        results.add(last);
    }

    return new OrRange(results);
}
java
public String printHelixSummary() { StringBuffer g = new StringBuffer(); //3-10 helix StringBuffer h = new StringBuffer(); //alpha helix StringBuffer i = new StringBuffer(); //pi-helix StringBuffer ss = new StringBuffer(); //SS summary StringBuffer aa = new StringBuffer(); //AA one-letter String nl = System.getProperty("line.separator"); g.append( "3 turn: "); h.append( "4 turn: "); i.append( "5 turn: "); ss.append( "SS: "); aa.append( "AA: "); for (int k = 0; k < groups.length; k++){ SecStrucState state = getSecStrucState(k); g.append(state.getTurn()[0]); h.append(state.getTurn()[1]); i.append(state.getTurn()[2]); ss.append(state.getType()); aa.append(StructureTools.get1LetterCode(groups[k].getPDBName())); } return g.toString()+nl+h.toString()+nl+ i.toString()+nl+ss.toString()+nl+aa.toString(); }
java
public boolean setColumnValue(int iColumnIndex, Object value, boolean bDisplay, int iMoveMode) { Convert fieldInfo = this.getFieldInfo(iColumnIndex); if (fieldInfo != null) { Object dataBefore = fieldInfo.getData(); if (!(value instanceof String)) fieldInfo.setData(value, bDisplay, iMoveMode); else fieldInfo.setString((String)value, bDisplay, iMoveMode); Object dataAfter = fieldInfo.getData(); if (dataBefore == null) return (dataAfter != null); else return (!dataBefore.equals(dataAfter)); } return false; }
java
public static List<String> splitAsList(String text, String delimiter) { List<String> answer = new ArrayList<String>(); if (text != null && text.length() > 0) { answer.addAll(Arrays.asList(text.split(delimiter))); } return answer; }
python
def show_weights(self, **kwargs): """ Call :func:`eli5.show_weights` for the locally-fit classification pipeline. Keyword arguments are passed to :func:`eli5.show_weights`. :func:`fit` must be called before using this method. """ self._fix_target_names(kwargs) return eli5.show_weights(self.clf_, vec=self.vec_, **kwargs)
java
public void scale(int scale){ parameterSize *= scale; updaterStateSize *= scale; workingMemoryFixedInference *= scale; workingMemoryVariableInference *= scale; cacheModeMemFixed = scaleEntries(cacheModeMemFixed, scale); cacheModeMemVariablePerEx = scaleEntries(cacheModeMemVariablePerEx, scale); }
java
public static ArrayList<String> fixRequiredByFeature(String fixApar, Map<String, ProvisioningFeatureDefinition> installedFeatures) { ArrayList<String> dependencies = new ArrayList<String>(); for (ProvisioningFeatureDefinition fd : installedFeatures.values()) { String requireFixes = fd.getHeader("IBM-Require-Fix"); if (requireFixes != null && requireFixes.length() > 0) { String[] apars = requireFixes.split(";"); for (String apar : apars) { if (apar.trim().equals(fixApar.trim())) { dependencies.add(apar); } } } } if (dependencies.isEmpty()) return null; return dependencies; }
python
def add(self, filename, raw_data=None, dx=None):
        """
        Generic method to add a file to the session.

        This is the main method to use when adding files to a Session!

        If an APK file is supplied, all DEX files are analyzed too.
        For DEX and ODEX files, only that single file is analyzed
        (there is nothing else to analyze).

        Returns the SHA256 of the analyzed file.

        :param filename: filename to load
        :param raw_data: bytes of the file, or None to load the file from filename
        :param dx: An already existing :class:`~androguard.core.analysis.analysis.Analysis` object
        :return: the sha256 of the file or None on failure
        """
        if not raw_data:
            log.debug("Loading file from '{}'".format(filename))
            with open(filename, "rb") as fp:
                raw_data = fp.read()

        ret = androconf.is_android_raw(raw_data)
        log.debug("Found filetype: '{}'".format(ret))
        if not ret:
            return None

        if ret == "APK":
            digest, _ = self.addAPK(filename, raw_data)
        elif ret == "DEX":
            digest, _, _ = self.addDEX(filename, raw_data, dx)
        elif ret == "DEY":
            digest, _, _ = self.addDEY(filename, raw_data, dx)
        else:
            return None

        return digest
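A hypothetical usage sketch of this entry point (androguard's Session; the file path is illustrative):

from androguard.session import Session

s = Session()
digest = s.add("app.apk")  # an APK triggers analysis of every contained DEX
if digest is None:
    print("not a recognized Android artifact")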
python
def ethnicities_clean():
    """ Get dictionary of unformatted ethnicity types mapped to clean
    corresponding ethnicity strings """
    eths_clean = {}
    fname = pkg_resources.resource_filename(__name__, 'resources/Ethnicity_Groups.csv')
    # 'rU' universal-newline mode was removed in Python 3.11; csv readers
    # should be given a file opened with newline='' instead.
    with open(fname, newline='') as csvfile:
        reader = csv.reader(csvfile, delimiter=',')
        first = []
        for row in reader:
            if first:
                for i in range(len(first)):
                    if first[i] and row[i]:
                        eths_clean[first[i]] = row[i]
                first = []
            else:
                first = deepcopy(row)
    return eths_clean
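The reader consumes the CSV in row pairs: a row of raw labels followed by a column-aligned row of clean labels. A self-contained sketch of that pairing logic with made-up values:

rows = [["hispanic", "white non-hisp"],   # raw spellings
        ["Hispanic", "White"],            # clean forms
        ["asian/pacific", ""],
        ["Asian", "unused"]]
mapping, first = {}, []
for row in rows:
    if first:
        mapping.update({f: c for f, c in zip(first, row) if f and c})
        first = []
    else:
        first = row[:]
print(mapping)
# {'hispanic': 'Hispanic', 'white non-hisp': 'White', 'asian/pacific': 'Asian'}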
java
public static <T extends CharSequence> T checkNotEmpty(final T reference) { if (TextUtils.isEmpty(reference)) { throw new IllegalArgumentException(); } return reference; }
java
private synchronized String publishObject(KeenProject project, URL url,
                                          final Map<String, ?> requestData) throws IOException {
    if (requestData == null || requestData.size() == 0) {
        KeenLogging.log("No API calls were made because there were no events to upload");
        return null;
    }

    // Build an output source which simply writes the serialized JSON to the output.
    OutputSource source = new OutputSource() {
        @Override
        public void writeTo(OutputStream out) throws IOException {
            OutputStreamWriter writer = new OutputStreamWriter(out, ENCODING);
            jsonHandler.writeJson(writer, requestData);
        }
    };

    // If logging is enabled, log the request being sent.
    if (KeenLogging.isLoggingEnabled()) {
        try {
            StringWriter writer = new StringWriter();
            jsonHandler.writeJson(writer, requestData);
            String request = writer.toString();
            KeenLogging.log(String.format(Locale.US, "Sent request '%s' to URL '%s'", request,
                    url.toString()));
        } catch (IOException e) {
            KeenLogging.log("Couldn't serialize the request for logging: ", e);
        }
    }

    // Send the request.
    String writeKey = project.getWriteKey();
    Request request = new Request(url, HttpMethods.POST, writeKey, source, proxy, connectTimeout, readTimeout);
    Response response = httpHandler.execute(request);

    // If logging is enabled, log the response.
    if (KeenLogging.isLoggingEnabled()) {
        KeenLogging.log(String.format(Locale.US, "Received response: '%s' (%d)", response.body,
                response.statusCode));
    }

    // If the request succeeded, return the response body. Otherwise throw an exception.
    if (response.isSuccess()) {
        return response.body;
    } else {
        throw new ServerException(response.body);
    }
}
python
def _create_alt_equals_ref_noncds(self): """Create an alt seq that matches the reference (for non-cds variants)""" alt_data = AltTranscriptData( list(self._transcript_data.transcript_sequence), self._transcript_data.cds_start, self._transcript_data.cds_stop, False, None, self._transcript_data.protein_accession, is_ambiguous=True) return alt_data
python
def score_large_straight_yatzy(dice: List[int]) -> int: """ Large straight scoring according to yatzy rules """ dice_set = set(dice) if _are_two_sets_equal({2, 3, 4, 5, 6}, dice_set): return sum(dice) return 0
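Worked example: a large straight is exactly the set {2, 3, 4, 5, 6}, and a qualifying roll always scores the pip sum 20 (here _are_two_sets_equal is taken to be a plain set-equality helper from the same module):

dice = [6, 4, 2, 5, 3]
assert set(dice) == {2, 3, 4, 5, 6}             # qualifies: sum(dice) == 20
assert set([1, 2, 3, 4, 5]) != {2, 3, 4, 5, 6}  # a small straight scores 0 here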
python
def get_question_passers(self, number: str) -> list:
        """ Get the list of students who passed a specific question in the course """
        try:
            # Request parameters
            params = {
                'HW_ID': number
            }

            # Fetch the page
            response = self.__session.get(
                self.__url + '/success.jsp', params=params, timeout=0.5,
                verify=False)
            soup = BeautifulSoup(response.text, 'html.parser')

            # Collect the student IDs of everyone who passed
            passers = []
            for tag in soup.find_all('tr'):
                # Extract the passer's student ID
                passer = tag.get_text().replace('\n', '').strip()
                # Skip the header row; the literal below is the page's
                # "student ID" header cell text and must match it exactly
                if passer != 'ๅญธ่™Ÿ':
                    passers.append(passer)

            # Return the result
            return passers

        except requests.exceptions.Timeout:
            return ["Timeout"]
java
private void initializeFragmentSwitcher() { mFragmentSwitcher = (FragmentSwitcher) findViewById(R.id.fragment_switcher); mFragmentAdapter = new FragmentStateArrayPagerAdapter(getSupportFragmentManager()); mFragmentSwitcher.setAdapter(mFragmentAdapter); }
java
public void fill(List<CmsBrokenLinkBean> brokenLinkBeans) { for (CmsBrokenLinkBean brokenLinkBean : brokenLinkBeans) { m_linkPanel.add(createTreeItem(brokenLinkBean)); } }
java
@Override public Map<String, Object> toMap() { Map<String, Object> map = C.newMap(); for (Map.Entry<String, ValueObject> entry : entrySet()) { map.put(entry.getKey(), entry.getValue().value()); } return map; }
python
def url_to_fn(url):
    """
    Convert `url` to filename used to download the datasets.

    ``http://kitakitsune.org/xe`` -> ``kitakitsune.org_xe``.

    Args:
        url (str): URL of the resource.

    Returns:
        str: Filename derived from the URL.
    """
    url = url.replace("http://", "").replace("https://", "")
    url = url.split("?")[0]

    return url.replace("%", "_").replace("/", "_")
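A self-contained check of the normalization (the two-line body is restated so the asserts run on their own):

def _url_to_fn(url):
    url = url.replace("http://", "").replace("https://", "")
    url = url.split("?")[0]
    return url.replace("%", "_").replace("/", "_")

assert _url_to_fn("http://kitakitsune.org/xe") == "kitakitsune.org_xe"
assert _url_to_fn("https://example.com/a/b?q=1") == "example.com_a_b"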
python
def ops_entity(self, ops):
        ''' Returns a new multi-op entity name string that represents
        all the given operations and caveats. It returns the same value
        regardless of the ordering of the operations. It assumes that the
        operations have been canonicalized and that there's at least one
        operation.

        :param ops: iterable of operations, each carrying an action and an
        entity attribute.
        :return: string that represents all the given operations and caveats.
        '''
        # Hash the operations; canonicalization (sorting and removing
        # duplicates) is assumed to have been done by the caller.
        hash_entity = hashlib.sha256()
        for op in ops:
            hash_entity.update('{}\n{}\n'.format(
                op.action, op.entity).encode())
        hash_encoded = base64.urlsafe_b64encode(hash_entity.digest())
        return 'multi-' + hash_encoded.decode('utf-8').rstrip('=')
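A self-contained sketch of the naming scheme with a stand-in Op type; the hash covers the operations in the order given, which is why canonicalized (sorted, de-duplicated) input is required:

import base64
import hashlib
from collections import namedtuple

Op = namedtuple('Op', 'entity action')

def multi_op_entity(ops):
    h = hashlib.sha256()
    for op in ops:
        h.update('{}\n{}\n'.format(op.action, op.entity).encode())
    return 'multi-' + base64.urlsafe_b64encode(h.digest()).decode('utf-8').rstrip('=')

ops = sorted({Op('photo', 'read'), Op('photo', 'write')})  # canonicalize first
print(multi_op_entity(ops))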
java
public void setApplicationId(String applicationId) { // Set the application id as a GeoPackage int applicationIdInt = ByteBuffer.wrap(applicationId.getBytes()) .asIntBuffer().get(); execSQL(String.format("PRAGMA application_id = %d;", applicationIdInt)); }
java
@Override public Object getLock() { ResourceSet resourceSet = this.getResourceSet(); if ((resourceSet instanceof ISynchronizable<?>)) { return ((ISynchronizable<?>) resourceSet).getLock(); } return this; }
python
def read_graph(filename, directed=False, weighted=False, default_weight=None):
    """Read a graph from a text file

    :param filename: plain text file. All numbers are separated by space.
        Starts with a line containing n (#vertices) and m (#edges).
        Then m lines follow, for each edge.
        Vertices are numbered from 0 to n-1.
        Line for unweighted edge u,v contains two integers u, v.
        Line for weighted edge u,v contains three integers u, v, w[u,v].
    :param directed: true for a directed graph, false for undirected
    :param weighted: true for an edge weighted graph
    :returns: graph in listlist format, possibly followed by weight matrix
    :complexity: O(n + m) for unweighted graph,
                 :math:`O(n^2)` for weighted graph
    """
    with open(filename, 'r') as f:
        while True:
            line = f.readline()        # ignore leading comments
            if line[0] != '#':
                break
        nb_nodes, nb_edges = tuple(map(int, line.split()))
        graph = [[] for u in range(nb_nodes)]
        if weighted:
            weight = [[default_weight] * nb_nodes for v in range(nb_nodes)]
            for v in range(nb_nodes):
                weight[v][v] = 0
            for _ in range(nb_edges):
                u, v, w = readtab(f, int)
                graph[u].append(v)
                weight[u][v] = w
                if not directed:
                    graph[v].append(u)
                    weight[v][u] = w
            return graph, weight
        else:
            for _ in range(nb_edges):
                # if the file contains weights, they are ignored
                u, v = readtab(f, int)[:2]
                graph[u].append(v)
                if not directed:
                    graph[v].append(u)
            return graph
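A minimal sketch of the expected file layout (readtab is assumed to be this module's helper that reads one line and maps a type over its whitespace-separated fields):

from tempfile import NamedTemporaryFile

with NamedTemporaryFile("w", suffix=".txt", delete=False) as f:
    f.write("3 2\n"    # n=3 vertices, m=2 edges
            "0 1 5\n"  # edge 0-1 with weight 5
            "1 2 7\n") # edge 1-2 with weight 7
    path = f.name
# graph, weight = read_graph(path, weighted=True)
# graph == [[1], [0, 2], [1]] and weight[0][1] == 5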
java
public static base_responses create(nitro_service client, dnskey resources[]) throws Exception { base_responses result = null; if (resources != null && resources.length > 0) { dnskey createresources[] = new dnskey[resources.length]; for (int i=0;i<resources.length;i++){ createresources[i] = new dnskey(); createresources[i].zonename = resources[i].zonename; createresources[i].keytype = resources[i].keytype; createresources[i].algorithm = resources[i].algorithm; createresources[i].keysize = resources[i].keysize; createresources[i].filenameprefix = resources[i].filenameprefix; } result = perform_operation_bulk_request(client, createresources,"create"); } return result; }
python
def ParseGroupEntry(self, line): """Extract the members of a group from /etc/group.""" fields = ("name", "passwd", "gid", "members") if line: rslt = dict(zip(fields, line.split(":"))) name = rslt["name"] group = self.entry.setdefault(name, rdf_client.Group(name=name)) group.pw_entry.store = self.GetPwStore(rslt["passwd"]) if group.pw_entry.store == self.base_store: group.pw_entry.hash_type = self.GetHashType(rslt["passwd"]) # If the group contains NIS entries, they may not have a gid. if rslt["gid"]: group.gid = int(rslt["gid"]) group.members = set(rslt["members"].split(","))
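Shape of the input this parser expects: one colon-separated /etc/group line with comma-separated members (values are illustrative):

line = "staff:x:50:alice,bob"
fields = ("name", "passwd", "gid", "members")
print(dict(zip(fields, line.split(":"))))
# {'name': 'staff', 'passwd': 'x', 'gid': '50', 'members': 'alice,bob'}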
java
public Solution getSolution(String id) { for( Solution s : Solution.all() ) if(s.id.equals(id)) return s; return null; }
java
public static String toGetterName(JavacNode field) { return HandlerUtil.toGetterName(field.getAst(), getAccessorsForField(field), field.getName(), isBoolean(field)); }
python
def _set_title(self, item):
        """
        attempt to set title from wikidata
        """
        title = None
        lang = self.params['lang']
        label = self.data['label']

        if item.get('sitelinks'):
            for link in item['sitelinks']:
                if link == "%swiki" % lang:
                    title = item['sitelinks'][link]['title']
            if title:
                self.data['title'] = title.replace(' ', '_')

        if not self.data.get('title') and label:
            self.data['title'] = label.replace(' ', '_')

        if self.data.get('title') and not self.params.get('title'):
            self.params['title'] = self.data['title']
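Shape of the wikidata fragment consumed here, with illustrative values:

item = {"sitelinks": {"enwiki": {"site": "enwiki", "title": "Douglas Adams"}}}
lang = "en"
for link in item["sitelinks"]:
    if link == "%swiki" % lang:
        print(item["sitelinks"][link]["title"].replace(" ", "_"))  # Douglas_Adams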
java
public Object getNearObject(Object farObject) { Object nearObject = null; if (farObject instanceof BridgeFacet) { BridgeFacet facet = (BridgeFacet) farObject; if (facet.hasBridgeFacets()) { BridgeFacets facets = facet.getBridgeFacets(); if (facets.hasNearObject()) { nearObject = nearType.cast(facets.getNearObject()); } } } if (nearObject == null) { if (!farType.isInstance(farObject)) { RuntimeException exception = new ClassCastException("Class: " + display(farType) + ": object: " + display(farObject)); if (logger.isTraceEnabled()) { logger.trace("Far type: " + display(farType), exception); } throw exception; } Class<?>[] interfaces = new Class[] { BridgeFacet.class }; final BridgeFacets facets; if (farObject instanceof BaseBridgeFacet && ((BaseBridgeFacet) farObject).hasBridgeFacets()) { facets = ((BaseBridgeFacet) farObject).getBridgeFacets(); } else { Map<Integer,Object> helperInstances = new HashMap<>(); for (int i = 0; i < helpers.length; i++) { Object helper = helpers[i]; if (helper instanceof Class) { Class<?> helperType = (Class<?>) helper; try { Object helperInstance = helperType.newInstance(); helperInstances.put(i, helperInstance); } catch (Throwable throwable) { throw ExceptionWrapper.wrap(throwable); } } } facets = new BridgeFacets(farType.cast(farObject), this, helperInstances); for (Object helperInstance : helperInstances.values()) { if (helperInstance instanceof BaseBridgeFacet) { ((BaseBridgeFacet) helperInstance).setBridgeFacets(facets); } } } if (farObject instanceof BaseBridgeFacet && !((BaseBridgeFacet) farObject).hasBridgeFacets()) { ((BaseBridgeFacet) farObject).setBridgeFacets(facets); } nearObject = newProxyInstance(getClass().getClassLoader(), nearType, interfaces, (object, method, parameters) -> { Object result; if (logger.isTraceEnabled()) { logger.trace("Facets getter: " + display(facetsGetter)); } if (method.equals(facetsGetter)) { result = facets; } else if (method.equals(facetsChecker)) { result = Boolean.TRUE; } else { Binding binding = methodMap.get(method); if (binding == null) { String message = "No binding found: " + display(nearType) + ": " + display(method); logger.warn(message); throw new UnsupportedOperationException(message); } result = binding.invoke(facets, parameters); Class<?> returnType = method.getReturnType(); if (logger.isTraceEnabled()) { logger.trace("Return type: " + displayWithTypes(returnType)); } if (result instanceof TypedIterable) { TypedIterable<?> typedIterable = (TypedIterable<?>) result; Class<?> baseType = typedIterable.getType(); if (logger.isTraceEnabled()) { logger.trace("Iterable base type: " + display(baseType)); } if (registry.hasFarType(baseType)) { Class<?> type = getNearType(baseType); result = getAdaptedIterable(returnType, typedIterable, type); } else if (registry.hasNearType(baseType)) { result = getAdaptedIterable(returnType, typedIterable, baseType); } else if (logger.isTraceEnabled()) { logger.trace("Iterable base type not found in registry"); } } else if (registry.hasNearType(returnType)) { Class<?> farType1 = registry.getFarType(method.getReturnType()); if (logger.isTraceEnabled()) { logger.trace("Far type of return type: " + display(farType1)); } if (farType1.isInstance(result)) { BridgeFactory factory = registry.getBridgeFactory(farType1); if (logger.isTraceEnabled()) { logger.trace("Far type of return type factory: " + display(factory.getFarType())); logger.trace("Result: " + display(result)); logger.trace("Return type: " + display(returnType) + ": factory PO type: " + display(factory.getNearType())); } try { 
result = factory.getNearObject(result); } catch (ClassCastException exception) { if (logger.isTraceEnabled()) { logger.trace("Stack trace", exception); } throw exception; } } } else if (logger.isTraceEnabled()) { logger.trace("Not adapted"); } } return result; }); facets.setNearObject(nearObject); } return nearObject; }
java
public KerasModelBuilder modelHdf5Filename(String modelHdf5Filename) throws UnsupportedKerasConfigurationException, InvalidKerasConfigurationException, IOException { checkForExistence(modelHdf5Filename); synchronized (Hdf5Archive.LOCK_OBJECT) { try { this.weightsArchive = this.trainingArchive = new Hdf5Archive(modelHdf5Filename); this.weightsRoot = config.getTrainingWeightsRoot(); if (!this.weightsArchive.hasAttribute(config.getTrainingModelConfigAttribute())) throw new InvalidKerasConfigurationException( "Model configuration attribute missing from " + modelHdf5Filename + " archive."); String initialModelJson = this.weightsArchive.readAttributeAsJson( config.getTrainingModelConfigAttribute()); String kerasVersion = this.weightsArchive.readAttributeAsFixedLengthString( config.getFieldKerasVersion(), 5); Map<String, Object> modelMapper = KerasModelUtils.parseJsonString(initialModelJson); modelMapper.put(config.getFieldKerasVersion(), kerasVersion); int majorKerasVersion = Character.getNumericValue(kerasVersion.charAt(0)); if (majorKerasVersion == 2) { String backend = this.weightsArchive.readAttributeAsString(config.getFieldBackend()); modelMapper.put(config.getFieldBackend(), backend); } this.modelJson = new ObjectMapper().writeValueAsString(modelMapper); if (this.trainingArchive.hasAttribute(config.getTrainingTrainingConfigAttribute())) this.trainingJson = this.trainingArchive .readAttributeAsJson(config.getTrainingTrainingConfigAttribute()); } catch (Throwable t) { close(); throw t; } } return this; }
java
public static Date parseDate(String str, Locale locale, String... parsePatterns) throws ParseException { return parseDateWithLeniency(str, locale, parsePatterns, true); }
python
def _send_result_to_redis(self, result): """Sends the result of a poll to redis to be used potentially by another process @param result: the result retrieved from kafka""" if self.redis_connected: self.logger.debug("Sending result to redis") try: key = "rest:poll:{u}".format(u=result['uuid']) self.redis_conn.set(key, json.dumps(result)) except ConnectionError: self.logger.error("Lost connection to Redis") self._spawn_redis_connection_thread() else: self.logger.warning("Unable to send result to redis, not connected")
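A sketch of the key/value pair this method writes (the uuid is illustrative):

import json

result = {"uuid": "abc123", "status": "ok"}
key = "rest:poll:{u}".format(u=result["uuid"])  # -> "rest:poll:abc123"
value = json.dumps(result)                      # stored via redis_conn.set(key, value)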
python
def lock_reward(self, agreement_id, amount, account): """ Lock reward condition. :param agreement_id: id of the agreement, hex str :param amount: Amount of tokens, int :param account: Account :return: bool """ return self._keeper.lock_reward_condition.fulfill( agreement_id, self._keeper.escrow_reward_condition.address, amount, account )
java
public Object addMenu(ScreenFieldView sfView) {
    char rgchShortcuts[] = new char[30];
    Object menubar = sfView.createMenu();
    sfView.addStandardMenu(menubar, MenuConstants.FILE, rgchShortcuts);
    sfView.addStandardMenu(menubar, MenuConstants.EDIT, rgchShortcuts);
    Object menu = null;
    menu = sfView.addStandardMenu(menubar, MenuConstants.RECORD, rgchShortcuts);
    // menu.addSeparator();
    sfView.addMenuItem(menu, this.getString(MenuConstants.REQUERY));
    // menuItem.setIcon(this.loadImageIcon(MenuConstants.REQUERY, null));
    sfView.addStandardMenu(menubar, MenuConstants.HELP, rgchShortcuts);
    return menubar;
}
java
protected void throwJsonParameterParseFailureException(Object bean, String name, String json, Class<?> propertyType, RuntimeException e) { final StringBuilder sb = new StringBuilder(); sb.append("Cannot parse json of the request parameter."); final Map<String, Object> retryMap = retryJsonAsMapForDebug(json); final List<JsonDebugChallenge> challengeList = prepareJsonParameterDebugChallengeList(retryMap, propertyType, json, null); final String challengeDisp = buildJsonParameterDebugChallengeDisp(challengeList); buildClientErrorHeader(sb, "JsonParameter Parse Failure", bean, name, json, propertyType, challengeDisp); throwRequestJsonParseFailureException(sb.toString(), challengeList, e); }
python
def count(self, query=None, date=None):
        '''
        Run a query on the given cube and return only the count of
        resulting matches.

        :param query: The query in pql
        :param date: date (metrique date range) that should be queried.
                    If date==None then the most recent versions of the
                    objects will be queried.
        '''
        return self.proxy.count(table=self.name, query=query, date=date)
java
private VarTensor calcFactorBeliefs(Factor factor) { if (factor instanceof GlobalFactor) { log.warn("Getting marginals of a global factor is not supported." + " This will require exponential space to store the resulting factor." + " This should only be used for testing."); } // Compute the product of all messages sent to this factor. VarTensor prod = safeNewVarTensor(factor); calcProductAtFactor(factor.getId(), prod, -1, -1); return prod; }