language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
---|---|
java
|
@Override
public java.util.concurrent.Future<CreateStreamResult> createStreamAsync(String streamName, Integer shardCount) {
    // Convenience overload: wrap the bare parameters in a request object and
    // delegate to the request-based async variant.
    final CreateStreamRequest request = new CreateStreamRequest()
            .withStreamName(streamName)
            .withShardCount(shardCount);
    return createStreamAsync(request);
}
|
java
|
public static void unregister(String ID) {
    // Drop the cached display name first (cache is keyed case-insensitively),
    // then remove the registry entry, which is keyed by the exact string.
    final CaseInsensitiveString cacheKey = new CaseInsensitiveString(ID);
    displayNameCache.remove(cacheKey);
    registry.remove(ID);
}
|
python
|
def _find_title(html_file):
"""Finds the <title> for the given HTML file, or (Unknown)."""
# TODO Is it necessary to read files like this?
with html_file.open() as f:
for line in f:
if '<title>' in line:
# + 7 to skip len('<title>')
return line[line.index('<title>') + 7:line.index('</title>')]
return '(Unknown)'
|
java
|
/**
 * Searches Central for Maven artifacts matching the dependency's SHA-1,
 * retrying transient I/O failures with exponential back-off.
 *
 * @param dependency the dependency whose SHA-1 checksum is searched for
 * @return the list of matching Maven artifacts
 * @throws IOException if the search still fails after all retries;
 *         FileNotFoundException is rethrown immediately without retrying
 */
protected List<MavenArtifact> fetchMavenArtifacts(Dependency dependency) throws IOException {
    IOException lastException = null;
    long sleepingTimeBetweenRetriesInMillis = BASE_RETRY_WAIT;
    int triesLeft = numberOfRetries;
    while (triesLeft-- > 0) {
        try {
            return searcher.searchSha1(dependency.getSha1sum());
        } catch (FileNotFoundException fnfe) {
            // retry does not make sense, just throw the exception
            throw fnfe;
        } catch (IOException ioe) {
            LOGGER.debug("Could not connect to Central search (tries left: {}): {}",
                    triesLeft, ioe.getMessage());
            lastException = ioe;
            if (triesLeft > 0) {
                try {
                    // Exponential back-off: double the wait after each failure.
                    Thread.sleep(sleepingTimeBetweenRetriesInMillis);
                    sleepingTimeBetweenRetriesInMillis *= 2;
                } catch (InterruptedException e) {
                    // Restore the interrupt flag; the loop still performs the
                    // remaining retries rather than aborting early.
                    Thread.currentThread().interrupt();
                }
            }
        }
    }
    final String message = "Finally failed connecting to Central search."
            + " Giving up after " + numberOfRetries + " tries.";
    throw new IOException(message, lastException);
}
|
java
|
/**
 * Parses a protobuf-style duration string such as "3.5s" or "-2s".
 *
 * @param value duration text; must end with 's', optionally prefixed with
 *        '-' and optionally containing a fractional (nanosecond) part
 * @return the parsed duration as produced by {@code normalizedDuration}
 * @throws ParseException if the text is malformed or out of range
 */
private static long parseDuration(String value) throws ParseException {
    // Must ended with "s".
    if (value.isEmpty() || value.charAt(value.length() - 1) != 's') {
        throw new ParseException("Invalid duration string: " + value, 0);
    }
    boolean negative = false;
    if (value.charAt(0) == '-') {
        negative = true;
        value = value.substring(1);
    }
    // Split "<seconds>[.<nanos>]s" into its two numeric components.
    String secondValue = value.substring(0, value.length() - 1);
    String nanoValue = "";
    int pointPosition = secondValue.indexOf('.');
    if (pointPosition != -1) {
        nanoValue = secondValue.substring(pointPosition + 1);
        secondValue = secondValue.substring(0, pointPosition);
    }
    // NOTE(review): an empty seconds part (e.g. ".5s") makes parseLong throw
    // NumberFormatException rather than ParseException — confirm intended.
    long seconds = Long.parseLong(secondValue);
    int nanos = nanoValue.isEmpty() ? 0 : parseNanos(nanoValue);
    // The sign was stripped above, so a negative value here means a second
    // '-' appeared inside the string.
    if (seconds < 0) {
        throw new ParseException("Invalid duration string: " + value, 0);
    }
    if (negative) {
        seconds = -seconds;
        nanos = -nanos;
    }
    try {
        return normalizedDuration(seconds, nanos);
    } catch (IllegalArgumentException e) {
        throw new ParseException("Duration value is out of range.", 0);
    }
}
|
java
|
public void setIntProperty( String key , int value )
{
    // The backing settings store only holds strings, so render the int first.
    final String asText = String.valueOf(value);
    settings.setProperty(key, asText);
}
|
python
|
def add_tasks(self, pcs_files, dirname=''):
    '''Create batch download tasks, including directories.'''
    def on_list_dir(info, error=None):
        # Callback for the async directory listing: recurse into the listed
        # folder's entries, or show an error dialog when the scan failed or
        # the folder was empty.
        path, pcs_files = info
        if error or not pcs_files:
            dialog = Gtk.MessageDialog(self.app.window,
                                       Gtk.DialogFlags.MODAL,
                                       Gtk.MessageType.ERROR, Gtk.ButtonsType.CLOSE,
                                       _('Failed to scan folder to download'))
            dialog.format_secondary_text(
                _('Please download {0} again').format(path))
            dialog.run()
            dialog.destroy()
            return
        self.add_tasks(pcs_files, dirname)
    self.check_first()
    for pcs_file in pcs_files:
        if pcs_file['isdir']:
            # Directories are expanded asynchronously; plain files are
            # queued as tasks directly.
            gutil.async_call(pcs.list_dir_all, self.app.cookie,
                             self.app.tokens, pcs_file['path'],
                             callback=on_list_dir)
        else:
            self.add_task(pcs_file, dirname)
    self.check_commit(force=True)
|
java
|
/**
 * Serializes the three session timing values into a fixed-layout byte array:
 * 4 bytes for the max-inactive interval followed by two 8-byte timestamps.
 */
@Nonnull
public static byte[] encode( final long maxInactiveInterval, final long lastAccessedTime, final long thisAccessedTime ) {
    final byte[] data = new byte[ 4 + 2 * 8 ];
    int offset = 0;
    encodeNum( maxInactiveInterval, data, offset, 4 );
    offset += 4;
    encodeNum( lastAccessedTime, data, offset, 8 );
    offset += 8;
    encodeNum( thisAccessedTime, data, offset, 8 );
    return data;
}
|
python
|
def GetMessage(self, log_source, lcid, message_identifier):
    """Retrieves a specific message for a specific Event Log source.

    Args:
      log_source (str): Event Log source.
      lcid (int): language code identifier (LCID).
      message_identifier (int): message identifier.

    Returns:
      str: message string or None if not available.
    """
    event_log_provider_key = self._GetEventLogProviderKey(log_source)
    if not event_log_provider_key:
        return None
    generator = self._GetMessageFileKeys(event_log_provider_key)
    if not generator:
        return None
    # TODO: cache a number of message strings.
    message_string = None
    # Try each message file in turn until one yields the requested message.
    for message_file_key in generator:
        message_string = self._GetMessage(
            message_file_key, lcid, message_identifier)
        if message_string:
            break
    # NOTE(review): the reformat below runs even when message_string is None;
    # confirm _ReformatMessageString handles a None argument.
    if self._string_format == 'wrc':
        message_string = self._ReformatMessageString(message_string)
    return message_string
|
python
|
def load(self, ):
    """If the reference is in the scene but unloaded, load it.

    .. Note:: Do not confuse this with reference or import. Load means that it is already referenced.
              But the data from the reference was not read until now. Load loads the data from the reference.

    This will call :meth:`RefobjInterface.load` and set the status to :data:`Reftrack.LOADED`.

    :returns: None
    :rtype: None
    :raises: :class:`ReftrackIntegrityError`
    """
    assert self.status() == self.UNLOADED,\
        "Cannot load if there is no unloaded reference. Use reference instead."
    # Load the actual reference data, then bring bookkeeping and UI up to
    # date (status, children, restrictions, change notification — in order).
    self.get_refobjinter().load(self._refobj)
    self.set_status(self.LOADED)
    self.fetch_new_children()
    self.update_restrictions()
    self.emit_data_changed()
|
java
|
/**
 * Handles a reconfiguration POST: logs the requested changes, applies them to
 * the servlet's Reconfigurable, and renders a minimal result page.
 * Sends HTTP 500 with the stringified exception if applying the changes fails.
 */
@SuppressWarnings("unchecked")
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp)
    throws ServletException, IOException {
  LOG.info("POST from address: " + req.getRemoteAddr() + " Values trying to set: "
      + mapToString(req.getParameterMap()));
  PrintWriter out = resp.getWriter();
  Reconfigurable reconf = getReconfigurable(req);
  String nodeName = reconf.getClass().getCanonicalName();
  printHeader(out, nodeName);
  try {
    applyChanges(out, reconf, req);
  } catch (ReconfigurationException e) {
    // The header has already been written; on failure we abandon the page
    // and reply with a 500 instead of the footer/back link.
    resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
        StringUtils.stringifyException(e));
    return;
  }
  out.println("<p><a href=\"" + req.getServletPath() + "\">back</a></p>");
  printFooter(out);
}
|
java
|
/**
 * Stops, temporarily switching the graceful flag to the requested value and
 * always restoring the previous flag afterwards.
 */
public synchronized void stop(boolean graceful)
    throws InterruptedException
{
    final boolean previous = _gracefulStop;
    _gracefulStop = graceful;
    try
    {
        stop();
    }
    finally
    {
        _gracefulStop = previous;
    }
}
|
python
|
def base_path(self):
    """Base absolute path of container."""
    # This entry lives directly under its container's base path.
    parent = self.container.base_path
    return os.path.join(parent, self.name)
|
java
|
private boolean detectLogFromBugsnag(Throwable throwable) {
    // A throwable originates from the Bugsnag library when any stack frame
    // belongs to one of the excluded (library-internal) classes.
    for (StackTraceElement frame : throwable.getStackTrace()) {
        final String className = frame.getClassName();
        for (String excludedClass : EXCLUDED_CLASSES) {
            if (className.startsWith(excludedClass)) {
                return true;
            }
        }
    }
    return false;
}
|
java
|
protected void configureWebApp () throws Exception
{
    // Finish configuring the webapp by binding this context to each
    // configuration and running them in declaration order.
    final int count = _configurations.length;
    for (int index = 0; index < count; index++)
    {
        _configurations[index].setWebApplicationContext(this);
        _configurations[index].configureWebApp();
    }
}
|
java
|
/**
 * Builds a bundle key by concatenating ("-"-joined) the OIDs of the named
 * static compiled sources, resolving and caching name-&gt;source lookups in
 * Infinispan, then registers a freshly instantiated bundle for that key.
 *
 * @param _names       names of the static compiled sources making up the bundle
 * @param _bundleclass concrete BundleInterface implementation to instantiate
 * @return the generated key
 * @throws EFapsException if the bundle class cannot be instantiated or accessed
 */
private static String createNewKey(final List<String> _names,
                                   final Class<?> _bundleclass)
    throws EFapsException
{
    final StringBuilder builder = new StringBuilder();
    final List<String> oids = new ArrayList<>();
    String ret = null;
    try {
        for (final String name : _names) {
            if (builder.length() > 0) {
                builder.append("-");
            }
            final Cache<String, StaticCompiledSource> cache = InfinispanCache.get()
                .<String, StaticCompiledSource>getIgnReCache(BundleMaker.NAMECACHE);
            if (!cache.containsKey(name)) {
                // Cache miss: query the database for the named source and
                // populate the name cache with everything that matches.
                final QueryBuilder queryBldr = new QueryBuilder(CIAdminProgram.StaticCompiled);
                queryBldr.addWhereAttrEqValue(CIAdminProgram.StaticCompiled.Name, name);
                final MultiPrintQuery multi = queryBldr.getPrint();
                multi.addAttribute(CIAdminProgram.StaticCompiled.Name);
                multi.execute();
                while (multi.next()) {
                    final String statName = multi.<String>getAttribute(CIAdminProgram.StaticCompiled.Name);
                    final StaticCompiledSource source = new StaticCompiledSource(multi.getCurrentInstance()
                                    .getOid(),
                            statName);
                    cache.put(source.getName(), source);
                }
            }
            // Names that still cannot be resolved are silently skipped.
            if (cache.containsKey(name)) {
                final String oid = cache.get(name).getOid();
                builder.append(oid);
                oids.add(oid);
            }
        }
        ret = builder.toString();
        final BundleInterface bundle =
                (BundleInterface) _bundleclass.newInstance();
        bundle.setKey(ret, oids);
        final Cache<String, BundleInterface> cache = InfinispanCache.get()
            .<String, BundleInterface>getIgnReCache(BundleMaker.CACHE4BUNDLE);
        cache.put(ret, bundle);
    } catch (final InstantiationException e) {
        throw new EFapsException(BundleMaker.class,
                "createNewKey.InstantiationException", e, _bundleclass);
    } catch (final IllegalAccessException e) {
        throw new EFapsException(BundleMaker.class,
                "createNewKey.IllegalAccessException", e, _bundleclass);
    }
    return ret;
}
|
python
|
def dim_extents(self, *args, **kwargs):
    """
    Returns extent tuples of the dimensions in args.

    .. code-block:: python

        (tl, tu), (bl, bu) = cube.dim_extents('ntime', 'nbl')

    or

    .. code-block:: python

        (tl, tu), (bl, bu) = cube.dim_extents('ntime,nbl')
    """
    lower = self.dim_lower_extent(*args, **kwargs)
    upper = self.dim_upper_extent(*args, **kwargs)
    # A sequence result means several dimensions were requested: pair the
    # lower/upper bounds up.  collections.Sequence was removed in Python
    # 3.10, so the ABC must come from collections.abc.
    if isinstance(lower, collections.abc.Sequence):
        return zip(lower, upper)
    else:
        return (lower, upper)
|
java
|
/**
 * Type-safe facade over the generic thread-local builder entry point.
 */
public static <T, B extends ThreadLocalBuilder<T>> T build(
        final Class<B> threadLocalBuilderClass,
        final Consumer<B> builderConsumer) {
    final T built = buildGeneric(threadLocalBuilderClass, builderConsumer);
    return built;
}
|
python
|
def weibull(target, seeds, shape, scale, loc):
    r"""
    Produces values from a Weibull distribution given a set of random numbers.

    Parameters
    ----------
    target : OpenPNM Object
        The object which this model is associated with. This controls the
        length of the calculated array, and also provides access to other
        necessary properties.
    seeds : string, optional
        The dictionary key on the Geometry object containing random seed values
        (between 0 and 1) to use in the statistical distribution.
    shape : float
        This controls the skewness of the distribution, with 'shape' < 1 giving
        values clustered on the low end of the range with a long tail, and
        'shape' > 1 giving a more symmetrical distribution.
    scale : float
        This controls the width of the distribution with most of values falling
        below this number.
    loc : float
        Applies an offset to the distribution such that the smallest values are
        above this number.

    Examples
    --------
    The following code illustrates the inner workings of this function,
    which uses the 'weibull_min' method of the scipy.stats module. This can
    be used to find suitable values of 'shape', 'scale'` and 'loc'. Note that
    'shape' is represented by 'c' in the actual function call.

    >>> import scipy
    >>> func = scipy.stats.weibull_min(c=1.5, scale=0.0001, loc=0)
    >>> import matplotlib.pyplot as plt
    >>> fig = plt.hist(func.ppf(q=scipy.rand(10000)), bins=50)
    """
    # Map the stored uniform seeds through the Weibull percent-point
    # function ('shape' is scipy's 'c' parameter).
    seed_values = target[seeds]
    return spts.weibull_min.ppf(q=seed_values, c=shape, scale=scale, loc=loc)
|
java
|
/**
 * Lists availability sets in a resource group, converting each service
 * response's raw item list into a single {@code Page}.
 *
 * @param resourceGroupName the resource group to query
 * @return an observable emitting one page per service response
 */
public Observable<Page<AvailabilitySetInner>> listByResourceGroupAsync(String resourceGroupName) {
    return listByResourceGroupWithServiceResponseAsync(resourceGroupName).map(new Func1<ServiceResponse<List<AvailabilitySetInner>>, Page<AvailabilitySetInner>>() {
        @Override
        public Page<AvailabilitySetInner> call(ServiceResponse<List<AvailabilitySetInner>> response) {
            // Wrap the item list in a Page; no continuation link is set here.
            PageImpl<AvailabilitySetInner> page = new PageImpl<>();
            page.setItems(response.body());
            return page;
        }
    });
}
|
java
|
public void register(DescribeService service, String describeService) throws POIProxyException {
    // Register the raw describe-service document under the service's own id.
    final String serviceId = service.getId();
    serviceManager.registerServiceConfiguration(serviceId, describeService, service);
}
|
java
|
/**
 * Tests membership by reference identity (==), not equals().  As a side
 * effect, entries whose keys have been cleared (getKey() returned null) are
 * unlinked from the bucket while it is scanned.
 *
 * @param key the key to look for; null always yields false
 * @return true if an identical key is present
 */
public boolean containsKey(Object key) {
    if (key == null) {
        return false;
    }
    Entry tab[] = mTable;
    // Identity hash, masked non-negative, mapped to a bucket index.
    int hash = System.identityHashCode(key);
    int index = (hash & 0x7FFFFFFF) % tab.length;
    for (Entry e = tab[index], prev = null; e != null; e = e.mNext) {
        Object entryKey = e.getKey();
        if (entryKey == null) {
            // Clean up after a cleared Reference.
            mModCount++;
            if (prev != null) {
                prev.mNext = e.mNext;
            }
            else {
                tab[index] = e.mNext;
            }
            mCount--;
        }
        else if (e.mHash == hash && key == entryKey) {
            return true;
        }
        else {
            // Only advance prev past live, non-matching entries so the
            // unlink above keeps the chain intact.
            prev = e;
        }
    }
    return false;
}
|
java
|
/**
 * Exposes the plugin's registered libraries as classpath entries.
 */
@Override
public IClasspathEntry[] getClasspathEntries() {
    List<IClasspathEntry> libraries = Activator.getDefault().libraries();
    // Zero-length array form: toArray allocates the correctly sized array.
    return libraries.toArray(new IClasspathEntry[0]);
}
|
python
|
def to_array(self):
    """
    Serializes this SuccessfulPayment to a dictionary.

    Required fields are always written; the optional shipping_option_id and
    order_info keys are included only when set.

    :return: dictionary representation of this object.
    :rtype: dict
    """
    array = super(SuccessfulPayment, self).to_array()
    array['currency'] = u(self.currency)  # py2: type unicode, py3: type str
    array['total_amount'] = int(self.total_amount)  # type int
    array['invoice_payload'] = u(self.invoice_payload)  # py2: type unicode, py3: type str
    array['telegram_payment_charge_id'] = u(self.telegram_payment_charge_id)  # py2: type unicode, py3: type str
    array['provider_payment_charge_id'] = u(self.provider_payment_charge_id)  # py2: type unicode, py3: type str
    if self.shipping_option_id is not None:
        array['shipping_option_id'] = u(self.shipping_option_id)  # py2: type unicode, py3: type str
    if self.order_info is not None:
        array['order_info'] = self.order_info.to_array()  # type OrderInfo
    return array
|
java
|
public SDVariable layerNorm(SDVariable input, SDVariable gain, int... dimensions) {
    // Delegate to the named variant without assigning an explicit name.
    final String name = null;
    return layerNorm(name, input, gain, dimensions);
}
|
python
|
def _write_reads(reads, prefix):
    """
    Write fasta file, ma file and real position

    Produces three sibling files derived from ``prefix``: a tab-separated
    ``.ma`` table, a ``.fasta`` sequence file, and a ``.txt`` file holding
    the metadata parsed from each read name ("_"-separated fields).
    ``reads`` is a mapping of read name -> indexable record where [0] is the
    sequence and [1] the sample value.
    """
    out_ma = prefix + ".ma"
    out_fasta = prefix + ".fasta"
    out_real = prefix + ".txt"
    with open(out_ma, 'w') as ma_handle:
        # NOTE(review): every print below uses end="", so no newlines separate
        # the header/records in any of the three files — confirm the record
        # strings are expected to carry their own line endings.
        print("id\tseq\tsample", file=ma_handle, end="")
        with open(out_fasta, 'w') as fa_handle:
            with open(out_real, 'w') as read_handle:
                for idx, r in enumerate(reads):
                    info = r.split("_")
                    print("seq_%s\t%s\t%s" % (idx, reads[r][0], reads[r][1]), file=ma_handle, end="")
                    print(">seq_%s\n%s" % (idx, reads[r][0]), file=fa_handle, end="")
                    print("%s\t%s\t%s\t%s\t%s\t%s\t%s" % (idx, r, reads[r][0], reads[r][1], info[1], info[2], info[3]), file=read_handle, end="")
|
java
|
/**
 * Returns the entry-set view, creating and caching it on first access
 * (same lazy-view idiom as the java.util map implementations).
 */
public Set<Map.Entry<K,V>> entrySet() {
    Set<Map.Entry<K,V>> es = entrySet;
    return es != null ? es : (entrySet = new EntrySet());
}
|
java
|
protected boolean startAuthentication(final C context, final List<Client> currentClients) {
    // Authentication starts only when the first resolved client is an
    // indirect (redirect-based) client.
    if (isNotEmpty(currentClients)) {
        return currentClients.get(0) instanceof IndirectClient;
    }
    return false;
}
|
java
|
@Override
public List<SAXParseException> validateWithErrors(final File f)
    throws SAXException, IOException {
    // Validate with a handler that collects (rather than throws) parse
    // errors, then hand back everything it recorded.
    final Validator errorValidator = createNewErrorValidator();
    errorValidator.validate(utf8SourceForFile(f), null);
    final Handler handler = (Handler) errorValidator.getErrorHandler();
    return handler.exceptions;
}
|
java
|
/**
 * Derives the event name for an {@code @Emit}-annotated method: the
 * annotation's explicit value when present, otherwise the method name
 * converted from camel case to lower-hyphen form.
 */
public static String methodToEventName(ExecutableElement method) {
    // NOTE(review): getAnnotation returns null when @Emit is absent, which
    // would NPE below — confirm callers only pass annotated methods.
    Emit emitAnnotation = method.getAnnotation(Emit.class);
    if (!"".equals(emitAnnotation.value())) {
        return emitAnnotation.value();
    }
    // The trailing toLowerCase() is defensive; LOWER_HYPHEN output is
    // already lower case.
    return CaseFormat.UPPER_CAMEL.to(CaseFormat.LOWER_HYPHEN, method.getSimpleName().toString())
        .toLowerCase();
}
|
java
|
/**
 * Replaces variables delimited by the given open/close markers (e.g.
 * "${" and "}") in the expression, looking values up via the resolver.
 * Unresolved variables are emitted verbatim, markers included; partial
 * marker matches are flushed back into the output as literal text.
 *
 * @param pResolver   source of variable values (may return null = unresolved)
 * @param pExpression text to scan
 * @param pOpen       opening delimiter
 * @param pClose      closing delimiter
 * @return the expression with all resolvable variables substituted
 */
public static String replaceVariables( final VariableResolver pResolver, final String pExpression, final String pOpen, final String pClose ) {
    final char[] open = pOpen.toCharArray();
    final char[] close = pClose.toCharArray();
    final StringBuilder out = new StringBuilder();
    StringBuilder sb = new StringBuilder();
    // last: which delimiter was completed most recently (open or close);
    // wo/wc: how many chars of the open/close delimiter matched so far;
    // level: current nesting depth of open markers.
    char[] last = null;
    int wo = 0;
    int wc = 0;
    int level = 0;
    for (char c : pExpression.toCharArray()) {
        if (c == open[wo]) {
            // Progressing through the open delimiter; abandon any partial
            // close-delimiter match by flushing it as literal text.
            if (wc > 0) {
                sb.append(close, 0, wc);
            }
            wc = 0;
            wo++;
            if (open.length == wo) {
                // found open
                if (last == open) {
                    out.append(open);
                }
                level++;
                out.append(sb);
                sb = new StringBuilder();
                wo = 0;
                last = open;
            }
        } else if (c == close[wc]) {
            // Progressing through the close delimiter; abandon any partial
            // open-delimiter match first.
            if (wo > 0) {
                sb.append(open, 0, wo);
            }
            wo = 0;
            wc++;
            if (close.length == wc) {
                // found close
                if (last == open) {
                    // A complete open...close pair: try to resolve it.
                    final String variable = pResolver.get(sb.toString());
                    if (variable != null) {
                        out.append(variable);
                    } else {
                        // Unresolved: re-emit the variable verbatim.
                        out.append(open);
                        out.append(sb);
                        out.append(close);
                    }
                } else {
                    out.append(sb);
                    out.append(close);
                }
                sb = new StringBuilder();
                level--;
                wc = 0;
                last = close;
            }
        } else {
            // Ordinary character: flush any partial delimiter matches as
            // literal text, then buffer the character.
            if (wo > 0) {
                sb.append(open, 0, wo);
            }
            if (wc > 0) {
                sb.append(close, 0, wc);
            }
            sb.append(c);
            wo = wc = 0;
        }
    }
    // Flush trailing partial matches and any unbalanced open marker.
    if (wo > 0) {
        sb.append(open, 0, wo);
    }
    if (wc > 0) {
        sb.append(close, 0, wc);
    }
    if (level > 0) {
        out.append(open);
    }
    out.append(sb);
    return out.toString();
}
|
java
|
/**
 * Returns the next log index to send to the given follower server.
 * Preconditions: this node must currently be the leader and must already
 * track per-server replication state for {@code server}.
 */
synchronized long getNextIndex(String server) {
    checkState(role == Role.LEADER, "role:%s", role);
    checkState(self.equals(leader), "self:%s leader:%s", self, leader);
    checkState(serverData.containsKey(server), "server:%s", server);
    return serverData.get(server).nextIndex;
}
|
java
|
/**
 * Returns the list of byte values backing this annotation value,
 * lazily creating it on first access (EMF-generated accessor pattern).
 */
public EList<Byte> getValues()
{
    if (values == null)
    {
        // Bind the new list to this object's VALUES structural feature id.
        values = new EDataTypeEList<Byte>(Byte.class, this, TypesPackage.JVM_BYTE_ANNOTATION_VALUE__VALUES);
    }
    return values;
}
|
java
|
public BoxLegalHoldAssignment.Info assignTo(BoxResource resource) {
    // Create an assignment linking this legal hold to the given resource,
    // identified by its resource-type string and id.
    final String resourceType = BoxResource.getResourceType(resource.getClass());
    return BoxLegalHoldAssignment.create(
        this.getAPI(), this.getID(), resourceType, resource.getID());
}
|
python
|
def record_set_create_or_update(name, zone_name, resource_group, record_type, **kwargs):
    '''
    .. versionadded:: Fluorine

    Creates or updates a record set within a DNS zone.

    :param name: The name of the record set, relative to the name of the zone.
    :param zone_name: The name of the DNS zone (without a terminating dot).
    :param resource_group: The name of the resource group.
    :param record_type: The type of DNS record in this record set. Record sets of type SOA can be
        updated but not created (they are created when the DNS zone is created).
        Possible values include: 'A', 'AAAA', 'CAA', 'CNAME', 'MX', 'NS', 'PTR', 'SOA', 'SRV', 'TXT'

    CLI Example:

    .. code-block:: bash

        salt-call azurearm_dns.record_set_create_or_update myhost myzone testgroup A
            arecords='[{ipv4_address: 10.0.0.1}]' ttl=300
    '''
    dnsconn = __utils__['azurearm.get_client']('dns', **kwargs)
    try:
        # Build a RecordSet model object from the remaining keyword args.
        record_set_model = __utils__['azurearm.create_object_model']('dns', 'RecordSet', **kwargs)
    except TypeError as exc:
        result = {'error': 'The object model could not be built. ({0})'.format(str(exc))}
        return result
    try:
        record_set = dnsconn.record_sets.create_or_update(
            relative_record_set_name=name,
            zone_name=zone_name,
            resource_group_name=resource_group,
            record_type=record_type,
            parameters=record_set_model,
            if_match=kwargs.get('if_match'),
            if_none_match=kwargs.get('if_none_match')
        )
        result = record_set.as_dict()
    except CloudError as exc:
        # Service-side failure: log it and report the error in the result.
        __utils__['azurearm.log_cloud_error']('dns', str(exc), **kwargs)
        result = {'error': str(exc)}
    except SerializationError as exc:
        result = {'error': 'The object model could not be parsed. ({0})'.format(str(exc))}
    return result
|
python
|
def insert_arguments_into_sql_query(compilation_result, arguments):
    """Insert the arguments into the compiled SQL query to form a complete query.

    Args:
        compilation_result: CompilationResult, compilation result from the GraphQL compiler.
        arguments: Dict[str, Any], parameter name -> value, for every parameter the query expects.

    Returns:
        SQLAlchemy Selectable, a executable SQL query with parameters bound.
    """
    # Guard against being handed a compilation result for a different backend.
    if compilation_result.language != SQL_LANGUAGE:
        raise AssertionError(u'Unexpected query output language: {}'.format(compilation_result))
    return compilation_result.query.params(**arguments)
|
java
|
/**
 * Builds a snapshot set of (first, second, value) entries from the backing
 * pair-keyed map.  Iterating the backing map's entrySet avoids a second
 * lookup per key (the previous keySet+get approach).
 */
public Set<Entry<K, T, V>> entrySet() {
    Set<Entry<K, T, V>> ret = new HashSet<>();
    for (Map.Entry<Pair<K, T>, V> backedEntry : backedMap.entrySet()) {
        Pair<K, T> pair = backedEntry.getKey();
        ret.add(new Entry<>(pair.getFirst(), pair.getSecond(), backedEntry.getValue()));
    }
    return ret;
}
|
java
|
/**
 * Marks the remote task as FAILED with the given cause and aborts it.
 * The debug log is only emitted while the task is not already done, but the
 * abort/fail transition below is attempted unconditionally.
 */
private void failTask(Throwable cause)
{
    TaskStatus taskStatus = getTaskStatus();
    if (!taskStatus.getState().isDone()) {
        log.debug(cause, "Remote task %s failed with %s", taskStatus.getSelf(), cause);
    }
    abort(failWith(getTaskStatus(), FAILED, ImmutableList.of(toFailure(cause))));
}
|
java
|
/**
 * JMH benchmark: exercises the prefix-instrumented Byte Buddy proxy across
 * the primitive/reference overloads, in single- and triple-argument form
 * (20 calls total, matching @OperationsPerInvocation).
 * NOTE(review): the intValue overload is invoked twice in each group —
 * confirm against the sibling benchmarks whether one of those lines was
 * meant to exercise a different type.
 */
@Benchmark
@OperationsPerInvocation(20)
public void benchmarkByteBuddyWithPrefix(Blackhole blackHole) {
    blackHole.consume(byteBuddyWithPrefixInstance.method(booleanValue));
    blackHole.consume(byteBuddyWithPrefixInstance.method(byteValue));
    blackHole.consume(byteBuddyWithPrefixInstance.method(shortValue));
    blackHole.consume(byteBuddyWithPrefixInstance.method(intValue));
    blackHole.consume(byteBuddyWithPrefixInstance.method(charValue));
    blackHole.consume(byteBuddyWithPrefixInstance.method(intValue));
    blackHole.consume(byteBuddyWithPrefixInstance.method(longValue));
    blackHole.consume(byteBuddyWithPrefixInstance.method(floatValue));
    blackHole.consume(byteBuddyWithPrefixInstance.method(doubleValue));
    blackHole.consume(byteBuddyWithPrefixInstance.method(stringValue));
    blackHole.consume(byteBuddyWithPrefixInstance.method(booleanValue, booleanValue, booleanValue));
    blackHole.consume(byteBuddyWithPrefixInstance.method(byteValue, byteValue, byteValue));
    blackHole.consume(byteBuddyWithPrefixInstance.method(shortValue, shortValue, shortValue));
    blackHole.consume(byteBuddyWithPrefixInstance.method(intValue, intValue, intValue));
    blackHole.consume(byteBuddyWithPrefixInstance.method(charValue, charValue, charValue));
    blackHole.consume(byteBuddyWithPrefixInstance.method(intValue, intValue, intValue));
    blackHole.consume(byteBuddyWithPrefixInstance.method(longValue, longValue, longValue));
    blackHole.consume(byteBuddyWithPrefixInstance.method(floatValue, floatValue, floatValue));
    blackHole.consume(byteBuddyWithPrefixInstance.method(doubleValue, doubleValue, doubleValue));
    blackHole.consume(byteBuddyWithPrefixInstance.method(stringValue, stringValue, stringValue));
}
|
java
|
public static StringIsLessThanOrEqual isLessThanOrEqual(StringExpression left, Object constant) {
    // Only string constants can be compared against a string expression.
    if (!(constant instanceof String)) {
        throw new IllegalArgumentException("constant is not a String");
    }
    final String value = (String) constant;
    return new StringIsLessThanOrEqual(left, constant(value));
}
|
python
|
def calculate_totals( self, children, local_children=None ):
    """Calculate our cumulative totals from children and/or local children"""
    # For each (total_field, local_field) pair, sum across children: groups
    # (or every child when LOCAL_ONLY is off) contribute the total field,
    # plain rows contribute the local field when LOCAL_ONLY is on.
    for field,local_field in (('recursive','calls'),('cumulative','local')):
        values = []
        for child in children:
            if isinstance( child, PStatGroup ) or not self.LOCAL_ONLY:
                values.append( getattr( child, field, 0 ) )
            elif isinstance( child, PStatRow ) and self.LOCAL_ONLY:
                values.append( getattr( child, local_field, 0 ) )
        value = sum( values )
        setattr( self, field, value )
    if self.recursive:
        self.cumulativePer = self.cumulative/float(self.recursive)
    else:
        self.recursive = 0
    if local_children:
        # NOTE(review): this loop sums over `children`, not `local_children`
        # — confirm that is intentional.
        for field in ('local','calls'):
            value = sum([ getattr( child, field, 0 ) for child in children] )
            setattr( self, field, value )
        if self.calls:
            self.localPer = self.local / self.calls
    else:
        # No local children: zero out the local aggregates.
        self.local = 0
        self.calls = 0
        self.localPer = 0
|
java
|
/**
 * Allocates a direct, native-ordered FloatBuffer holding {@code capacity}
 * floats (4 bytes each).
 *
 * @param capacity number of floats; must be non-negative
 * @return the newly allocated buffer
 * @throws IllegalArgumentException if capacity is negative
 */
public static FloatBuffer allocate (int capacity) {
    if (capacity < 0) {
        // Previously thrown without a message; include the offending value.
        throw new IllegalArgumentException("capacity must be >= 0: " + capacity);
    }
    ByteBuffer bb = ByteBuffer.allocateDirect(capacity * 4);
    bb.order(ByteOrder.nativeOrder());
    return bb.asFloatBuffer();
}
|
java
|
/**
 * Converts one BioPAX element into a Lucene document and adds it to the
 * index.  Pre-computed annotation values (pathways, organisms, datasources,
 * keywords, counters, xref ids) are turned into index fields and then
 * removed from the element's annotation map; names get boosted text fields.
 *
 * @param bpe         element to index
 * @param indexWriter writer to add the resulting document to
 */
void index(BioPAXElement bpe, IndexWriter indexWriter) {
    // create a new document
    final Document doc = new Document();
    // save URI (not indexed field)
    Field field = new StoredField(FIELD_URI, bpe.getUri());
    doc.add(field);
    // index and store but not analyze/tokenize the biopax class name:
    field = new StringField(FIELD_TYPE, bpe.getModelInterface().getSimpleName().toLowerCase(), Field.Store.YES);
    doc.add(field);
    // make index fields from the annotations map (of pre-calculated/inferred values)
    if(!bpe.getAnnotations().isEmpty()) {
        if(bpe.getAnnotations().containsKey(FIELD_PATHWAY)) {
            addPathways((Set<Pathway>)bpe.getAnnotations().get(FIELD_PATHWAY), doc);
        }
        if(bpe.getAnnotations().containsKey(FIELD_ORGANISM)) {
            addOrganisms((Set<BioSource>)bpe.getAnnotations().get(FIELD_ORGANISM), doc);
        }
        if(bpe.getAnnotations().containsKey(FIELD_DATASOURCE)) {
            addDatasources((Set<Provenance>)bpe.getAnnotations().get(FIELD_DATASOURCE), doc);
        }
        if(bpe.getAnnotations().containsKey(FIELD_KEYWORD)) {
            addKeywords((Set<String>)bpe.getAnnotations().get(FIELD_KEYWORD), doc);
        }
        if(bpe.getAnnotations().containsKey(FIELD_N_PARTICIPANTS)) {
            // participant count is stored only (not searchable)
            field = new StoredField(FIELD_N_PARTICIPANTS,
                    Integer.parseInt((String)bpe.getAnnotations().get(FIELD_N_PARTICIPANTS)));
            doc.add(field);
        }
        if(bpe.getAnnotations().containsKey(FIELD_N_PROCESSES)) {
            // process count is both searchable (FIELD_N) and stored
            field = new IntField(FIELD_N,
                    Integer.parseInt((String)bpe.getAnnotations().get(FIELD_N_PROCESSES)), Field.Store.NO);
            doc.add(field);
            field = new StoredField(FIELD_N_PROCESSES,
                    Integer.parseInt((String)bpe.getAnnotations().get(FIELD_N_PROCESSES)));
            doc.add(field);
        }
        if(bpe.getAnnotations().containsKey(FIELD_XREFID)) {
            //index biological IDs as keywords
            addKeywords((Set<String>)bpe.getAnnotations().get(FIELD_XREFID), doc);
            //index all IDs using "xrefid" fields
            for (String id : (Set<String>)bpe.getAnnotations().get(FIELD_XREFID)) {
                Field f = new StringField(FIELD_XREFID, id.toLowerCase(), Field.Store.NO);
                doc.add(f);
            }
        }
    }
    // The annotation values were consumed above; drop them from the element.
    bpe.getAnnotations().remove(FIELD_KEYWORD);
    bpe.getAnnotations().remove(FIELD_DATASOURCE);
    bpe.getAnnotations().remove(FIELD_ORGANISM);
    bpe.getAnnotations().remove(FIELD_PATHWAY);
    bpe.getAnnotations().remove(FIELD_N_PARTICIPANTS);
    bpe.getAnnotations().remove(FIELD_N_PROCESSES);
    bpe.getAnnotations().remove(FIELD_XREFID);
    // name
    if(bpe instanceof Named) {
        // Boost precedence: standard name > display name > other names.
        Named named = (Named) bpe;
        if(named.getStandardName() != null) {
            field = new TextField(FIELD_NAME, named.getStandardName(), Field.Store.NO);
            field.setBoost(3.5f);
            doc.add(field);
        }
        if(named.getDisplayName() != null && !named.getDisplayName().equalsIgnoreCase(named.getStandardName())) {
            field = new TextField(FIELD_NAME, named.getDisplayName(), Field.Store.NO);
            field.setBoost(3.0f);
            doc.add(field);
        }
        for(String name : named.getName()) {
            if(name.equalsIgnoreCase(named.getDisplayName()) || name.equalsIgnoreCase(named.getStandardName()))
                continue;
            field = new TextField(FIELD_NAME, name.toLowerCase(), Field.Store.NO);
            field.setBoost(2.5f);
            doc.add(field);
        }
    }
    // write
    try {
        indexWriter.addDocument(doc);
    } catch (IOException e) {
        throw new RuntimeException("Failed to index; " + bpe.getUri(), e);
    }
}
|
python
|
def group_shelf_fqfn(self):
    """Return groups shelf fully qualified filename.

    For testing/debugging a previous shelf file can be copied into the
    tc_temp_path directory instead of creating a new shelf file.
    """
    if self._group_shelf_fqfn is None:
        temp_path = self.tcex.args.tc_temp_path
        if self.saved_groups:
            # Reuse the saved shelf file copied into the temp directory.
            self._group_shelf_fqfn = os.path.join(temp_path, 'groups-saved')
        else:
            # Fresh shelf file with a unique name.
            self._group_shelf_fqfn = os.path.join(
                temp_path, 'groups-{}'.format(str(uuid.uuid4()))
            )
    return self._group_shelf_fqfn
|
python
|
def bottom(self):
    """Bottom coordinate."""
    # Prefer the "real" coordinate when the underlying data provides one.
    data = self._data
    return data.real_bottom if self._has_real() else data.bottom
|
python
|
def save_storage(self, instance, schema):
    """Save basic:json values to a Storage collection.

    Walks the schema's fields; every ``basic:json:``-typed value is turned
    into a Storage row and the field value is replaced by the storage pk.
    Integer values are assumed to already reference a Storage row; string
    values name a file whose contents are the JSON payload.
    """
    for field_schema, fields in iterate_fields(instance, schema):
        name = field_schema['name']
        value = fields[name]
        if field_schema.get('type', '').startswith('basic:json:'):
            if value and not self.pk:
                raise ValidationError(
                    'Data object must be `created` before creating `basic:json:` fields')
            if isinstance(value, int):
                # already in Storage
                continue
            if isinstance(value, str):
                # String value: resolve it as a file path and load the JSON
                # payload from that file.
                file_path = self.location.get_path(filename=value)  # pylint: disable=no-member
                if os.path.isfile(file_path):
                    try:
                        with open(file_path) as file_handler:
                            value = json.load(file_handler)
                    except json.JSONDecodeError:
                        # Re-read the raw content so the error can show what
                        # was actually in the file.
                        with open(file_path) as file_handler:
                            content = file_handler.read()
                        content = content.rstrip()
                        raise ValidationError(
                            "Value of '{}' must be a valid JSON, current: {}".format(name, content)
                        )
            storage = self.storages.create(  # pylint: disable=no-member
                name='Storage for data id {}'.format(self.pk),
                contributor=self.contributor,
                json=value,
            )
            # `value` is copied by value, so `fields[name]` must be changed
            fields[name] = storage.pk
|
java
|
@Override
public SqlContext param(final String parameterName, final Object value, final int sqlType) {
if (value instanceof Optional) {
Optional<?> optionalValue = (Optional<?>) value;
if (optionalValue.isPresent()) {
param(new Parameter(parameterName, optionalValue.get(), sqlType));
} else {
param(new Parameter(parameterName, null));
}
return this;
} else {
return param(new Parameter(parameterName, value, sqlType));
}
}
|
python
|
def _repopulate_pool(self):
    """
    Bring the number of pool processes up to the specified number, for use
    after reaping workers which have exited.
    """
    for i in range(self._processes - len(self._pool)):
        # Each worker runs the module-level `worker` loop over the shared
        # task/result queues with the pool's configured hooks.
        w = self.Process(target=worker,
                         args=(self._inqueue, self._outqueue,
                               self._initializer,
                               self._initargs, self._maxtasksperchild,
                               self._wrap_exception,
                               self._finalizer,
                               self._finalargs)
                         )
        self._pool.append(w)
        # Rename for clearer process listings; daemonize so workers die
        # with the parent process.
        w.name = w.name.replace('Process', 'PoolWorker')
        w.daemon = True
        w.start()
        util.debug('added worker')
|
java
|
public @CheckForNull Computer toComputer() {
    // Match this node to a live Computer by comparing remoting channels.
    Jenkins jenkins = Jenkins.getInstanceOrNull();
    if (jenkins == null) {
        return null;
    }
    for (Computer computer : jenkins.getComputers()) {
        if (getChannel() == computer.getChannel()) {
            return computer;
        }
    }
    return null;
}
|
java
|
/**
 * Fails this holder with the given exception: records it as the terminal
 * result under the lock, then completes any futures that were already
 * waiting.  Futures are completed outside the lock so their callbacks do
 * not run while it is held.
 */
public void releaseExceptionally(Throwable e) {
    ArrayList<CompletableFuture<T>> toComplete = null;
    synchronized (lock) {
        if (!waitingFutures.isEmpty()) {
            // Snapshot and clear under the lock; complete after releasing it.
            toComplete = new ArrayList<>(waitingFutures);
            waitingFutures.clear();
        }
        this.e = e;
        this.result = null;
        released = true;
    }
    if (toComplete != null) {
        for (CompletableFuture<T> f : toComplete) {
            f.completeExceptionally(e);
        }
    }
}
|
python
|
def get_singular_and_plural_dtype(self, dtype):
    """
    Parameters
    ----------
    dtype : str
        MagIC table type (specimens, samples, contribution, etc.)

    Returns
    ---------
    name : str
        singular name for MagIC table ('specimen' for specimens table, etc.)
    dtype : str
        plural dtype for MagIC table ('specimens' for specimens table, etc.)
    """
    dtype = dtype.strip()
    # Handle the two table names that do not follow the plural-'s' rule
    # first (neither ends in 's', so the order does not change results).
    if dtype == 'criteria':
        return 'table_column', 'criteria'
    if dtype == 'contribution':
        return 'doi', 'contribution'
    if dtype.endswith('s'):
        return dtype[:-1], dtype
|
python
|
def color(self):
    """Line color in IDA View"""
    # 0xFFFFFFFF is IDA's sentinel for "no color set".
    value = idc.GetColor(self.ea, idc.CIC_ITEM)
    return None if value == 0xFFFFFFFF else value
|
python
|
def npci_contents(self, use_dict=None, as_class=dict):
    """Return the contents of an object as a dict.

    Serializes the NPCI header fields (version, control, destination/source
    addressing, hop count, network-message info) into ``use_dict``, creating
    a new ``as_class()`` mapping when none is supplied.
    """
    if _debug: NPCI._debug("npci_contents use_dict=%r as_class=%r", use_dict, as_class)
    # make/extend the dictionary of content
    if use_dict is None:
        if _debug: NPCI._debug("    - new use_dict")
        use_dict = as_class()
    # version and control are simple
    use_dict.__setitem__('version', self.npduVersion)
    use_dict.__setitem__('control', self.npduControl)
    # dnet/dlen/dadr
    if self.npduDADR is not None:
        if self.npduDADR.addrType == Address.remoteStationAddr:
            use_dict.__setitem__('dnet', self.npduDADR.addrNet)
            use_dict.__setitem__('dlen', self.npduDADR.addrLen)
            use_dict.__setitem__('dadr', btox(self.npduDADR.addrAddr or ''))
        elif self.npduDADR.addrType == Address.remoteBroadcastAddr:
            # Broadcasts carry a network but no station address.
            use_dict.__setitem__('dnet', self.npduDADR.addrNet)
            use_dict.__setitem__('dlen', 0)
            use_dict.__setitem__('dadr', '')
        elif self.npduDADR.addrType == Address.globalBroadcastAddr:
            # Global broadcast uses the reserved network number 0xFFFF.
            use_dict.__setitem__('dnet', 0xFFFF)
            use_dict.__setitem__('dlen', 0)
            use_dict.__setitem__('dadr', '')
    # snet/slen/sadr
    if self.npduSADR is not None:
        use_dict.__setitem__('snet', self.npduSADR.addrNet)
        use_dict.__setitem__('slen', self.npduSADR.addrLen)
        use_dict.__setitem__('sadr', btox(self.npduSADR.addrAddr or ''))
    # hop count
    if self.npduHopCount is not None:
        use_dict.__setitem__('hop_count', self.npduHopCount)
    # network layer message name decoded
    if self.npduNetMessage is not None:
        use_dict.__setitem__('net_message', self.npduNetMessage)
    if self.npduVendorID is not None:
        use_dict.__setitem__('vendor_id', self.npduVendorID)
    # return what we built/updated
    return use_dict
|
python
|
def convert_to_mb(s):
    """Convert a memory size string to megabytes.

    Accepts values suffixed with 'G'/'g' (gigabytes), 'T'/'t' (terabytes)
    or 'M'/'m' (megabytes); a bare number is treated as already being MB.

    :param s: memory size string, e.g. '512M', '4G', '1T', '512'
    :returns: size in MB as a float
    :raises exception.SDKInternalError: if the value cannot be parsed
    """
    s = s.upper()
    try:
        if s.endswith('G'):
            return float(s[:-1].strip()) * 1024
        elif s.endswith('T'):
            return float(s[:-1].strip()) * 1024 * 1024
        elif s.endswith('M'):
            return float(s[:-1].strip())
        else:
            # No recognized suffix: treat the value as already being in MB.
            # (The previous code unconditionally dropped the last character
            # here, which silently mangled plain numeric input like '512'.)
            return float(s.strip())
    except (IndexError, ValueError, KeyError, TypeError):
        errmsg = ("Invalid memory format: %s") % s
        raise exception.SDKInternalError(msg=errmsg)
|
python
|
def get_self_url_host(request_data):
    """
    Returns the protocol + the current host + the port (if different than
    common ports).

    :param request_data: The request as a dict
    :type: dict

    :return: Url
    :rtype: string
    """
    host = OneLogin_Saml2_Utils.get_self_host(request_data)
    protocol = 'https' if OneLogin_Saml2_Utils.is_https(request_data) else 'http'

    port_suffix = ''
    server_port = request_data.get('server_port')
    if server_port is not None:
        port_number = str(server_port)
        port_suffix = ':' + port_number
        # Omit the port when it is the default for the scheme.
        if (protocol, port_number) in (('http', '80'), ('https', '443')):
            port_suffix = ''
    return '%s://%s%s' % (protocol, host, port_suffix)
|
java
|
/**
 * Sorts the control points by position while pinning the current first and
 * last points to the ends of the list.
 *
 * <p>The previous comparator violated the {@link Comparator} contract: it
 * only handled one side of each pinned-endpoint comparison (not
 * antisymmetric), and {@code (int) ((a - b) * 10000)} both loses ordering
 * for differences below 1e-4 and can overflow.  A contract-violating
 * comparator can make {@link Collections#sort} throw
 * "Comparison method violates its general contract!".  This version checks
 * both operands for each pinned point and uses {@link Float#compare}.</p>
 */
private void sortPoints() {
    final ControlPoint firstPt = (ControlPoint) list.get(0);
    final ControlPoint lastPt = (ControlPoint) list.get(list.size() - 1);
    Comparator compare = new Comparator() {
        public int compare(Object first, Object second) {
            // Keep the designated first point at the head of the list.
            if (first == firstPt) {
                return -1;
            }
            if (second == firstPt) {
                return 1;
            }
            // Keep the designated last point at the tail of the list.
            if (first == lastPt) {
                return 1;
            }
            if (second == lastPt) {
                return -1;
            }
            float a = ((ControlPoint) first).pos;
            float b = ((ControlPoint) second).pos;
            return Float.compare(a, b);
        }
    };
    Collections.sort(list, compare);
}
|
java
|
/**
 * Retrieves defects matching the given filter.
 *
 * @param filter criteria used to select defects; when {@code null} a
 *               default {@link DefectFilter} (no filtering) is used
 * @return the matching defects
 */
public Collection<Defect> defects(DefectFilter filter) {
    DefectFilter effectiveFilter = filter;
    if (effectiveFilter == null) {
        effectiveFilter = new DefectFilter();
    }
    return get(Defect.class, effectiveFilter);
}
|
python
|
def is_exponent_push_token(cls, token):
    """Returns `True` if the token is an Exponent push token.

    :param token: candidate value; any non-string returns ``False``
    """
    # ``six`` is only needed for Python 2 compatibility; fall back to the
    # built-in ``str`` so the check also works where six is not installed.
    try:
        import six
        string_types = six.string_types
    except ImportError:
        string_types = (str,)
    return (
        isinstance(token, string_types) and
        token.startswith('ExponentPushToken'))
|
java
|
/**
 * Collects stubbings configured on the given mocks that were never used.
 *
 * @param mocks mock objects to inspect
 * @return wrapper around the stubbings that should be reported as unused
 */
public UnusedStubbings getUnusedStubbings(Iterable<Object> mocks) {
    Set<Stubbing> allStubbings = AllInvocationsFinder.findStubbings(mocks);
    Filter<Stubbing> keepReportable = new Filter<Stubbing>() {
        public boolean isOut(Stubbing stubbing) {
            // Drop stubbings that the reporting policy considers used.
            return !UnusedStubbingReporting.shouldBeReported(stubbing);
        }
    };
    List<Stubbing> unused = filter(allStubbings, keepReportable);
    return new UnusedStubbings(unused);
}
|
python
|
def get_installed_classes(cls):
    """
    Iterates over installed plugins associated with the `entry_point` and
    returns a dictionary of viable ones keyed off of their names.

    A viable installed plugin is one that is both loadable *and* a subclass
    of the Pluggable subclass in question.
    """
    installed_classes = {}
    for entry_point in pkg_resources.iter_entry_points(cls.entry_point):
        try:
            plugin = entry_point.load()
        except ImportError as e:
            logger.error(
                "Could not load plugin %s: %s", entry_point.name, str(e)
            )
            continue

        if not issubclass(plugin, cls):
            # ``plugin`` is itself a class here, so its name is
            # ``plugin.__name__``.  (The previous code logged
            # ``plugin.__class__.__name__``, which is the metaclass name --
            # normally just "type" -- and therefore useless in the error.)
            logger.error(
                "Could not load plugin %s:" +
                " %s class is not subclass of %s",
                entry_point.name, plugin.__name__, cls.__name__
            )
            continue

        if not plugin.validate_dependencies():
            logger.error(
                "Could not load plugin %s:" +
                " %s class dependencies not met",
                entry_point.name, plugin.__name__
            )
            continue

        installed_classes[entry_point.name] = plugin
    return installed_classes
|
java
|
/**
 * Associates this vertex -- and, recursively, its whole subtree -- with the
 * given component tree.  Synchronized on this vertex so the reference and
 * the downward propagation happen atomically per node.
 *
 * @param tree the owning component tree
 */
private void setTree(ComponentTree tree) {
    synchronized (this) {
        this.tree = tree;
        // Propagate the new owner to every child vertex.
        for (ComponentVertex childVertex : children) {
            childVertex.setTree(tree);
        }
    }
}
|
java
|
/**
 * Builds the list of library packages required on the classpath.
 * Apache POI is only included when it has not been explicitly excluded via
 * the DBUnit configuration.
 *
 * @return the required library package names
 */
private String[] requiredLibraries() {
    List<String> libraries = new ArrayList<String>();
    libraries.add("org.dbunit");
    libraries.add("org.apache.commons");
    libraries.add("org.apache.log4j");
    libraries.add("org.slf4j");
    libraries.add("org.yaml");
    libraries.add("org.codehaus.jackson");
    if (!dbunitConfigurationInstance.get().isExcludePoi()) {
        libraries.add("org.apache.poi");
    }
    return libraries.toArray(new String[libraries.size()]);
}
|
python
|
def append(self, message_level, message_text):
    """ Adds a message level/text pair to this MessagesHeader

    :param message_level: severity level; must be one of
        ``MessagesHeader._message_levels``
    :param message_text: human-readable message text
    :raises ValueError: if ``message_level`` is not a recognized level
    """
    # ``x not in y`` is the idiomatic (PEP 8) spelling of ``not x in y``.
    if message_level not in MessagesHeader._message_levels:
        raise ValueError('message_level="%s"' % message_level)
    self._messages.append((message_level, message_text))
|
python
|
def mean(name, add, match):
    '''
    Accept a numeric value from the matched events and store a running average
    of the values in the given register. If the specified value is not numeric
    it will be skipped

    USAGE:

    .. code-block:: yaml

        foo:
          reg.mean:
            - add: data_field
            - match: my/custom/event
    '''
    ret = {'name': name,
           'changes': {},
           'comment': '',
           'result': True}
    # Initialize the register entry on first use.
    if name not in __reg__:
        __reg__[name] = {}
        __reg__[name]['val'] = 0
        __reg__[name]['total'] = 0
        __reg__[name]['count'] = 0
    for event in __events__:
        try:
            event_data = event['data']['data']
        except KeyError:
            event_data = event['data']
        if salt.utils.stringutils.expr_match(event['tag'], match):
            if add in event_data:
                try:
                    # Convert the *selected field*, not the whole payload.
                    # (The previous ``int(event_data)`` on a dict raised an
                    # uncaught TypeError, so the average never updated.)
                    comp = int(event_data[add])
                except (ValueError, TypeError):
                    # Non-numeric value: skip it, per the docstring.
                    continue
                __reg__[name]['total'] += comp
                __reg__[name]['count'] += 1
                __reg__[name]['val'] = __reg__[name]['total'] / __reg__[name]['count']
    return ret
|
python
|
def _on_del_route(self, msg):
    """
    Stub :data:`DEL_ROUTE` handler; fires 'disconnect' events on the
    corresponding :attr:`_context_by_id` member. This is replaced by
    :class:`mitogen.parent.RouteMonitor` in an upgraded context.
    """
    LOG.error('%r._on_del_route() %r', self, msg)
    if msg.is_dead:
        return

    # Payload has the form b"<target_id>:<name>".
    id_bytes, _, name = bytes_partition(msg.data, b(':'))
    target_id = int(id_bytes, 10)
    context = self._context_by_id.get(target_id)
    if not context:
        LOG.debug('DEL_ROUTE for unknown ID %r: %r', target_id, msg)
        return
    fire(context, 'disconnect')
|
java
|
/**
 * Emits bytecode that compares the two values on top of the operand stack
 * and branches to {@code label} based on the result.
 *
 * <p>Primitives are compared directly; float/double go through
 * {@code Float.compare}/{@code Double.compare} so NaN is handled.
 * Object types are compared via an equals/compareTo helper (see
 * {@code addEqualsCallTo}), with optional explicit null handling since
 * {@code compareTo} cannot accept null.</p>
 *
 * @param b           code builder receiving the instructions
 * @param valueType   type of the two values on the stack
 * @param testForNull when true, emit null checks before the equality call
 * @param label       branch target
 * @param choice      branch when equal (true) or when unequal (false)
 */
public static void addValuesEqualCall(final CodeBuilder b,
                                      final TypeDesc valueType,
                                      final boolean testForNull,
                                      final Label label,
                                      final boolean choice)
{
    if (valueType.getTypeCode() != TypeDesc.OBJECT_CODE) {
        if (valueType.getTypeCode() == TypeDesc.FLOAT_CODE) {
            // Special treatment to handle NaN.
            b.invokeStatic(TypeDesc.FLOAT.toObjectType(), "compare", TypeDesc.INT,
                           new TypeDesc[] {TypeDesc.FLOAT, TypeDesc.FLOAT});
            b.ifZeroComparisonBranch(label, choice ? "==" : "!=");
        } else if (valueType.getTypeCode() == TypeDesc.DOUBLE_CODE) {
            // Special treatment to handle NaN.
            b.invokeStatic(TypeDesc.DOUBLE.toObjectType(), "compare", TypeDesc.INT,
                           new TypeDesc[] {TypeDesc.DOUBLE, TypeDesc.DOUBLE});
            b.ifZeroComparisonBranch(label, choice ? "==" : "!=");
        } else {
            // All other primitives compare directly.
            b.ifComparisonBranch(label, choice ? "==" : "!=", valueType);
        }
        return;
    }

    if (!testForNull) {
        // Caller guarantees non-null operands; call equals/compareTo directly.
        String op = addEqualsCallTo(b, valueType, choice);
        b.ifZeroComparisonBranch(label, op);
        return;
    }

    Label isNotNull = b.createLabel();
    LocalVariable value = b.createLocalVariable(null, valueType);
    b.storeLocal(value);
    b.loadLocal(value);
    b.ifNullBranch(isNotNull, false);

    // First value popped off stack is null. Just test remaining one for null.
    b.ifNullBranch(label, choice);
    Label cont = b.createLabel();
    b.branch(cont);

    // First value popped off stack is not null, but second one might be.
    isNotNull.setLocation();
    if (compareToType(valueType) == null) {
        // Call equals method, but swap values so that the second value is
        // an argument into the equals method.
        b.loadLocal(value);
        b.swap();
    } else {
        // Need to test for second argument too, since compareTo method
        // cannot cope with null.
        LocalVariable value2 = b.createLocalVariable(null, valueType);
        b.storeLocal(value2);
        b.loadLocal(value2);
        b.ifNullBranch(label, !choice);
        // Load both values in preparation for calling compareTo method.
        b.loadLocal(value);
        b.loadLocal(value2);
    }

    String op = addEqualsCallTo(b, valueType, choice);
    b.ifZeroComparisonBranch(label, op);

    cont.setLocation();
}
|
python
|
def get_files_by_layer(self, layer_name, file_pattern='*'):
    """
    returns a list of all files with the given filename pattern in the
    given PCC annotation layer
    """
    # The layer directory lives directly under the corpus root.
    layer_path = os.path.join(self.path, layer_name)
    matches = dg.find_files(layer_path, file_pattern)
    return list(matches)
|
python
|
def monitor(self, job_id, timeout=5):
    """
    Monitor the status of a job.

    ## Arguments

    * `job_id` (int): The job to check.
    * `timeout` (float): The time to wait between checks (in sec).

    ## Returns

    * `code` (int): The status.
    * `status` (str): The human-readable name of the current status.
    """
    # Status codes treated as terminal.
    # NOTE(review): assumed from the original [3, 4, 5] check -- confirm
    # against the service's status-code definitions.
    terminal_codes = (3, 4, 5)
    while True:
        status = self.status(job_id)
        # Lazy %-style args: the message is only rendered when INFO is on.
        logging.info("Monitoring job: %d - Status: %d, %s",
                     job_id, status[0], status[1])
        if status[0] in terminal_codes:
            return status
        time.sleep(timeout)
|
java
|
/**
 * Deletes the given resource -- or, for an {@code ?ext=acl} request, just
 * its ACL graph while preserving the user-managed triples.
 *
 * <p>Order matters: the deleted/cache checks run before any dataset work so
 * a 410/304 can short-circuit, audit quads are added first, and the
 * persistence call happens inside the try-with-resources so the dataset is
 * always closed.</p>
 *
 * @param res the resource to delete
 * @return 204 on success, or a 500 builder when persistence fails
 */
public ResponseBuilder deleteResource(final Resource res) {
    final String baseUrl = getBaseUrl();
    final String identifier = baseUrl + req.getPartition() + req.getPath();
    final Session session = ofNullable(req.getSession()).orElseGet(HttpSession::new);
    // Check if this is already deleted
    checkDeleted(res, identifier);
    // Check the cache
    final EntityTag etag = new EntityTag(md5Hex(res.getModified() + identifier));
    checkCache(req.getRequest(), res.getModified(), etag);
    LOGGER.debug("Deleting {}", identifier);
    try (final TrellisDataset dataset = TrellisDataset.createDataset()) {
        // Add the audit quads
        audit.ifPresent(svc -> svc.deletion(res.getIdentifier(), session).stream()
                .map(skolemizeQuads(resourceService, baseUrl)).forEachOrdered(dataset::add));
        // When deleting just the ACL graph, keep the user managed triples in tact
        if (ACL.equals(req.getExt())) {
            try (final Stream<? extends Triple> triples = res.stream(PreferUserManaged)) {
                triples.map(t -> rdf.createQuad(PreferUserManaged, t.getSubject(), t.getPredicate(), t.getObject()))
                    .forEachOrdered(dataset::add);
            }
        }
        // delete the resource
        if (resourceService.put(res.getIdentifier(), dataset.asDataset())) {
            return status(NO_CONTENT);
        }
    }
    LOGGER.error("Unable to delete resource at {}", res.getIdentifier());
    return serverError().entity("Unable to delete resource. Please consult the logs for more information");
}
|
python
|
def separation_in_list(separation_indices, separation_indices_list):
    """
    Checks if the separation indices of a plane are already in the list

    :param separation_indices: list of separation indices (three arrays of integers)
    :param separation_indices_list: list of the list of separation indices to be compared to
    :return: True if the separation indices are already in the list, False otherwise
    """
    # Sorting makes the comparison canonical; only the middle group
    # (index 1) of the triple is compared, as in the original logic.
    candidate = sort_separation(separation_indices)
    return any(
        len(existing[1]) == len(candidate[1]) and
        np.allclose(candidate[1], existing[1])
        for existing in separation_indices_list
    )
|
java
|
/**
 * Collapses a list of genotype strings down to the indices of the
 * genotypes that best represent the call.  Preference order: genotypes
 * carrying an ALT allele, then fully hom-ref calls, then calls with at
 * least one REF allele, then no-calls.
 *
 * @param gtsStr genotype strings, e.g. "0/1", "0/0", "./."
 * @return indices into {@code gtsStr} of the selected genotypes
 */
private List<Integer> collapseGT(List<String> gtsStr) {
    if (gtsStr.isEmpty()) {
        return Collections.emptyList();
    }
    if (gtsStr.size() == 1) {
        return Collections.singletonList(0);
    }
    List<Genotype> gts = gtsStr.stream().map(Genotype::new).collect(Collectors.toList());
    // only get GT with an ALT e.g 0/1 0/2 1/2 etc. (ignore ./. and 0/0 GT)
    Predicate<Genotype> findAlts = gt -> Arrays.stream(gt.getAllelesIdx()).anyMatch(i -> i > 0);
    Predicate<Genotype> findHomRef = gt -> Arrays.stream(gt.getAllelesIdx()).allMatch(i -> i == 0);
    Predicate<Genotype> findOneRef = gt -> Arrays.stream(gt.getAllelesIdx()).anyMatch(i -> i == 0);
    Predicate<Genotype> findNoCalls = gt -> Arrays.stream(gt.getAllelesIdx()).anyMatch(i -> i < 0);
    List<Integer> oneAltAllele = getMatchingPositions(gts, findAlts);
    if (!oneAltAllele.isEmpty()) {
        return oneAltAllele;
    }
    List<Integer> reference = getMatchingPositions(gts, findHomRef);
    if (!reference.isEmpty()) {
        return reference;
    }
    List<Integer> oneReferenceAllele = getMatchingPositions(gts, findOneRef);
    if (!oneReferenceAllele.isEmpty()) {
        return oneReferenceAllele;
    }
    // only no-calls left -> try to collapse
    List<Integer> nocalls = getMatchingPositions(gts, findNoCalls);
    if (nocalls.size() == gtsStr.size()) { // all GT found
        return Collections.singletonList(nocalls.get(0));
    }
    // don't know that could be left!!!
    if (this.collapseDeletions) {
        throw new IllegalStateException("Not able to resolve GT: " + StringUtils.join(gtsStr, ","));
    }
    // NOTE(review): range(0, size() - 1) excludes the last index, so the
    // final genotype is never returned on this fallback path -- confirm
    // this is intentional and not an off-by-one.
    return IntStream.range(0, gtsStr.size() - 1).boxed().collect(Collectors.toList());
}
|
java
|
/**
 * Best-effort reflection hack: forces log4j 1.2's internal {@code java1}
 * flag to {@code false} on modern JVMs whose version string no longer
 * starts with "1." (e.g. "9", "10"), which log4j misparses.  Failures are
 * silently ignored.
 */
private static void tryForceNotJava1()
{
    String version = System.getProperty("java.version");
    // N.B. Log4j has issues with JVM versions written like "9" and "10"
    if (version == null || version.startsWith("1."))
    {
        return;
    }
    try
    {
        for (Field field : Loader.class.getDeclaredFields())
        {
            if ("java1".equals(field.getName()))
            {
                field.setAccessible(true);
                field.setBoolean(null, false);
            }
        }
    }
    catch (Throwable t)
    {
        // ignore, we must have been denied access. MDC may not work correctly due to log4j 1.2 limitations
    }
}
|
java
|
/**
 * Decodes a numpress "linear"-encoded byte array into doubles.
 *
 * <p>Assumed layout (inferred from the size checks -- confirm against the
 * numpress format spec): a fixed-point header read by
 * {@code decodeFixedPoint}, then two little-endian 32-bit ints at offsets
 * 8 and 12 holding the first two values; the remaining bytes are packed
 * deltas that are added to a linear extrapolation of the two preceding
 * values.</p>
 *
 * @param data     encoded bytes
 * @param dataSize number of valid bytes in {@code data}
 * @param result   output array receiving the decoded values
 * @return number of values decoded, or -1 when {@code data} is truncated
 */
public static int decodeLinear(
        byte[] data,
        int dataSize,
        double[] result
) {
    int ri = 2;
    long[] ints = new long[3];
    long extrapol;
    long y;
    IntDecoder dec = new IntDecoder(data, 16);

    // Need at least the fixed-point header.
    if (dataSize < 8) {
        return -1;
    }
    double fixedPoint = decodeFixedPoint(data);

    // First value: little-endian 32-bit int at offset 8.
    if (dataSize < 12) {
        return -1;
    }
    ints[1] = 0;
    for (int i = 0; i < 4; i++) {
        ints[1] = ints[1] | ((0xFFl & data[8 + i]) << (i * 8));
    }
    result[0] = ints[1] / fixedPoint;

    if (dataSize == 12) {
        return 1;
    }
    // Second value: little-endian 32-bit int at offset 12.
    if (dataSize < 16) {
        return -1;
    }
    ints[2] = 0;
    for (int i = 0; i < 4; i++) {
        ints[2] = ints[2] | ((0xFFl & data[12 + i]) << (i * 8));
    }
    result[1] = ints[2] / fixedPoint;

    while (dec.pos < dataSize) {
        // A trailing half-byte of 0x8 marks end-of-stream padding.
        if (dec.pos == (dataSize - 1) && dec.half) {
            if ((data[dec.pos] & 0xf) != 0x8) {
                break;
            }
        }
        // Shift the sliding window and decode the next delta.
        ints[0] = ints[1];
        ints[1] = ints[2];
        ints[2] = dec.next();
        // Linear extrapolation from the two previous values, corrected by
        // the stored delta.
        extrapol = ints[1] + (ints[1] - ints[0]);
        y = extrapol + ints[2];
        result[ri++] = y / fixedPoint;
        ints[2] = y;
    }
    return ri;
}
|
java
|
/**
 * Registers an event listener with this shared object and, when newly
 * added, queues the initial-state events for the client (initial data,
 * optional clear for non-persistent objects, then the current attributes).
 * Event order matters: the client expects INITIAL_DATA before any data.
 *
 * @param listener the listener to add
 * @return {@code true} when the listener was newly registered
 */
protected boolean register(IEventListener listener) {
    log.debug("register - listener: {}", listener);
    boolean registered = listeners.add(listener);
    if (registered) {
        listenerStats.increment();
        // prepare response for new client
        ownerMessage.addEvent(Type.CLIENT_INITIAL_DATA, null, null);
        if (!isPersistent()) {
            ownerMessage.addEvent(Type.CLIENT_CLEAR_DATA, null, null);
        }
        if (!attributes.isEmpty()) {
            ownerMessage.addEvent(new SharedObjectEvent(Type.CLIENT_UPDATE_DATA, null, getAttributes()));
        }
        // we call notifyModified here to send response if we're not in a beginUpdate block
        notifyModified();
    }
    return registered;
}
|
java
|
/**
 * {@inheritDoc}
 *
 * <p>Thin delegate: writes {@code instance} to the stream via
 * {@code serialize}.</p>
 */
@Override
public void serializeInstance(SerializationStreamWriter streamWriter, EntityType instance) throws SerializationException {
    serialize(streamWriter, instance);
}
|
python
|
def _humanize_bytes(num_bytes, precision=1):
    """
    Return a humanized string representation of a number of num_bytes.

    from:
    http://code.activestate.com/recipes/
    577081-humanized-representation-of-a-number-of-num_bytes/

    Uses the module-level ``ABBREVS`` table (largest factor first).
    Examples::

        _humanize_bytes(1)            -> '1 byte'
        _humanize_bytes(1024)         -> '1.0 kB'
        _humanize_bytes(1024 * 12342) -> '12.1 MB'
    """
    # Special-case zero and one so the unit reads naturally.
    if num_bytes == 0:
        return 'no bytes'
    if num_bytes == 1:
        return '1 byte'

    # Pick the largest factor that fits.
    factored_bytes = 0
    factor_suffix = 'bytes'
    for factor, suffix in ABBREVS:
        if num_bytes >= factor:
            factored_bytes = num_bytes / factor
            factor_suffix = suffix
            break

    # Exactly one whole unit: drop decimals ('1 MB' rather than '1.0 MB').
    if factored_bytes == 1:
        precision = 0

    return '{:.{prec}f} {}'.format(factored_bytes, factor_suffix,
                                   prec=precision)
|
java
|
/**
 * EMF-generated reflective setter.  For the VALUES feature the current
 * contents are replaced wholesale by the supplied collection; every other
 * feature is delegated to the superclass.  Do not hand-edit generated
 * switch bodies -- regenerate from the model instead.
 *
 * @param featureID the feature to set
 * @param newValue  the new value (a {@code Collection<? extends EObject>}
 *                  for the VALUES feature)
 */
@SuppressWarnings("unchecked")
@Override
public void eSet(int featureID, Object newValue)
{
    switch (featureID)
    {
        case TypesPackage.JVM_CUSTOM_ANNOTATION_VALUE__VALUES:
            getValues().clear();
            getValues().addAll((Collection<? extends EObject>)newValue);
            return;
    }
    super.eSet(featureID, newValue);
}
|
java
|
/**
 * Adds the given validators to the action being built.
 *
 * @param validators validators to register on the underlying action
 * @return this builder, for fluent chaining
 */
public T validator(MessageValidator<? extends ValidationContext> ... validators) {
    for (MessageValidator<? extends ValidationContext> validator : validators) {
        getAction().addValidator(validator);
    }
    return self;
}
|
java
|
/**
 * Stores the capacity for the named queue in the resource-manager
 * configuration.
 *
 * @param queue    the queue name
 * @param capacity the capacity value to record
 */
public void setCapacity(String queue, float capacity) {
    String property = toFullPropertyName(queue, CAPACITY_PROPERTY);
    rmConf.setFloat(property, capacity);
}
|
python
|
def write_multiple_files(args, base_dir, crawler):
    """Write to multiple output files and/or subdirectories.

    For each query: local files are converted directly; URLs are fetched
    (or crawled) into PART.html files and then either kept as HTML in a
    per-domain directory or converted to text/pdf.

    :param args: parsed CLI options; reads 'query', 'files', 'urls',
        'out', 'html', 'quiet', 'crawl', 'crawl_all'
    :param base_dir: directory to chdir back to after writing HTML files
    :param crawler: crawler used when --crawl/--crawl-all is set
    :returns: False when a URL fetch fails, True otherwise
    """
    for i, query in enumerate(args['query']):
        if query in args['files']:
            # Write files
            if args['out'] and i < len(args['out']):
                outfilename = args['out'][i]
            else:
                # Default: input filename minus its extension.
                outfilename = '.'.join(query.split('.')[:-1])
            write_files(args, [query], outfilename)
        elif query in args['urls']:
            # Scrape/crawl urls
            domain = utils.get_domain(query)
            if args['html']:
                # Create a directory to save PART.html files in
                # NOTE: changes the working directory; restored below.
                if not args['quiet']:
                    print('Storing html files in {0}/'.format(domain))
                utils.mkdir_and_cd(domain)
            if args['crawl'] or args['crawl_all']:
                # Crawl and save HTML files/image files to disk
                infilenames = crawler.crawl_links(query)
            else:
                raw_resp = utils.get_raw_resp(query)
                if raw_resp is None:
                    return False
                # Saves page as PART.html file
                prev_part_num = utils.get_num_part_files()
                utils.write_part_file(args, query, raw_resp)
                curr_part_num = prev_part_num + 1
                infilenames = utils.get_part_filenames(curr_part_num, prev_part_num)
            # Convert output or leave as PART.html files
            if args['html']:
                # HTML files have been written already, so return to base dir
                os.chdir(base_dir)
            else:
                # Write files to text or pdf
                if infilenames:
                    if args['out'] and i < len(args['out']):
                        outfilename = args['out'][i]
                    else:
                        outfilename = utils.get_outfilename(query, domain)
                    write_files(args, infilenames, outfilename)
                else:
                    sys.stderr.write('Failed to retrieve content from {0}.\n'
                                     .format(query))
    return True
|
java
|
/**
 * Signs and encrypts the payload for the given contacts (plus our own
 * account, so the message stays readable on the sender's other devices),
 * attaches the resulting OpenPGP element to the message, and sets the
 * usual OX/EME/store hints on the stanza.
 *
 * @param message  message stanza to decorate with the encrypted element
 * @param contacts intended recipients
 * @param payload  extension elements to sign and encrypt
 * @return metadata describing the encryption operation
 * @throws SmackException.NotLoggedInException if we are not authenticated
 * @throws IOException on errors reading key material
 * @throws PGPException on OpenPGP failures
 */
public OpenPgpMetadata addOxMessage(Message message, Set<OpenPgpContact> contacts, List<ExtensionElement> payload)
        throws SmackException.NotLoggedInException, IOException, PGPException {
    HashSet<OpenPgpContact> recipients = new HashSet<>(contacts);
    OpenPgpContact self = openPgpManager.getOpenPgpSelf();
    recipients.add(self);
    OpenPgpElementAndMetadata openPgpElementAndMetadata = signAndEncrypt(recipients, payload);
    message.addExtension(openPgpElementAndMetadata.getElement());
    // Set hints on message
    ExplicitMessageEncryptionElement.set(message,
            ExplicitMessageEncryptionElement.ExplicitMessageEncryptionProtocol.openpgpV0);
    StoreHint.set(message);
    setOXBodyHint(message);
    return openPgpElementAndMetadata.getMetadata();
}
|
python
|
def _get_lines_from_file(self, filename, lineno, context_lines, loader=None, module_name=None):
    """
    Returns context_lines before and after lineno from file.
    Returns (pre_context_lineno, pre_context, context_line, post_context).

    The source is obtained from ``loader.get_source`` when available,
    otherwise read from ``filename``; returns ``(None, [], None, [])``
    when the source cannot be obtained at all.
    """
    source = None
    # Prefer the import loader, which may have the source even for files
    # that no longer exist on disk (e.g. zipped packages).
    if loader is not None and hasattr(loader, "get_source"):
        try:
            source = loader.get_source(module_name)
        except ImportError:
            pass
        if source is not None:
            source = source.splitlines()
    if source is None:
        try:
            with open(filename, 'rb') as fp:
                source = fp.read().splitlines()
        except (OSError, IOError):
            pass
    if source is None:
        return None, [], None, []
    # If we just read the source from a file, or if the loader did not
    # apply tokenize.detect_encoding to decode the source into a Unicode
    # string, then we should do that ourselves.
    if isinstance(source[0], six.binary_type):
        encoding = 'ascii'
        for line in source[:2]:
            # File coding may be specified. Match pattern from PEP-263
            # (http://www.python.org/dev/peps/pep-0263/)
            match = re.search(br'coding[:=]\s*([-\w.]+)', line)
            if match:
                encoding = match.group(1).decode('ascii')
                break
        source = [six.text_type(sline, encoding, 'replace') for sline in source]
    # Slice out the surrounding context (clamped at the file start).
    lower_bound = max(0, lineno - context_lines)
    upper_bound = lineno + context_lines
    pre_context = source[lower_bound:lineno]
    context_line = source[lineno]
    post_context = source[lineno + 1:upper_bound]
    return lower_bound, pre_context, context_line, post_context
|
java
|
/**
 * Determines whether the given layout element may be merged for the user
 * represented by the principal.  Only {@code channel} elements are subject
 * to an authorization check; everything else is always mergeable.
 *
 * @param child the layout element being merged
 * @param ap    the principal whose render permission is checked
 * @return {@code true} when the element may be merged
 * @throws AuthorizationException if the permission check fails
 */
private static boolean mergeAllowed(Element child, IAuthorizationPrincipal ap)
        throws AuthorizationException {
    if (!"channel".equals(child.getTagName())) {
        return true;
    }
    String channelPublishId = child.getAttribute("chanID");
    return ap.canRender(channelPublishId);
}
|
python
|
def search_index_path(self, index=None, **options):
    """
    Builds a Yokozuna search index URL.

    :param index: optional name of a yz index
    :type index: string
    :param options: optional list of additional arguments
    :type options: dict
    :rtype URL string
    :raises RiakError: if this node does not support Yokozuna search
    """
    if not self.yz_wm_index:
        raise RiakError("Yokozuna search is unsupported by this Riak node")
    if index:
        # URL-encode the index name.  quote_plus returns the encoded
        # copy -- the previous code discarded its return value, so the
        # index was never actually escaped.
        index = quote_plus(index)
    return mkpath(self.yz_wm_index, "index", index, **options)
|
java
|
/**
 * Imports every Hive table of the given database, tallying how many
 * succeeded and logging a summary.
 *
 * @param databaseReferenceable Atlas reference for the database
 * @param databaseName          name of the Hive database
 * @param failOnError           whether a single table failure aborts the run
 * @return the number of tables successfully imported
 * @throws Exception when listing or importing fails fatally
 */
private int importTables(Referenceable databaseReferenceable, String databaseName, final boolean failOnError) throws Exception {
    List<String> hiveTables = hiveClient.getAllTables(databaseName);
    LOG.info("Importing tables {} for db {}", hiveTables.toString(), databaseName);

    int tablesImported = 0;
    for (String tableName : hiveTables) {
        tablesImported += importTable(databaseReferenceable, databaseName, tableName, failOnError);
    }

    if (tablesImported == hiveTables.size()) {
        LOG.info("Successfully imported all {} tables from {} ", tablesImported, databaseName);
    } else {
        LOG.error("Able to import {} tables out of {} tables from {}. Please check logs for import errors", tablesImported, hiveTables.size(), databaseName);
    }
    return tablesImported;
}
|
python
|
def _set_pyqtgraph_title(layout):
    """
    Private function to add a title to the first row of the window.
    Returns True if a Title is set.  Else, returns False.
    """
    size = pytplot.tplot_opt_glob.get('title_size')
    title = pytplot.tplot_opt_glob.get('title_text', '')
    if title != '':
        if size is not None:
            layout.addItem(LabelItem(title, size=size, color='k'), row=0, col=0)
        else:
            # No 'title_size' configured: fall back to LabelItem's default
            # size.  (The previous code referenced an undefined ``size``
            # local here and raised NameError.)
            layout.addItem(LabelItem(title, color='k'), row=0, col=0)
        return True
    return False
|
python
|
def add_entry_points(self, names):
    """
    adds `names` to the internal collection of entry points to track

    `names` can be a single object or an iterable but
    must be a string or iterable of strings.
    """
    # Normalize the input to a set of strings before merging it in.
    normalized = util.return_set(names)
    self.entry_point_names.update(normalized)
|
java
|
/**
 * Puts the value under the given key using a default {@link JsonConfig};
 * per the elementOpt contract, {@code null} values are ignored.
 *
 * @param key   key under which to store the value
 * @param value value to store; ignored when {@code null}
 * @return this object, for chaining
 */
public JSONObject elementOpt( String key, Object value ) {
    return elementOpt( key, value, new JsonConfig() );
}
|
python
|
def _wrap_result(self, result, block=None, obj=None):
    """
    Wrap a single result.

    Coerces a raw ndarray result back into a pandas object indexed like
    the selected object: timedelta blocks are converted back from their
    int64 nanosecond representation, 1-dim results become a Series, and
    higher-dim results are rebuilt with the block's columns.  Non-ndarray
    results are returned unchanged.
    """
    if obj is None:
        obj = self._selected_obj
    index = obj.index

    if isinstance(result, np.ndarray):

        # coerce if necessary
        if block is not None:
            if is_timedelta64_dtype(block.values.dtype):
                # timedeltas are computed on their int64 ns representation;
                # convert back to timedelta64 while preserving the shape
                from pandas import to_timedelta
                result = to_timedelta(
                    result.ravel(), unit='ns').values.reshape(result.shape)

        if result.ndim == 1:
            from pandas import Series
            return Series(result, index, name=obj.name)

        return type(obj)(result, index=index, columns=block.columns)
    return result
|
java
|
/**
 * Returns a collector that finds all maximal elements according to the
 * given comparator and collects them into a {@link List}.  Convenience
 * overload delegating to the two-argument {@code maxAll}.
 *
 * @param comparator comparator used to rank elements
 * @param <T>        the element type
 * @return a collector producing the list of maximal elements
 */
public static <T> Collector<T, ?, List<T>> maxAll(Comparator<? super T> comparator) {
    return maxAll(comparator, Collectors.toList());
}
|
java
|
/**
 * Fills {@code decorators} with the names of the decorators selected by
 * {@code ecflags} and returns how many were written.  The append order is
 * part of the protocol (xml escapes before newline conversions) -- do not
 * reorder the checks.
 *
 * @param ecflags    encoding/conversion flag bits
 * @param decorators output array, filled from index 0
 * @return number of decorator names written, or -1 when the flags are
 *         invalid (an unknown newline-bit combination, or both xml_text
 *         and xml_attr_content requested)
 */
public static int decoratorNames(int ecflags, byte[][] decorators) {
    // At most one newline decorator may be selected.
    switch (ecflags & NEWLINE_DECORATOR_MASK) {
        case UNIVERSAL_NEWLINE_DECORATOR:
        case CRLF_NEWLINE_DECORATOR:
        case CR_NEWLINE_DECORATOR:
        case 0:
            break;
        default:
            return -1;
    }
    // xml_text and xml_attr_content are mutually exclusive.
    if (((ecflags & XML_TEXT_DECORATOR) != 0) && ((ecflags & XML_ATTR_CONTENT_DECORATOR) != 0)) return -1;
    int numDecorators = 0;
    if ((ecflags & XML_TEXT_DECORATOR) != 0) decorators[numDecorators++] = "xml_text_escape".getBytes();
    if ((ecflags & XML_ATTR_CONTENT_DECORATOR) != 0) decorators[numDecorators++] = "xml_attr_content_escape".getBytes();
    if ((ecflags & XML_ATTR_QUOTE_DECORATOR) != 0) decorators[numDecorators++] = "xml_attr_quote".getBytes();
    if ((ecflags & CRLF_NEWLINE_DECORATOR) != 0) decorators[numDecorators++] = "crlf_newline".getBytes();
    if ((ecflags & CR_NEWLINE_DECORATOR) != 0) decorators[numDecorators++] = "cr_newline".getBytes();
    if ((ecflags & UNIVERSAL_NEWLINE_DECORATOR) != 0) decorators[numDecorators++] = "universal_newline".getBytes();
    return numDecorators;
}
|
java
|
/**
 * Returns the first commerce discount whose display date is earlier than
 * the given date and which has the given workflow status, ordered by the
 * supplied comparator, or {@code null} when nothing matches.  Generated
 * static delegate to the persistence layer.
 *
 * @param displayDate        exclusive upper bound on the display date
 * @param status             workflow status to match
 * @param orderByComparator  ordering of the candidate rows
 * @return the first matching discount, or {@code null}
 */
public static CommerceDiscount fetchByLtD_S_First(Date displayDate,
    int status, OrderByComparator<CommerceDiscount> orderByComparator) {
    return getPersistence()
               .fetchByLtD_S_First(displayDate, status, orderByComparator);
}
|
python
|
def create_vpn_connection(self, type, customer_gateway_id, vpn_gateway_id):
    """
    Create a new VPN Connection.

    :type type: str
    :param type: The type of VPN Connection. Currently only 'ipsec.1'
                 is supported

    :type customer_gateway_id: str
    :param customer_gateway_id: The ID of the customer gateway.

    :type vpn_gateway_id: str
    :param vpn_gateway_id: The ID of the VPN gateway.

    :rtype: The newly created VpnConnection
    :return: A :class:`boto.vpc.vpnconnection.VpnConnection` object
    """
    params = {
        'Type': type,
        'CustomerGatewayId': customer_gateway_id,
        'VpnGatewayId': vpn_gateway_id,
    }
    return self.get_object('CreateVpnConnection', params, VpnConnection)
|
java
|
/**
 * Returns a lightweight {@link QueueRef} wrapper for the given queue.
 * {@code null} and queues that are already references are returned
 * unchanged.
 *
 * @param queue the queue to wrap, may be {@code null}
 * @return a {@code QueueRef} for the queue, the queue itself, or {@code null}
 * @throws JMSException if the queue name cannot be read
 */
public static Queue asRef( Queue queue ) throws JMSException
{
    if (queue == null || queue instanceof QueueRef)
        return queue;

    return new QueueRef(queue.getQueueName());
}
|
java
|
/**
 * Looks a resource up first in the base class loader (which expects no
 * leading slash) and falls back to the parent implementation.
 *
 * @param path resource path; per the servlet contract it should begin
 *             with {@code "/"}
 * @return the resource URL, or {@code null} when not found
 */
@Override
public URL getResource( String path )
{
    URL url = null;
    if ( baseLoader != null )
    {
        // The classloader wants no leading slash.  Guard the substring:
        // the old unconditional substring(1) threw on an empty path and
        // silently mangled paths without a leading slash.
        String loaderPath = path.startsWith( "/" ) ? path.substring( 1 ) : path;
        url = baseLoader.getResource( loaderPath );
    }
    if ( url == null )
    {
        url = super.getResource( path );
    }
    return url;
}
|
java
|
/**
 * Checks whether a collection with the given name exists on the Solr
 * cluster.
 *
 * @param collectionName the collection name to look for
 * @return {@code true} if the collection exists
 * @throws SolrException wrapping any error from the collections API
 */
public boolean existsCollection(String collectionName) throws SolrException {
    try {
        List<String> collections = CollectionAdminRequest.listCollections(solrClient);
        // List.contains performs the same equals() comparison as the
        // previous manual loop.
        return collections.contains(collectionName);
    } catch (Exception e) {
        throw new SolrException(SolrException.ErrorCode.CONFLICT, e);
    }
}
|
java
|
/**
 * Logs in using the specified user credentials.
 *
 * <p>Fetches a login token and attempts to confirm the login; when the
 * token has gone stale ({@code NeedLoginTokenException}) a fresh token is
 * fetched and the login is retried exactly once.</p>
 *
 * @param username the user name
 * @param password the password
 * @throws LoginFailedException when the login ultimately fails; wraps the
 *         underlying IO or MediaWiki API error
 * @deprecated retained for backward compatibility; use the class's current
 *             login API instead (TODO(review): confirm the replacement
 *             method to reference here)
 */
@Deprecated
public void login(String username, String password)
        throws LoginFailedException {
    try {
        String token = fetchToken("login");
        try {
            this.confirmLogin(token, username, password);
        } catch (NeedLoginTokenException e) { // try once more
            token = fetchToken("login");
            this.confirmLogin(token, username, password);
        }
    } catch (IOException | MediaWikiApiErrorException e1) {
        throw new LoginFailedException(e1.getMessage(), e1);
    }
}
|
python
|
def save(self, filename, binary=True):
    """
    Writes a structured grid to disk.

    Parameters
    ----------
    filename : str
        Filename of grid to be written.  The file extension will select the
        type of writer to use.  ".vtk" will use the legacy writer, while
        ".vts" will select the VTK XML writer.

    binary : bool, optional
        Writes as a binary file by default.  Set to False to write ASCII.

    Notes
    -----
    Binary files write much faster than ASCII, but binary files written on
    one system may not be readable on other systems.  Binary can be used
    only with the legacy writer.
    """
    filename = os.path.abspath(os.path.expanduser(filename))
    # Select the writer from the actual file extension.  (The previous
    # substring test ``'.vtk' in filename`` also matched names such as
    # "data.vtk.bak" or a ".vtk" appearing in a directory name.)
    ext = os.path.splitext(filename)[1].lower()
    if ext == '.vtk':
        writer = vtk.vtkStructuredGridWriter()
        if binary:
            writer.SetFileTypeToBinary()
        else:
            writer.SetFileTypeToASCII()
    elif ext == '.vts':
        writer = vtk.vtkXMLStructuredGridWriter()
        if binary:
            writer.SetDataModeToBinary()
        else:
            writer.SetDataModeToAscii()
    else:
        # Fixed the missing space between the two concatenated fragments
        # of the original message ("or'.vtk'").
        raise Exception('Extension should be either ".vts" (xml) or '
                        '".vtk" (legacy)')
    # Write
    writer.SetFileName(filename)
    writer.SetInputData(self)
    writer.Write()
|
java
|
/**
 * Updates the modification timestamp of the LDP membership resource of the
 * node's container, when applicable.
 *
 * <p>Only containers declaring {@code ldp:membershipResource} are
 * considered: direct containers always propagate the touch, indirect
 * containers only when their inserted-content property is present on the
 * node.</p>
 *
 * @param node the node whose container's membership resource is touched
 * @param date timestamp to record
 * @param user user to record for the modification
 */
public static void touchLdpMembershipResource(final Node node, final Calendar date, final String user) {
    getContainingNode(node).filter(uncheck(parent -> parent.hasProperty(LDP_MEMBER_RESOURCE))).ifPresent(parent -> {
        try {
            final Optional<String> hasInsertedContentProperty = ldpInsertedContentProperty(node)
                    .flatMap(resourceToProperty(node.getSession())).filter(uncheck(node::hasProperty));
            if (parent.isNodeType(LDP_DIRECT_CONTAINER) ||
                    (parent.isNodeType(LDP_INDIRECT_CONTAINER) && hasInsertedContentProperty.isPresent())) {
                touch(parent.getProperty(LDP_MEMBER_RESOURCE).getNode(), date, user);
            }
        } catch (final javax.jcr.AccessDeniedException ex) {
            throw new AccessDeniedException(ex);
        } catch (final RepositoryException ex) {
            throw new RepositoryRuntimeException(ex);
        }
    });
}
|
java
|
/**
 * Creates a new, unsaved commerce account model instance with the given
 * primary key; the entity is not persisted until a subsequent add/update
 * call.  Transactions are disabled because no database work happens here.
 *
 * @param commerceAccountId primary key for the new entity
 * @return the new, in-memory commerce account
 */
@Override
@Transactional(enabled = false)
public CommerceAccount createCommerceAccount(long commerceAccountId) {
    return commerceAccountPersistence.create(commerceAccountId);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.