language (stringclasses, 2 values) | func_code_string (stringlengths, 63 to 466k)
---|---
python | def load_xml(self, filepath):
"""Loads the values of the configuration variables from an XML path."""
from os import path
import xml.etree.ElementTree as ET
#Make sure the file exists and then import it as XML and read the values out.
uxpath = path.expanduser(filepath)
if path.isfile(uxpath):
tree = ET.parse(uxpath)
vms("Parsing global settings from {}.".format(uxpath))
root = tree.getroot()
for child in root:
if child.tag == "var":
self._vardict[child.attrib["name"]] = child.attrib["value"] |
java | private void openCli(SessionContext context, Executor executor) {
CliClient cli = null;
try {
cli = new CliClient(context, executor);
// interactive CLI mode
if (options.getUpdateStatement() == null) {
cli.open();
}
// execute single update statement
else {
final boolean success = cli.submitUpdate(options.getUpdateStatement());
if (!success) {
throw new SqlClientException("Could not submit given SQL update statement to cluster.");
}
}
} finally {
if (cli != null) {
cli.close();
}
}
} |
java | private void finalizeSourceMap() throws IOException {
StringWriter writer = new StringWriter();
smGen.appendTo(writer, "" /* file name */); //$NON-NLS-1$
sourceMap = writer.toString();
// now add the sourcesContent field
try {
JSONObject obj = new JSONObject(sourceMap);
JSONArray sources = (JSONArray)obj.get("sources"); //$NON-NLS-1$
JSONArray sourcesContent = new JSONArray();
for (int i = 0; i < sources.length(); i++) {
String content = sourcesMap.get(sources.get(i));
sourcesContent.add(content != null ? content : ""); //$NON-NLS-1$
}
obj.put("sourcesContent", sourcesContent); //$NON-NLS-1$
StringBuffer sb = new StringBuffer(SOURCEMAP_XSSI_PREAMBLE);
sourceMap = sb.append(obj.toString()).toString();
} catch (JSONException e) {
throw new IOException(e);
}
} |
java | private long readLines(final RandomAccessFile reader) throws IOException {
final ByteArrayOutputStream lineBuf = new ByteArrayOutputStream(64);
long pos = reader.getFilePointer();
long rePos = pos; // position to re-read
int num;
boolean seenCR = false;
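// seenCR defers CR handling: CRLF counts as a single terminator, while a lone CR also ends a line (old Mac endings).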
// FIXME replace -1 with EOF when we're merging back into commons-io
while ((num = reader.read(this.inbuf)) != -1) {
for (int i = 0; i < num; i++) {
final byte ch = this.inbuf[i];
switch (ch) {
case '\n':
seenCR = false; // swallow CR before LF
this.listener.handle(new String(lineBuf.toByteArray(), this.cset));
lineBuf.reset();
rePos = pos + i + 1;
break;
case '\r':
if (seenCR) {
lineBuf.write('\r');
}
seenCR = true;
break;
default:
if (seenCR) {
seenCR = false; // swallow final CR
this.listener.handle(new String(lineBuf.toByteArray(), this.cset));
lineBuf.reset();
rePos = pos + i + 1;
}
lineBuf.write(ch);
}
}
pos = reader.getFilePointer();
}
IOUtils.closeQuietly(lineBuf); // not strictly necessary
reader.seek(rePos); // Ensure we can re-read if necessary
return rePos;
} |
java | public void removeComponent(Object screenField)
{
if (this.getNextConverter() != null)
{
FieldInfo field = this.getField();
this.getNextConverter().removeComponent(screenField);
// **START SPECIAL CODE** Handle the special case where one converter is shared by two SFields:
// don't free this converter if another SField is still pointing to it.
if (field != null)
{
for (int i = 0; ; i++)
{
Object screenField2 = field.getComponent(i);
if (!(screenField2 instanceof ScreenComponent))
break; // End of loop
if (screenField2 != screenField)
{ // Okay, this field has another SField, go through the converters and make sure this one is not referenced.
Convert converter = ((ScreenComponent)screenField2).getConverter();
while ((converter != null) && (converter != field))
{
if (converter == this)
return; // DO NOT Free this converter, it is referenced by another ScreenField!
if (converter instanceof LinkedConverter)
converter = ((LinkedConverter)converter).getNextConverter();
else
converter = null;
}
}
}
}
}
super.removeComponent(screenField); // delete this
} |
python | def date_decoder(dic):
"""Add python types decoding. See JsonEncoder"""
if '__date__' in dic:
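# Rebuild a datetime.date from the remaining keys (typically year, month, day).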
try:
d = datetime.date(**{c: v for c, v in dic.items() if not c == "__date__"})
except (TypeError, ValueError):
raise json.JSONDecodeError("Corrupted date format !", str(dic), 1)
elif '__datetime__' in dic:
try:
d = datetime.datetime(**{c: v for c, v in dic.items() if not c == "__datetime__"})
except (TypeError, ValueError):
raise json.JSONDecodeError("Corrupted datetime format !", str(dic), 1)
else:
return dic
return d |
python | def metrics_asserted_pct(self):
"""
Return the metrics assertion coverage
"""
num_metrics = len(self._metrics)
num_asserted = len(self._asserted)
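# With no metrics declared, coverage is vacuously 100%; any stray assertion drops it to 0.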
if num_metrics == 0:
if num_asserted == 0:
return 100
else:
return 0
return num_asserted / num_metrics * 100 |
java | @Override
public CreateQualificationTypeResult createQualificationType(CreateQualificationTypeRequest request) {
request = beforeClientExecution(request);
return executeCreateQualificationType(request);
} |
python | def run_shell(cmd: str,
out: Optional[Union[TeeCapture, IO[str]]] = sys.stdout,
err: Optional[Union[TeeCapture, IO[str]]] = sys.stderr,
raise_on_fail: bool = True,
log_run_to_stderr: bool = True,
**kwargs
) -> CommandOutput:
"""Invokes a shell command and waits for it to finish.
Args:
cmd: The command line string to execute, e.g. "echo dog | cat > file".
out: Where to write the process' stdout. Defaults to sys.stdout. Can be
anything accepted by print's 'file' parameter, or None if the
output should be dropped, or a TeeCapture instance. If a TeeCapture
instance is given, the first element of the returned tuple will be
the captured output.
err: Where to write the process' stderr. Defaults to sys.stderr. Can be
anything accepted by print's 'file' parameter, or None if the
output should be dropped, or a TeeCapture instance. If a TeeCapture
instance is given, the second element of the returned tuple will be
the captured error output.
raise_on_fail: If the process returns a non-zero error code
and this flag is set, a CalledProcessError will be raised.
Otherwise the return code is the third element of the returned
tuple.
log_run_to_stderr: Determines whether the fact that this shell command
was executed is logged to sys.stderr or not.
**kwargs: Extra arguments for asyncio.create_subprocess_shell, such as
a cwd (current working directory) argument.
Returns:
A (captured output, captured error output, return code) triplet. The
captured outputs will be None if the out or err parameters were not set
to an instance of TeeCapture.
Raises:
subprocess.CalledProcessError: The process returned a non-zero error
code and raise_on_fail was set.
"""
if log_run_to_stderr:
print('shell:', cmd, file=sys.stderr)
result = asyncio.get_event_loop().run_until_complete(
_async_wait_for_process(
asyncio.create_subprocess_shell(
cmd,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE,
**kwargs),
out,
err))
if raise_on_fail and result[2]:
raise subprocess.CalledProcessError(result[2], cmd)
return result |
java | public void init(EntityConfig config) {
this.source = (Phrase) config.getValue(SOURCE);
this.header_name = (Phrase) config.getValue(HEADER_NAME);
} |
java | static private Object effectiveParamValue(
Map<String, Object> parameters,
String paramName)
{
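// Lookup is case-insensitive; an explicitly supplied value wins over the defaults.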
String upper = paramName.toUpperCase();
Object value = parameters.get(upper);
if (value != null)
{
return value;
}
value = defaultParameters.get(upper);
if (value != null)
{
return value;
}
logger.debug("Unknown Common Parameter: {}", paramName);
return null;
} |
java | public int compareTo(Day object)
{
if (object == null)
throw new IllegalArgumentException("day cannot be null");
return localDate.compareTo(object.localDate);
} |
java | public static boolean branchExists(String branch, String baseUrl) {
CommandLine cmdLine = new CommandLine(SVN_CMD);
cmdLine.addArgument(CMD_LOG);
cmdLine.addArgument(OPT_XML);
addDefaultArguments(cmdLine, null, null);
cmdLine.addArgument("-r");
cmdLine.addArgument("HEAD:HEAD");
cmdLine.addArgument(baseUrl + branch);
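// Probe the branch URL with "svn log -r HEAD:HEAD"; if the command succeeds the branch exists.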
try (InputStream inStr = ExecutionHelper.getCommandResult(cmdLine, new File("."), 0, 120000)) {
return true;
} catch (IOException e) {
log.log(Level.FINE, "Branch " + branch + " not found or other error", e);
return false;
}
} |
python | def as_unicode(obj, encoding=convert.LOCALE, pretty=False):
"""
Represent any object as a <unicode> string (Python 2.7) or a <str> string (Python 3).
:param obj: any object
:type encoding: str
:param encoding: codec for encoding unicode strings
(locale.getpreferredencoding() by default)
:type pretty: bool
:param pretty: pretty print
:rtype: unicode
:return: any object as unicode string
"""
return convert.convert(obj, encoding, 0 if pretty else None) |
python | def deploy_project():
"""
Deploy to the project directory in the virtualenv
"""
project_root = '/'.join([deployment_root(),'env',env.project_fullname,'project'])
local_dir = os.getcwd()
if env.verbosity:
print env.host,"DEPLOYING project", env.project_fullname
#Exclude a few things that we don't want deployed as part of the project folder
rsync_exclude = ['local_settings*','*.pyc','*.log','.*','/build','/dist','/media*','/static*','/www','/public','/template*']
#make site local settings if they don't already exist
_make_local_sitesettings()
created = deploy_files(local_dir, project_root, rsync_exclude=rsync_exclude)
if not env.patch:
#hook the project into sys.path
pyvers = run('python -V').split(' ')[1].split('.')[0:2] #Python x.x.x
sitepackages = ''.join(['lib/python',pyvers[0],'.',pyvers[1],'/site-packages'])
link_name = '/'.join([deployment_root(),'env',env.project_fullname,sitepackages,env.project_package_name])
target = '/'.join([project_root,env.project_package_name])
run(' '.join(['ln -s',target,link_name]))
#make sure manage.py has exec permissions
managepy = '/'.join([target,'sitesettings','manage.py'])
if exists(managepy):
sudo('chmod ugo+x %s'% managepy)
return created |
java | private <T> T mergeObjectEncodedAsGroup(T value, final Schema<T> schema) throws IOException
{
// if (recursionDepth >= recursionLimit) {
// throw ProtobufException.recursionLimitExceeded();
// }
// ++recursionDepth;
if (value == null)
{
value = schema.newMessage();
}
schema.mergeFrom(this, value);
if (!schema.isInitialized(value))
{
throw new UninitializedMessageException(value, schema);
}
// handling is in #readFieldNumber
checkLastTagWas(0);
// --recursionDepth;
return value;
} |
python | def download(link, out):
"""
Downloading data from websites, such as previously acquired physiological signals, is an extremely relevant task,
taking into consideration that, without data, processing cannot take place.
With the current function a file can be easily downloaded through the "link" input.
----------
Parameters
----------
link : str
String with the url that contains the file to be downloaded.
out : str
Name of the downloaded file (with extension). A destination path can also be included.
"""
# [Source: https://stackoverflow.com/questions/7243750/download-file-from-web-in-python-3]
r = requests.get(link)
with open(out, 'wb') as outfile:
outfile.write(r.content) |
java | synchronized void closeSocket() {
@Nullable SocketChannel clientChannel = this.clientChannel;
if (clientChannel != null) {
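// Shut down input and output separately so the peer sees an orderly half-close before the channel is closed.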
try {
clientChannel.socket().shutdownInput();
} catch (ClosedChannelException ignored) {
} catch (IOException e) {
Jvm.debug().on(getClass(), e);
}
try {
clientChannel.socket().shutdownOutput();
} catch (ClosedChannelException ignored) {
} catch (IOException e) {
Jvm.debug().on(getClass(), e);
}
Closeable.closeQuietly(clientChannel);
this.clientChannel = null;
if (LOG.isDebugEnabled())
Jvm.debug().on(getClass(), "closing", new StackTrace("only added for logging - please ignore !"));
@NotNull final TcpSocketConsumer tcpSocketConsumer = this.tcpSocketConsumer;
tcpSocketConsumer.tid = 0;
tcpSocketConsumer.omap.clear();
onDisconnected();
}
} |
java | public static <T extends MethodDescription> ElementMatcher.Junction<T> takesGenericArguments(Type... type) {
return takesGenericArguments(new TypeList.Generic.ForLoadedTypes(type));
} |
python | def attachIterator(self, login, tableName, setting, scopes):
"""
Parameters:
- login
- tableName
- setting
- scopes
"""
self.send_attachIterator(login, tableName, setting, scopes)
self.recv_attachIterator() |
java | public static <T extends Object> T[] insert (T[] values, T value, int index)
{
@SuppressWarnings("unchecked")
T[] nvalues = (T[])Array.newInstance(values.getClass().getComponentType(), values.length+1);
if (index > 0) {
System.arraycopy(values, 0, nvalues, 0, index);
}
nvalues[index] = value;
if (index < values.length) {
System.arraycopy(values, index, nvalues, index+1, values.length-index);
}
return nvalues;
} |
java | public List<NamedAttributeNode<NamedEntityGraph<T>>> getAllNamedAttributeNode()
{
List<NamedAttributeNode<NamedEntityGraph<T>>> list = new ArrayList<NamedAttributeNode<NamedEntityGraph<T>>>();
List<Node> nodeList = childNode.get("named-attribute-node");
for(Node node: nodeList)
{
NamedAttributeNode<NamedEntityGraph<T>> type = new NamedAttributeNodeImpl<NamedEntityGraph<T>>(this, "named-attribute-node", childNode, node);
list.add(type);
}
return list;
} |
python | def load_import_keychain( cls, working_dir, namespace_id ):
"""
Get an import keychain from disk.
Return None if it doesn't exist.
"""
# do we have a cached one on disk?
cached_keychain = os.path.join(working_dir, "%s.keychain" % namespace_id)
if os.path.exists( cached_keychain ):
log.debug("Load import keychain '%s'" % cached_keychain)
child_attrs = []
try:
lines = []
with open(cached_keychain, "r") as f:
lines = f.readlines()
child_attrs = [l.strip() for l in lines]
log.debug("Loaded cached import keychain for '%s'" % namespace_id)
return child_attrs
except Exception as e:
log.exception(e)
log.error("FATAL: uncaught exception loading the import keychain")
os.abort()
else:
log.debug("No import keychain at '%s'" % cached_keychain)
return None |
python | def build_pwm():
""" Builds source with Python 2.7 and 3.2, and tests import """
with cd("/tmp/source/c_pwm"):
test = "import _PWM; print(_PWM.VERSION)"
run("make py2.7")
run('sudo python2.7 -c "%s"' % test)
run("cp _PWM.so ../RPIO/PWM/")
run("mv _PWM.so ../RPIO/PWM/_PWM27.so")
run("make py3.2")
run('python3.2 -c "%s"' % test)
run("mv _PWM.so ../RPIO/PWM/_PWM32.so") |
python | def HandleExceptionsAndRebuildHttpConnections(retry_args):
"""Exception handler for http failures.
This catches known failures and rebuilds the underlying HTTP connections.
Args:
retry_args: An ExceptionRetryArgs tuple.
"""
# If the server indicates how long to wait, use that value. Otherwise,
# calculate the wait time on our own.
retry_after = None
# Transport failures
if isinstance(retry_args.exc, (http_client.BadStatusLine,
http_client.IncompleteRead,
http_client.ResponseNotReady)):
logging.debug('Caught HTTP error %s, retrying: %s',
type(retry_args.exc).__name__, retry_args.exc)
elif isinstance(retry_args.exc, socket.error):
logging.debug('Caught socket error, retrying: %s', retry_args.exc)
elif isinstance(retry_args.exc, socket.gaierror):
logging.debug(
'Caught socket address error, retrying: %s', retry_args.exc)
elif isinstance(retry_args.exc, socket.timeout):
logging.debug(
'Caught socket timeout error, retrying: %s', retry_args.exc)
elif isinstance(retry_args.exc, httplib2.ServerNotFoundError):
logging.debug(
'Caught server not found error, retrying: %s', retry_args.exc)
elif isinstance(retry_args.exc, ValueError):
# oauth2client tries to JSON-decode the response, which can result
# in a ValueError if the response was invalid. Until that is fixed in
# oauth2client, need to handle it here.
logging.debug('Response content was invalid (%s), retrying',
retry_args.exc)
elif (isinstance(retry_args.exc, TokenRefreshError) and
hasattr(retry_args.exc, 'status') and
(retry_args.exc.status == TOO_MANY_REQUESTS or
retry_args.exc.status >= 500)):
logging.debug(
'Caught transient credential refresh error (%s), retrying',
retry_args.exc)
elif isinstance(retry_args.exc, exceptions.RequestError):
logging.debug('Request returned no response, retrying')
# API-level failures
elif isinstance(retry_args.exc, exceptions.BadStatusCodeError):
logging.debug('Response returned status %s, retrying',
retry_args.exc.status_code)
elif isinstance(retry_args.exc, exceptions.RetryAfterError):
logging.debug('Response returned a retry-after header, retrying')
retry_after = retry_args.exc.retry_after
else:
raise retry_args.exc
RebuildHttpConnections(retry_args.http)
logging.debug('Retrying request to url %s after exception %s',
retry_args.http_request.url, retry_args.exc)
time.sleep(
retry_after or util.CalculateWaitForRetry(
retry_args.num_retries, max_wait=retry_args.max_retry_wait)) |
java | @Override
public Tensor forward() {
Tensor x = modInX.getOutput();
y = new Tensor(x); // copy
y.exp();
return y;
} |
java | @SubscribeEvent
public void onGetCollisionBoxes(GetCollisionBoxesEvent event)
// public void getCollisionBoxes(World world, AxisAlignedBB mask, List<AxisAlignedBB> list, Entity entity)
{
//no mask, no need to check for collision
if (event.getAabb() == null)
return;
for (Chunk chunk : ChunkBlockHandler.getAffectedChunks(event.getWorld(), event.getAabb()))
collisionRegistry.processCallbacks(chunk, event.getAabb(), event.getCollisionBoxesList());
} |
java | @Override
public void internalWrite(Server server, Query query, ImmutableList<Result> results) throws Exception {
String gatewayMessage = getGatewayMessage(results);
// message won't be returned if there are no numeric values in the query results
if (gatewayMessage != null) {
logger.info(gatewayMessage);
doSend(gatewayMessage);
}
} |
java | static long limitExpiryToMaxLinger(long now, long _maxLinger, long _requestedExpiryTime, boolean _sharpExpiryEnabled) {
if (_sharpExpiryEnabled && _requestedExpiryTime > ExpiryPolicy.REFRESH && _requestedExpiryTime < ExpiryPolicy.ETERNAL) {
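// A negative point in time is presumably the cache2k convention for requesting sharp expiry.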
_requestedExpiryTime = -_requestedExpiryTime;
}
return Expiry.mixTimeSpanAndPointInTime(now, _maxLinger, _requestedExpiryTime);
} |
python | def delete(self, cartname):
"""
`cartname` - name of the cart to delete
Delete a cart both from your local filesystem and the mongo database
"""
cart = juicer.common.Cart.Cart(cart_name=cartname)
cart.implode(self._defaults['start_in']) |
java | public void setColor(final Color COLOR) {
if (null == color) {
_color = COLOR;
fireSectionEvent(UPDATE_EVENT);
} else {
color.set(COLOR);
}
} |
java | public FilterSpec addExpression(FilterSpec expr) {
if (expressions == null) {
expressions = new ArrayList<>();
}
expressions.add(expr);
return this;
} |
java | @Override
public void topPosition(BaseCell cell) {
if (cell != null) {
int pos = mGroupBasicAdapter.getComponents().indexOf(cell);
if (pos > 0) {
VirtualLayoutManager lm = getLayoutManager();
View view = lm.findViewByPosition(pos);
if (view != null) {
int top = lm.getDecoratedTop(view);
RecyclerView recyclerView = getContentView();
if (recyclerView != null) {
recyclerView.scrollBy(0, top);
}
} else {
RecyclerView recyclerView = getContentView();
if (recyclerView != null) {
recyclerView.scrollToPosition(pos);
}
}
}
}
} |
python | def rollback():
"""
Rolls back to previous release
"""
init_tasks()
run_hook("before_rollback")
# Remove current version
current_release = paths.get_current_release_path()
if current_release:
env.run("rm -rf %s" % current_release)
# Restore previous version
old_release = paths.get_current_release_name()
if old_release:
paths.symlink(paths.get_source_path(old_release),
paths.get_current_path())
run_hook("rollback")
run_hook("after_rollback")
logger.info("Rollback complete") |
python | def match(self, item):
""" Return True if filter matches item.
"""
val = getattr(item, self._name) or False
return bool(val) is self._value |
python | def __looks_like_html(response):
"""Guesses entity type when Content-Type header is missing.
Since Content-Type is not strictly required, some servers leave it out.
"""
text = response.text.lstrip().lower()
return text.startswith('<html') or text.startswith('<!doctype') |
python | def _get_weight_size(self, data, n_local_subj):
"""Calculate the size of weight for this process
Parameters
----------
data : a list of 2D array, each in shape [n_voxel, n_tr]
The fMRI data from multi-subject.
n_local_subj : int
Number of subjects allocated to this process.
Returns
-------
weight_size : 1D array
The size of total subject weight on this process.
local_weight_offset : 1D array
Number of elements away from the first element
in the combined weight array at which to begin
the new, segmented array for a subject
"""
weight_size = np.zeros(1).astype(int)
local_weight_offset = np.zeros(n_local_subj).astype(int)
for idx, subj_data in enumerate(data):
if idx > 0:
local_weight_offset[idx] = weight_size[0]
weight_size[0] += self.K * subj_data.shape[1]
return weight_size, local_weight_offset |
java | private static <T extends MPBase> JsonObject generatePayload(HttpMethod httpMethod, T resource) {
JsonObject payload = null;
if (httpMethod.equals(HttpMethod.POST) ||
(httpMethod.equals(HttpMethod.PUT) && resource._lastKnownJson == null)) {
payload = MPCoreUtils.getJsonFromResource(resource);
} else if (httpMethod.equals(HttpMethod.PUT)) {
JsonObject actualJson = MPCoreUtils.getJsonFromResource(resource);
Type mapType = new TypeToken<Map<String, Object>>(){}.getType();
Gson gson = new Gson();
Map<String, Object> oldMap = gson.fromJson(resource._lastKnownJson, mapType);
Map<String, Object> newMap = gson.fromJson(actualJson, mapType);
MapDifference<String, Object> mapDifferences = Maps.difference(oldMap, newMap);
payload = new JsonObject();
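// Build a diff payload: only fields that changed, plus fields new on the right side, are sent.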
for (Map.Entry<String, MapDifference.ValueDifference<Object>> entry : mapDifferences.entriesDiffering().entrySet()) {
if (entry.getValue().rightValue() instanceof LinkedTreeMap) {
JsonElement jsonObject = gson.toJsonTree(entry.getValue().rightValue()).getAsJsonObject();
payload.add(entry.getKey(), jsonObject);
} else {
if (entry.getValue().rightValue() instanceof Boolean) {
payload.addProperty(entry.getKey(), (Boolean)entry.getValue().rightValue());
} else if (entry.getValue().rightValue() instanceof Number) {
payload.addProperty(entry.getKey(), (Number)entry.getValue().rightValue());
} else {
payload.addProperty(entry.getKey(), entry.getValue().rightValue().toString());
}
}
}
for (Map.Entry<String, Object> entry : mapDifferences.entriesOnlyOnRight().entrySet()) {
if (entry.getValue() instanceof Boolean) {
payload.addProperty(entry.getKey(), (Boolean)entry.getValue());
} else if (entry.getValue() instanceof Number) {
payload.addProperty(entry.getKey(), (Number)entry.getValue());
} else {
payload.addProperty(entry.getKey(), entry.getValue().toString());
}
}
}
return payload;
} |
python | def as_dict(self):
"""
Json-serializable dict representation of COHP.
"""
d = {"@module": self.__class__.__module__,
"@class": self.__class__.__name__,
"are_coops": self.are_coops,
"efermi": self.efermi,
"energies": self.energies.tolist(),
"COHP": {str(spin): pops.tolist()
for spin, pops in self.cohp.items()}}
if self.icohp:
d["ICOHP"] = {str(spin): pops.tolist()
for spin, pops in self.icohp.items()}
return d |
java | private void readObject(java.io.ObjectInputStream s)
throws IOException, ClassNotFoundException
{
// Read in the length, threshold, and loadfactor
s.defaultReadObject();
// Read the original length of the array and number of elements
int origlength = s.readInt();
int elements = s.readInt();
// Compute new size with a bit of room (5%) to grow, but
// no larger than the original size. Make the length
// odd if it's large enough; this helps distribute the entries.
// Guard against the length ending up zero, which is not valid.
int length = (int)(elements * loadFactor) + (elements / 20) + 3;
if (length > elements && (length & 1) == 0)
length--;
if (origlength > 0 && length > origlength)
length = origlength;
table = new FastHashtableEntry[length];
count = 0;
// Read the number of elements and then all the key/value objects
for (; elements > 0; elements--) {
Object key = s.readObject();
Object value = s.readObject();
put(key, value);
}
} |
python | def set_inode(self, ino):
# type: (inode.Inode) -> None
'''
A method to set the Inode associated with this El Torito Entry.
Parameters:
ino - The Inode object corresponding to this entry.
Returns:
Nothing.
'''
if not self._initialized:
raise pycdlibexception.PyCdlibInternalError('El Torito Entry not yet initialized')
self.inode = ino |
java | private static StateUpdater createCheck(Cursor cursor, SelectorModel model, AtomicReference<StateUpdater> start)
{
return (extrp, current) ->
{
if (model.isEnabled() && cursor.getClick() == 0)
{
return start.get();
}
return current;
};
} |
python | def get_offset(self, x, y):
"""
Computes how far away and at what angle a coordinate is
located.
Distance is returned in feet, angle is returned in degrees
:returns: distance,angle offset of the given x,y coordinate
.. versionadded:: 2018.1.7
"""
with self._lock:
dx = self.x - x
dy = self.y - y
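# hypot/atan2 turn the (dx, dy) vector into a straight-line distance and a bearing.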
distance = math.hypot(dx, dy)
angle = math.atan2(dy, dx)
return distance, math.degrees(angle) |
python | def read_tcp(self, length):
"""Read Transmission Control Protocol (TCP).
Structure of TCP header [RFC 793]:
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Source Port | Destination Port |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Sequence Number |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Acknowledgement Number |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Data | |U|A|P|R|S|F| |
| Offset| Reserved |R|C|S|S|Y|I| Window |
| | |G|K|H|T|N|N| |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Checksum | Urgent Pointer |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Options | Padding |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| data |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
Octets Bits Name Description
0 0 tcp.srcport Source Port
2 16 tcp.dstport Destination Port
4 32 tcp.seq Sequence Number
8 64 tcp.ack Acknowledgement Number (if ACK set)
12 96 tcp.hdr_len Data Offset
12 100 - Reserved (must be zero)
12 103 tcp.flags.ns ECN Concealment Protection (NS)
13 104 tcp.flags.cwr Congestion Window Reduced (CWR)
13 105 tcp.flags.ece ECN-Echo (ECE)
13 106 tcp.flags.urg Urgent (URG)
13 107 tcp.flags.ack Acknowledgement (ACK)
13 108 tcp.flags.psh Push Function (PSH)
13 109 tcp.flags.rst Reset Connection (RST)
13 110 tcp.flags.syn Synchronize Sequence Numbers (SYN)
13 111 tcp.flags.fin Last Packet from Sender (FIN)
14 112 tcp.window_size Size of Receive Window
16 128 tcp.checksum Checksum
18 144 tcp.urgent_pointer Urgent Pointer (if URG set)
20 160 tcp.opt TCP Options (if data offset > 5)
"""
if length is None:
length = len(self)
_srcp = self._read_unpack(2)
_dstp = self._read_unpack(2)
_seqn = self._read_unpack(4)
_ackn = self._read_unpack(4)
_lenf = self._read_binary(1)
_flag = self._read_binary(1)
_wins = self._read_unpack(2)
_csum = self._read_fileng(2)
_urgp = self._read_unpack(2)
tcp = dict(
srcport=_srcp,
dstport=_dstp,
seq=_seqn,
ack=_ackn,
hdr_len=int(_lenf[:4], base=2) * 4,
flags=dict(
ns=True if int(_lenf[7]) else False,
cwr=True if int(_flag[0]) else False,
ece=True if int(_flag[1]) else False,
urg=True if int(_flag[2]) else False,
ack=True if int(_flag[3]) else False,
psh=True if int(_flag[4]) else False,
rst=True if int(_flag[5]) else False,
syn=True if int(_flag[6]) else False,
fin=True if int(_flag[7]) else False,
),
window_size=_wins,
checksum=_csum,
urgent_pointer=_urgp,
)
# packet type flags
self._syn = True if int(_flag[6]) else False
self._ack = True if int(_flag[3]) else False
_hlen = tcp['hdr_len']
_optl = _hlen - 20
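# Options are present only when the data offset exceeds the 20-byte minimum header.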
if _optl:
options = self._read_tcp_options(_optl)
tcp['opt'] = options[0] # tuple of option acronyms
tcp.update(options[1]) # merge option info to buffer
length -= _hlen
tcp['packet'] = self._read_packet(header=_hlen, payload=length)
return self._decode_next_layer(tcp, None, length) |
java | public void generateArchetypesFromGitRepoList(File file, File outputDir, List<String> dirs) throws IOException {
File cloneParentDir = new File(outputDir, "../git-clones");
if (cloneParentDir.exists()) {
Files.recursiveDelete(cloneParentDir);
}
Properties properties = new Properties();
try (FileInputStream is = new FileInputStream(file)) {
properties.load(is);
}
for (Map.Entry<Object, Object> entry : properties.entrySet()) {
LinkedList<String> values = new LinkedList<>(Arrays.asList(((String) entry.getValue()).split("\\|")));
String gitrepo = values.removeFirst();
String tag = values.isEmpty() ? null : values.removeFirst();
generateArchetypeFromGitRepo(outputDir, dirs, cloneParentDir, (String)entry.getKey(), gitrepo, tag);
}
} |
python | def com_google_fonts_check_name_license_url(ttFont, familyname):
""""License URL matches License text on name table?"""
from fontbakery.constants import PLACEHOLDER_LICENSING_TEXT
LEGACY_UFL_FAMILIES = ["Ubuntu", "UbuntuCondensed", "UbuntuMono"]
LICENSE_URL = {
'OFL.txt': 'http://scripts.sil.org/OFL',
'LICENSE.txt': 'http://www.apache.org/licenses/LICENSE-2.0',
'UFL.txt': 'https://www.ubuntu.com/legal/terms-and-policies/font-licence'
}
LICENSE_NAME = {
'OFL.txt': 'Open Font',
'LICENSE.txt': 'Apache',
'UFL.txt': 'Ubuntu Font License'
}
detected_license = False
for license in ['OFL.txt', 'LICENSE.txt', 'UFL.txt']:
placeholder = PLACEHOLDER_LICENSING_TEXT[license]
for nameRecord in ttFont['name'].names:
string = nameRecord.string.decode(nameRecord.getEncoding())
if nameRecord.nameID == NameID.LICENSE_DESCRIPTION and\
string == placeholder:
detected_license = license
break
if detected_license == "UFL.txt" and familyname not in LEGACY_UFL_FAMILIES:
yield FAIL, Message("ufl",
("The Ubuntu Font License is only acceptable on"
" the Google Fonts collection for legacy font"
" families that already adopted such license."
" New Families should use eigther Apache or"
" Open Font License."))
else:
found_good_entry = False
if detected_license:
failed = False
expected = LICENSE_URL[detected_license]
for nameRecord in ttFont['name'].names:
if nameRecord.nameID == NameID.LICENSE_INFO_URL:
string = nameRecord.string.decode(nameRecord.getEncoding())
if string == expected:
found_good_entry = True
else:
failed = True
yield FAIL, Message("licensing-inconsistency",
("Licensing inconsistency in name table"
" entries! NameID={} (LICENSE DESCRIPTION)"
" indicates {} licensing, but NameID={}"
" (LICENSE URL) has '{}'. Expected: '{}'"
"").format(NameID.LICENSE_DESCRIPTION,
LICENSE_NAME[detected_license],
NameID.LICENSE_INFO_URL,
string, expected))
if not found_good_entry:
yield FAIL, Message("no-license-found",
("A known license URL must be provided in the"
" NameID {} (LICENSE INFO URL) entry."
" Currently accepted licenses are Apache or"
" Open Font License. For a small set of legacy"
" families the Ubuntu Font License may be"
" acceptable as well."
"").format(NameID.LICENSE_INFO_URL))
else:
if failed:
yield FAIL, Message("bad-entries",
("Even though a valid license URL was seen in"
" NAME table, there were also bad entries."
" Please review NameIDs {} (LICENSE DESCRIPTION)"
" and {} (LICENSE INFO URL)."
"").format(NameID.LICENSE_DESCRIPTION,
NameID.LICENSE_INFO_URL))
else:
yield PASS, "Font has a valid license URL in NAME table." |
python | def T(self):
""" Looks for the temperature in the catalogue, if absent it calculates it using calcTemperature()
:return: planet temperature
"""
paramTemp = self.getParam('temperature')
if paramTemp is not np.nan:
return paramTemp
elif ed_params.estimateMissingValues:
self.flags.addFlag('Calculated Temperature')
return self.calcTemperature()
else:
return np.nan |
java | private void writeText(String value) {
if(this.rtfParser.isNewGroup()) {
this.rtfDoc.add(new RtfDirectContent("{"));
this.rtfParser.setNewGroup(false);
}
if(value.length() > 0) {
this.rtfDoc.add(new RtfDirectContent(value));
}
} |
python | def _getbugs(self, idlist, permissive,
include_fields=None, exclude_fields=None, extra_fields=None):
"""
Return a list of dicts of full bug info for each given bug id.
bug ids that couldn't be found will return None instead of a dict.
"""
oldidlist = idlist
idlist = []
for i in oldidlist:
try:
idlist.append(int(i))
except ValueError:
# String aliases can be passed as well
idlist.append(i)
extra_fields = self._listify(extra_fields or [])
extra_fields += self._getbug_extra_fields
getbugdata = {"ids": idlist}
if permissive:
getbugdata["permissive"] = 1
getbugdata.update(self._process_include_fields(
include_fields, exclude_fields, extra_fields))
r = self._proxy.Bug.get(getbugdata)
if self._check_version(4, 0):
bugdict = dict([(b['id'], b) for b in r['bugs']])
else:
bugdict = dict([(b['id'], b['internals']) for b in r['bugs']])
ret = []
for i in idlist:
found = None
if i in bugdict:
found = bugdict[i]
else:
# Need to map an alias
for valdict in bugdict.values():
if i in self._listify(valdict.get("alias", None)):
found = valdict
break
ret.append(found)
return ret |
python | def _fake_setqualifier(self, namespace, **params):
"""
Implements a server responder for
:meth:`pywbem.WBEMConnection.SetQualifier`.
Create or modify a qualifier declaration in the local repository of this
class. This method will create a new namespace for the qualifier
if none is defined.
Raises:
CIMError: CIM_ERR_INVALID_PARAMETER
CIMError: CIM_ERR_ALREADY_EXISTS
"""
# TODO:ks FUTURE implement set... method for instance, qualifier, class
# as general means to put new data into the repo.
# Validate namespace
qualifier_repo = self._get_qualifier_repo(namespace)
qual_decl = params['QualifierDeclaration']
if not isinstance(qual_decl, CIMQualifierDeclaration):
raise CIMError(
CIM_ERR_INVALID_PARAMETER,
_format("QualifierDeclaration parameter is not a valid "
"valid CIMQualifierDeclaration. Rcvd type={0}",
type(qual_decl)))
if qual_decl.name in qualifier_repo:
raise CIMError(
CIM_ERR_ALREADY_EXISTS,
_format("Qualifier declaration {0!A} already exists in "
"namespace {1!A}.", qual_decl.name, namespace))
self._init_qualifier_decl(qual_decl, qualifier_repo)
qualifier_repo[qual_decl.name] = qual_decl |
python | def is_feasible_i(self, x, i):
"""return True if value ``x`` is in the invertible domain of
variable ``i``
"""
lb = self._lb[self._index(i)]
ub = self._ub[self._index(i)]
al = self._al[self._index(i)]
au = self._au[self._index(i)]
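# x is feasible iff it lies strictly inside the bounds widened by the margins al and au.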
return lb - al < x < ub + au |
java | public void removeConverter(final TypeConverter tc) {
if (tc.getSupportedTypes() == null) {
untypedTypeEncoders.remove(tc);
registeredConverterClasses.remove(tc.getClass());
} else {
for (final Entry<Class, List<TypeConverter>> entry : tcMap.entrySet()) {
List<TypeConverter> list = entry.getValue();
if (list.contains(tc)) {
list.remove(tc);
}
if (list.isEmpty()) {
tcMap.remove(entry.getKey());
}
}
registeredConverterClasses.remove(tc.getClass());
}
} |
python | def __user_location(__pkg: str, type_) -> str:
"""Utility function to look up XDG basedir locations
Args:
__pkg: Package name
__type: Location type
"""
if ALLOW_DARWIN and sys.platform == 'darwin':
user_dir = '~/Library/{}'.format(__LOCATIONS[type_][0])
else:
user_dir = getenv('XDG_{}_HOME'.format(type_.upper()),
path.sep.join([getenv('HOME', ''),
__LOCATIONS[type_][1]]))
return path.expanduser(path.sep.join([user_dir, __pkg])) |
java | public static BookmarkablePageLink<String> newBookmarkablePageLink(final String linkId,
final Class<? extends Page> pageClass, final String labelId, final String resourceModelKey,
final Object[] parameters, final String defaultValue, final Component component)
{
return newBookmarkablePageLink(linkId, pageClass, labelId, ResourceBundleKey.builder()
.key(resourceModelKey).parameters(parameters).defaultValue(defaultValue).build(),
component);
} |
python | def _process_input_seed(seed_photon_fields):
"""
take input list of seed_photon_fields and fix them into usable format
"""
Tcmb = 2.72548 * u.K  # uncertainty: +/- 0.00057 K
Tfir = 30 * u.K
ufir = 0.5 * u.eV / u.cm ** 3
Tnir = 3000 * u.K
unir = 1.0 * u.eV / u.cm ** 3
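# Built-in defaults for the named fields: the CMB temperature plus representative FIR and NIR temperatures and energy densities.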
# Allow for seed_photon_fields definitions of the type 'CMB-NIR-FIR' or
# 'CMB'
if type(seed_photon_fields) != list:
seed_photon_fields = seed_photon_fields.split("-")
result = OrderedDict()
for idx, inseed in enumerate(seed_photon_fields):
seed = {}
if isinstance(inseed, six.string_types):
name = inseed
seed["type"] = "thermal"
if inseed == "CMB":
seed["T"] = Tcmb
seed["u"] = ar * Tcmb ** 4
seed["isotropic"] = True
elif inseed == "FIR":
seed["T"] = Tfir
seed["u"] = ufir
seed["isotropic"] = True
elif inseed == "NIR":
seed["T"] = Tnir
seed["u"] = unir
seed["isotropic"] = True
else:
log.warning(
"Will not use seed {0} because it is not "
"CMB, FIR or NIR".format(inseed)
)
raise TypeError
elif type(inseed) == list and (
len(inseed) == 3 or len(inseed) == 4
):
isotropic = len(inseed) == 3
if isotropic:
name, T, uu = inseed
seed["isotropic"] = True
else:
name, T, uu, theta = inseed
seed["isotropic"] = False
seed["theta"] = validate_scalar(
"{0}-theta".format(name), theta, physical_type="angle"
)
thermal = T.unit.physical_type == "temperature"
if thermal:
seed["type"] = "thermal"
validate_scalar(
"{0}-T".format(name),
T,
domain="positive",
physical_type="temperature",
)
seed["T"] = T
if uu == 0:
seed["u"] = ar * T ** 4
else:
# pressure has same physical type as energy density
validate_scalar(
"{0}-u".format(name),
uu,
domain="positive",
physical_type="pressure",
)
seed["u"] = uu
else:
seed["type"] = "array"
# Ensure everything is in arrays
T = u.Quantity((T,)).flatten()
uu = u.Quantity((uu,)).flatten()
seed["energy"] = validate_array(
"{0}-energy".format(name),
T,
domain="positive",
physical_type="energy",
)
if np.isscalar(seed["energy"]) or seed["energy"].size == 1:
seed["photon_density"] = validate_scalar(
"{0}-density".format(name),
uu,
domain="positive",
physical_type="pressure",
)
else:
if uu.unit.physical_type == "pressure":
uu /= seed["energy"] ** 2
seed["photon_density"] = validate_array(
"{0}-density".format(name),
uu,
domain="positive",
physical_type="differential number density",
)
else:
raise TypeError(
"Unable to process seed photon"
" field: {0}".format(inseed)
)
result[name] = seed
return result |
java | public Elements not(String query) {
Elements out = Selector.select(query, this);
return Selector.filterOut(this, out);
} |
python | def get_cpu_cores_per_run(coreLimit, num_of_threads, my_cgroups, coreSet=None):
"""
Calculate an assignment of the available CPU cores to a number
of parallel benchmark executions such that each run gets its own cores
without overlapping of cores between runs.
In case the machine has hyper-threading, this method tries to avoid
putting two different runs on the same physical core
(but it does not guarantee this if the number of parallel runs is too high to avoid it).
In case the machine has multiple CPUs, this method avoids
splitting a run across multiple CPUs if the number of cores per run
is lower than the number of cores per CPU
(splitting a run over multiple CPUs provides worse performance).
It will also try to split the runs evenly across all available CPUs.
A few theoretically-possible cases are not implemented,
for example assigning three 10-core runs on a machine
with two 16-core CPUs (this would have unfair core assignment
and thus undesirable performance characteristics anyway).
The list of available cores is read from the cgroup file system,
such that the assigned cores are a subset of the cores
that the current process is allowed to use.
This script does currently not support situations
where the available cores are asymmetrically split over CPUs,
e.g. 3 cores on one CPU and 5 on another.
@param coreLimit: the number of cores for each run
@param num_of_threads: the number of parallel benchmark executions
@param coreSet: the list of CPU cores identifiers provided by a user, None makes benchexec using all cores
@return a list of lists, where each inner list contains the cores for one run
"""
try:
# read list of available CPU cores
allCpus = util.parse_int_list(my_cgroups.get_value(cgroups.CPUSET, 'cpus'))
# Filter CPU cores according to the list of identifiers provided by a user
if coreSet:
invalid_cores = sorted(set(coreSet).difference(set(allCpus)))
if len(invalid_cores) > 0:
raise ValueError("The following provided CPU cores are not available: {}".format(', '.join(map(str, invalid_cores))))
allCpus = [core for core in allCpus if core in coreSet]
logging.debug("List of available CPU cores is %s.", allCpus)
# read mapping of core to CPU ("physical package")
physical_packages = [get_cpu_package_for_core(core) for core in allCpus]
cores_of_package = collections.defaultdict(list)
for core, package in zip(allCpus, physical_packages):
cores_of_package[package].append(core)
logging.debug("Physical packages of cores are %s.", cores_of_package)
# read hyper-threading information (sibling cores sharing the same physical core)
siblings_of_core = {}
for core in allCpus:
siblings = util.parse_int_list(util.read_file('/sys/devices/system/cpu/cpu{0}/topology/thread_siblings_list'.format(core)))
siblings_of_core[core] = siblings
logging.debug("Siblings of cores are %s.", siblings_of_core)
except ValueError as e:
sys.exit("Could not read CPU information from kernel: {0}".format(e))
return _get_cpu_cores_per_run0(coreLimit, num_of_threads, allCpus, cores_of_package, siblings_of_core) |
java | public <OUT> DataStreamSource<OUT> readFile(FileInputFormat<OUT> inputFormat,
String filePath) {
return readFile(inputFormat, filePath, FileProcessingMode.PROCESS_ONCE, -1);
} |
java | public void execute() throws MojoExecutionException, MojoFailureException {
if (skip) {
getLog().info("Skipping format check");
return;
}
if ("pom".equals(packaging)) {
getLog().info("Skipping format check: project uses 'pom' packaging");
return;
}
if (skipSortingImports) {
getLog().info("Skipping sorting imports");
}
List<File> directoriesToFormat = new ArrayList<File>();
if (sourceDirectory.exists()) {
directoriesToFormat.add(sourceDirectory);
} else {
handleMissingDirectory("Source", sourceDirectory);
}
if (testSourceDirectory.exists()) {
directoriesToFormat.add(testSourceDirectory);
} else {
handleMissingDirectory("Test source", testSourceDirectory);
}
for (File additionalSourceDirectory : additionalSourceDirectories) {
if (additionalSourceDirectory.exists()) {
directoriesToFormat.add(additionalSourceDirectory);
} else {
handleMissingDirectory("Additional source", additionalSourceDirectory);
}
}
Formatter formatter = getFormatter();
for (File directoryToFormat : directoriesToFormat) {
formatSourceFilesInDirectory(directoryToFormat, formatter);
}
logNumberOfFilesProcessed();
postExecute(this.filesProcessed, this.nonComplyingFiles);
} |
java | public Observable<CertificateBundle> mergeCertificateAsync(String vaultBaseUrl, String certificateName, List<byte[]> x509Certificates) {
return mergeCertificateWithServiceResponseAsync(vaultBaseUrl, certificateName, x509Certificates).map(new Func1<ServiceResponse<CertificateBundle>, CertificateBundle>() {
@Override
public CertificateBundle call(ServiceResponse<CertificateBundle> response) {
return response.body();
}
});
} |
java | private long numDepthWiseParams(SeparableConvolution2D layerConf) {
int[] kernel = layerConf.getKernelSize();
val nIn = layerConf.getNIn();
val depthMultiplier = layerConf.getDepthMultiplier();
return nIn * depthMultiplier * kernel[0] * kernel[1];
} |
java | private StringBuffer generateReport() throws Exception {
Graph[] gs = browser.getGraphs();
StringBuffer result = new StringBuffer();
Hashtable stats=new Hashtable();
for (int k = 0; k < gs.length; k++) {
Graph g = gs[k];
this.generateADiagramReport(stats,g);
}
Object[] keys=new Vector(stats.keySet()).toArray();
Arrays.sort(keys);
StringBuffer report=new StringBuffer();
for (int k=0;k<keys.length;k++){
String key=keys[k].toString();
report.append(key);
for (int l=0;l<40-key.length();l++)
report.append(" ");
report.append(":"+stats.get(key)+"\n");
}
return report;
}
/**
* Generates a report for each diagram type
*
*@param g The diagram to be studied
*@param stats Stats collected so far
*/
private void generateADiagramReport(Hashtable stats,Graph g) {
GraphRelationship[] grels=g.getRelationships();
for (int k=0;k<grels.length;k++){
if (stats.containsKey(grels[k].getType())){
Integer old=(Integer)stats.get(grels[k].getType());
stats.put(grels[k].getType(),new Integer(old.intValue()+1));
} else {
stats.put(grels[k].getType(),new Integer(1));
}
}
GraphEntity[] ges;
try {
ges = g.getEntities();
for (int k=0;k<ges.length;k++){
if (stats.containsKey(ges[k].getType())){
Integer old=(Integer)stats.get(ges[k].getType());
stats.put(ges[k].getType(),new Integer(old.intValue()+1));
} else {
stats.put(ges[k].getType(),new Integer(1));
}
}
} catch (NullEntity e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
/**
* Generates an stats report from a INGENIAS specification file (1st param)
*
*@param args Arguments typed in the command line. Only first one is attended
*@exception Exception Sth went wrong
*/
public static void main(String args[]) throws Exception {
ingenias.editor.Log.initInstance(new java.io.PrintWriter(System.err));
ExampleReportGenerator erg = new ExampleReportGenerator(args[0]);
erg.run();
System.exit(0);
}
} |
java | public Collection<BeanRule> getBeanRules() {
Collection<BeanRule> idBasedBeanRules = beanRuleRegistry.getIdBasedBeanRules();
Collection<Set<BeanRule>> typeBasedBeanRules = beanRuleRegistry.getTypeBasedBeanRules();
Collection<BeanRule> configurableBeanRules = beanRuleRegistry.getConfigurableBeanRules();
int capacity = idBasedBeanRules.size();
for (Set<BeanRule> brs : typeBasedBeanRules) {
capacity += brs.size();
}
capacity += configurableBeanRules.size();
capacity = (int)(capacity / 0.9f) + 1;
Set<BeanRule> beanRuleSet = new HashSet<>(capacity, 0.9f);
beanRuleSet.addAll(idBasedBeanRules);
for (Set<BeanRule> brs : typeBasedBeanRules) {
beanRuleSet.addAll(brs);
}
beanRuleSet.addAll(configurableBeanRules);
return beanRuleSet;
} |
java | public static byte[] toByteArray(String string, ProcessEngine processEngine) {
ProcessEngineConfigurationImpl processEngineConfiguration = ((ProcessEngineImpl) processEngine).getProcessEngineConfiguration();
Charset charset = processEngineConfiguration.getDefaultCharset();
return string.getBytes(charset);
} |
java | public void setEnabled(boolean isEnabled) {
mCardNumberEditText.setEnabled(isEnabled);
mExpiryDateEditText.setEnabled(isEnabled);
mCvcNumberEditText.setEnabled(isEnabled);
} |
java | @SuppressWarnings("unchecked")
public <V3, M4, C, N, Q> C getDiffuseColor(
AiWrapperProvider<V3, M4, C, N, Q> wrapperProvider) {
Property p = getProperty(PropertyKey.COLOR_DIFFUSE.m_key);
if (null == p || null == p.getData()) {
Object def = m_defaults.get(PropertyKey.COLOR_DIFFUSE);
if (def == null) {
return (C) Jassimp.wrapColor4(1.0f, 1.0f, 1.0f, 1.0f);
}
return (C) def;
}
return (C) p.getData();
} |
python | def get_exception():
"""Return full formatted traceback as a string."""
trace = ""
exception = ""
exc_list = traceback.format_exception_only(
sys.exc_info()[0], sys.exc_info()[1]
)
for entry in exc_list:
exception += entry
tb_list = traceback.format_tb(sys.exc_info()[2])
for entry in tb_list:
trace += entry
return "%s\n%s" % (exception, trace) |
java | public static JsonNode toJson(Object obj, ClassLoader classLoader) {
return SerializationUtils.toJson(obj, classLoader);
} |
java | public boolean stream(int bufferId) {
int frames = sectionSize;
boolean reset = false;
boolean more = true;
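// Clamp the final read to the frames left in the song; "reset" flags that the end of the module was reached.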
if (frames > songDuration) {
frames = songDuration;
reset = true;
}
ibxm.get_audio(data, frames);
bufferData.clear();
bufferData.put(data);
bufferData.limit(frames * 4);
if (reset) {
if (loop) {
ibxm.seek(0);
ibxm.set_module(module);
songDuration = ibxm.calculate_song_duration();
} else {
more = false;
songDuration -= frames;
}
} else {
songDuration -= frames;
}
bufferData.flip();
AL10.alBufferData(bufferId, AL10.AL_FORMAT_STEREO16, bufferData, 48000);
return more;
} |
java | public QualifiedName enclosingType() {
checkState(!isTopLevel(), "Cannot return enclosing type of top-level type");
return new QualifiedName(packageName, simpleNames.subList(0, simpleNames.size() - 1));
} |
python | def serial_number(self):
"""Return the serial number of the printer."""
try:
return self.data.get('identity').get('serial_num')
except (KeyError, AttributeError):
return self.device_status_simple('') |
java | public void close() throws IOException {
if (kids != null) {
for (RecordReader<K,? extends Writable> rr : kids) {
rr.close();
}
}
if (jc != null) {
jc.close();
}
} |
java | public Path getAuditFilePath() {
StringBuilder auditFileNameBuilder = new StringBuilder();
auditFileNameBuilder.append("P=").append(auditMetadata.getPhase()).append(FILE_NAME_DELIMITTER).append("C=")
.append(auditMetadata.getCluster()).append(FILE_NAME_DELIMITTER).append("E=")
.append(auditMetadata.getExtractId()).append(FILE_NAME_DELIMITTER).append("S=")
.append(auditMetadata.getSnapshotId()).append(FILE_NAME_DELIMITTER).append("D=")
.append(auditMetadata.getDeltaId());
return new Path(auditDirPath, PathUtils.combinePaths(auditMetadata.getTableMetadata().getDatabase(), auditMetadata
.getTableMetadata().getTable(), auditFileNameBuilder.toString(), auditMetadata.getPartFileName()));
} |
python | def _call(self, x, out=None):
"""Implement ``self(x[, out])``."""
if out is None:
return self.operator(self.scalar * x)
else:
if self.__tmp is not None:
tmp = self.__tmp
else:
tmp = self.domain.element()
tmp.lincomb(self.scalar, x)
self.operator(tmp, out=out) |
python | def eval_adiabatic_limit(YABFGN, Ytilde, P0):
"""Compute the limiting SLH model for the adiabatic approximation
Args:
YABFGN: The tuple (Y, A, B, F, G, N)
as returned by prepare_adiabatic_limit.
Ytilde: The pseudo-inverse of Y, satisfying Y * Ytilde = P0.
P0: The projector onto the null-space of Y.
Returns:
SLH: Limiting SLH model
"""
Y, A, B, F, G, N = YABFGN
Klim = (P0 * (B - A * Ytilde * A) * P0).expand().simplify_scalar()
Hlim = ((Klim - Klim.dag())/2/I).expand().simplify_scalar()
Ldlim = (P0 * (G - A * Ytilde * F) * P0).expand().simplify_scalar()
dN = identity_matrix(N.shape[0]) + F.H * Ytilde * F
Nlim = (P0 * N * dN * P0).expand().simplify_scalar()
return SLH(Nlim.dag(), Ldlim.dag(), Hlim.dag()) |
python | def report_again(self, current_status):
"""Computes a sleep interval, sleeps for the specified amount of time
then kicks off another status report.
"""
# calculate sleep interval based on current status and configured interval
_m = {'playing': 1, 'paused': 2, 'stopped': 5}[current_status['state']]
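# Back off when idle: paused doubles and stopped quintuples the configured interval.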
interval = (self.config['status_update_interval'] * _m) / 1000.0
# sleep for computed interval and kickoff another webhook
time.sleep(interval)
self.in_future.report_status() |
python | def humanize_size(size):
"""Create a nice human readable representation of the given number
(understood as bytes) using the "KiB" and "MiB" suffixes to indicate
kibibytes and mebibytes. A kibibyte is defined as 1024 bytes (as opposed to
a kilobyte which is 1000 bytes) and a mebibyte is 1024**2 bytes (as opposed
to a megabyte which is 1000**2 bytes).
:param size: the number to convert
:type size: int
:returns: the human readable representation of size
:rtype: str
"""
for factor, format_string in ((1, '%i'),
(1024, '%iKiB'),
(1024 * 1024, '%.1fMiB')):
if size / factor < 1024:
return format_string % (size / factor)
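# Very large sizes fall through and reuse the MiB format from the last loop iteration.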
return format_string % (size / factor) |
java | protected void rehash(KType[] fromKeys, VType[] fromValues) {
assert fromKeys.length == fromValues.length &&
HashContainers.checkPowerOfTwo(fromKeys.length - 1);
// Rehash all stored key/value pairs into the new buffers.
final KType[] keys = Intrinsics.<KType[]> cast(this.keys);
final VType[] values = Intrinsics.<VType[]> cast(this.values);
final int mask = this.mask;
KType existing;
// Copy the zero element's slot, then rehash everything else.
int from = fromKeys.length - 1;
keys[keys.length - 1] = fromKeys[from];
values[values.length - 1] = fromValues[from];
while (--from >= 0) {
if (!Intrinsics.<KType> isEmpty(existing = fromKeys[from])) {
int slot = hashKey(existing) & mask;
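// Linear probing: advance until an empty slot is found.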
while (!Intrinsics.<KType> isEmpty(keys[slot])) {
slot = (slot + 1) & mask;
}
keys[slot] = existing;
values[slot] = fromValues[from];
}
}
} |
python | def is_pickle_file(abspath):
"""Parse file extension.
- *.pickle: uncompressed, utf-8 encoded pickle file
- *.gz: compressed, utf-8 encoded pickle file
"""
abspath = abspath.lower()
fname, ext = os.path.splitext(abspath)
if ext in [".pickle", ".pk", ".p"]:
is_pickle = True
elif ext == ".gz":
is_pickle = False
elif ext == ".tmp":
return is_pickle_file(fname)
else:
raise PickleExtError(
"'%s' is not a valid pickle file. "
"extension has to be '.pickle' for uncompressed, '.gz' "
"for compressed." % abspath)
return is_pickle |
python | def p_lexical_var_list(p):
'''lexical_var_list : lexical_var_list COMMA AND VARIABLE
| lexical_var_list COMMA VARIABLE
| AND VARIABLE
| VARIABLE'''
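# The production length tells us which alternative matched: 5/4 extend the list (by-ref/by-value), 3/2 start it.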
if len(p) == 5:
p[0] = p[1] + [ast.LexicalVariable(p[4], True, lineno=p.lineno(2))]
elif len(p) == 4:
p[0] = p[1] + [ast.LexicalVariable(p[3], False, lineno=p.lineno(2))]
elif len(p) == 3:
p[0] = [ast.LexicalVariable(p[2], True, lineno=p.lineno(1))]
else:
p[0] = [ast.LexicalVariable(p[1], False, lineno=p.lineno(1))] |
java | public void free() {
if (handle != 0L) {
Log.d(LogDomain.REPLICATOR, "handle: " + handle);
Log.d(
LogDomain.REPLICATOR,
"replicatorContext: " + replicatorContext + " $" + replicatorContext.getClass());
Log.d(
LogDomain.REPLICATOR,
"socketFactoryContext: " + socketFactoryContext + " $" + socketFactoryContext.getClass());
free(handle, replicatorContext, socketFactoryContext);
handle = 0L;
}
if (replicatorContext != null) {
CONTEXT_TO_C4_REPLICATOR_MAP.remove(this.replicatorContext);
replicatorContext = null;
}
} |
java | @Override
public T validate(T wert) {
if (!isValid(wert)) {
throw new InvalidLengthException(Objects.toString(wert), min, max);
}
return wert;
} |
java | private boolean hasOverriddenNativeProperty(String propertyName) {
if (isNativeObjectType()) {
return false;
}
JSType propertyType = getPropertyType(propertyName);
ObjectType nativeType =
isFunctionType()
? registry.getNativeObjectType(JSTypeNative.FUNCTION_PROTOTYPE)
: registry.getNativeObjectType(JSTypeNative.OBJECT_PROTOTYPE);
JSType nativePropertyType = nativeType.getPropertyType(propertyName);
return propertyType != nativePropertyType;
} |
java | public OvhTask organizationName_service_exchangeService_mailingList_POST(String organizationName, String exchangeService, OvhMailingListDepartRestrictionEnum departRestriction, String displayName, Boolean hiddenFromGAL, OvhMailingListJoinRestrictionEnum joinRestriction, String mailingListAddress, Long maxReceiveSize, Long maxSendSize, Boolean senderAuthentification) throws IOException {
String qPath = "/email/exchange/{organizationName}/service/{exchangeService}/mailingList";
StringBuilder sb = path(qPath, organizationName, exchangeService);
HashMap<String, Object>o = new HashMap<String, Object>();
addBody(o, "departRestriction", departRestriction);
addBody(o, "displayName", displayName);
addBody(o, "hiddenFromGAL", hiddenFromGAL);
addBody(o, "joinRestriction", joinRestriction);
addBody(o, "mailingListAddress", mailingListAddress);
addBody(o, "maxReceiveSize", maxReceiveSize);
addBody(o, "maxSendSize", maxSendSize);
addBody(o, "senderAuthentification", senderAuthentification);
String resp = exec(qPath, "POST", sb.toString(), o);
return convertTo(resp, OvhTask.class);
} |
java | @Override
public Exception isCorrect (final ProtocolDataUnit protocolDataUnit) {
final AbstractMessageParser parser = protocolDataUnit.getBasicHeaderSegment().getParser();
if (parser instanceof DataInParser) {
final DataInParser dataParser = (DataInParser) parser;
try {
if (connection.getSettingAsBoolean(OperationalTextKey.DATA_PDU_IN_ORDER) && connection.getSettingAsBoolean(OperationalTextKey.DATA_SEQUENCE_IN_ORDER)) {
if (dataParser.getBufferOffset() < bufferOffset) { return new IllegalStateException(new StringBuilder("The buffer offsets must be in increasing order and overlays are forbidden.").append(" The parserOffset here is ").append(dataParser.getBufferOffset()).append(" and the bufferOffset is ").append(bufferOffset).toString()); }
bufferOffset = dataParser.getBufferOffset();
}
} catch (OperationalTextKeyException e) {
return e;
}
if (dataParser.getDataSequenceNumber() != expectedDataSequenceNumber) { return new IllegalStateException(new StringBuilder("Data Sequence Number Mismatch (received, expected): " + dataParser.getDataSequenceNumber() + ", " + expectedDataSequenceNumber).toString());
}
incrementExpectedDataSequenceNumber();
if (dataParser.isStatusFlag()) {
incrementExpectedDataSequenceNumber();
return super.isCorrect(protocolDataUnit);
} else if (dataParser.getStatusSequenceNumber() != 0) { return new IllegalStateException(new StringBuilder("Status Sequence Number must be zero.").toString()); }
return null;
} else if (parser instanceof SCSIResponseParser) {
try {
if (connection.getSettingAsBoolean(OperationalTextKey.IMMEDIATE_DATA)) { return new IllegalStateException(new StringBuilder("Parser ").append("should not be instance of SCSIResponseParser because of ImmendiateData-Flag \"no\" in config!").toString()); }
} catch (OperationalTextKeyException e) {
return e;
}
return null;
} else {
return new IllegalStateException(new StringBuilder("Parser ").append(protocolDataUnit.getBasicHeaderSegment().getParser().toString()).append(" is instance of ").append(protocolDataUnit.getBasicHeaderSegment().getParser().getClass().toString()).append(" and not instance of either DataInParser or SCSIResponseParser!").toString());
}
} |
java | public Genomics fromApiKey(String apiKey) {
Preconditions.checkNotNull(apiKey);
return fromApiKey(getGenomicsBuilder(), apiKey).build();
} |
python | def delete(self):
"""Delete this cluster.
For example:
.. literalinclude:: snippets.py
:start-after: [START bigtable_delete_cluster]
:end-before: [END bigtable_delete_cluster]
Marks a cluster and all of its tables for permanent deletion in 7 days.
Immediately upon completion of the request:
* Billing will cease for all of the cluster's reserved resources.
* The cluster's ``delete_time`` field will be set 7 days in the future.
Soon afterward:
* All tables within the cluster will become unavailable.
At the cluster's ``delete_time``:
* The cluster and **all of its tables** will immediately and
irrevocably disappear from the API, and their data will be
permanently deleted.
"""
client = self._instance._client
client.instance_admin_client.delete_cluster(self.name) |
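A minimal usage sketch for the delete() method above, assuming a google-cloud-bigtable admin client; the project, instance, and cluster IDs are hypothetical:

from google.cloud import bigtable

client = bigtable.Client(project="my-project", admin=True)  # assumed project ID
instance = client.instance("my-instance")                   # assumed instance ID
cluster = instance.cluster("my-cluster")                    # assumed cluster ID
cluster.delete()  # tables become unavailable; data is removed at delete_time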
java | private void completeEnclosing(ClassSymbol c) {
if (c.owner.kind == PCK) {
Symbol owner = c.owner;
for (Name name : Convert.enclosingCandidates(Convert.shortName(c.name))) {
Symbol encl = owner.members().lookup(name).sym;
if (encl == null)
encl = classes.get(TypeSymbol.formFlatName(name, owner));
if (encl != null)
encl.complete();
}
}
} |
python | def stop_broadcast(self, broadcast_id):
"""
Use this method to stop a live broadcast of an OpenTok session
:param String broadcast_id: The ID of the broadcast you want to stop
        :rtype: A Broadcast object, which contains information of the broadcast: id, sessionId,
            projectId, createdAt, updatedAt and resolution
"""
endpoint = self.endpoints.broadcast_url(broadcast_id, stop=True)
response = requests.post(
endpoint,
headers=self.json_headers(),
proxies=self.proxies,
timeout=self.timeout
)
if response.status_code == 200:
return Broadcast(response.json())
elif response.status_code == 400:
raise BroadcastError(
            'Invalid request. This response may indicate that the data in your '
            'request is invalid JSON.')
elif response.status_code == 403:
raise AuthError('Authentication error.')
elif response.status_code == 409:
raise BroadcastError(
'The broadcast (with the specified ID) was not found or it has already '
'stopped.')
else:
raise RequestError('OpenTok server error.', response.status_code) |
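A minimal usage sketch for stop_broadcast() above, assuming the OpenTok Python SDK client; the credentials and broadcast ID are hypothetical:

from opentok import OpenTok

opentok = OpenTok("MY_API_KEY", "MY_API_SECRET")       # assumed credentials
broadcast = opentok.stop_broadcast("MY_BROADCAST_ID")  # assumed broadcast ID
print(broadcast.id, broadcast.sessionId)               # fields named in the docstring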
python | def click_event(self, event):
"""
Execute the 'on_click' method of this module with the given event.
"""
        # a refresh being requested after the event has happened can be
        # prevented by setting this to True. Modules should do this via
        # py3.prevent_refresh()
self.prevent_refresh = False
try:
if self.error_messages:
# we have error messages
button = event["button"]
if button == 1:
# cycle through to next message
self.error_index = (self.error_index + 1) % len(self.error_messages)
error = self.error_messages[self.error_index]
self.error_output(error)
if button == 3:
self.hide_errors()
if button != 2 or (self.terminated or self.disabled):
self.prevent_refresh = True
elif self.click_events:
click_method = getattr(self.module_class, "on_click")
if self.click_events == self.PARAMS_NEW:
# new style modules
click_method(event)
else:
# legacy modules had extra parameters passed
click_method(
self.i3status_thread.json_list,
self.config["py3_config"]["general"],
event,
)
self.set_updated()
else:
# nothing has happened so no need for refresh
self.prevent_refresh = True
except Exception:
msg = "on_click event in `{}` failed".format(self.module_full_name)
self._py3_wrapper.report_exception(msg) |
java | @Override
public CommerceDiscount findByLtD_S_Last(Date displayDate, int status,
OrderByComparator<CommerceDiscount> orderByComparator)
throws NoSuchDiscountException {
CommerceDiscount commerceDiscount = fetchByLtD_S_Last(displayDate,
status, orderByComparator);
if (commerceDiscount != null) {
return commerceDiscount;
}
StringBundler msg = new StringBundler(6);
msg.append(_NO_SUCH_ENTITY_WITH_KEY);
msg.append("displayDate=");
msg.append(displayDate);
msg.append(", status=");
msg.append(status);
msg.append("}");
throw new NoSuchDiscountException(msg.toString());
} |
python | def p_arglist(self, tree):
    ''' V ::= arglist ( V , V )'''
    if isinstance(tree[0].value, list):
        tree.value = tree[0].value + [tree[2].value]
    else:
        tree.value = [tree[0].value, tree[2].value]
    try:
        tree.svalue = "%s,%s" % (tree[0].svalue, tree[2].svalue)
    except AttributeError:
        pass |
python | def main():
"""
NAME
download_magic.py
DESCRIPTION
unpacks a magic formatted smartbook .txt file from the MagIC database into the
tab delimited MagIC format txt files for use with the MagIC-Py programs.
SYNTAX
        download_magic.py [command line options]
INPUT
takes either the upload.txt file created by upload_magic.py or a file
downloaded from the MagIC database (http://earthref.org/MagIC)
OPTIONS
-h prints help message and quits
-i allows interactive entry of filename
-f FILE specifies input file name
-sep write location data to separate subdirectories (Location_*), (default False)
-O do not overwrite duplicate Location_* directories while downloading
        -DM data model (2 or 3, default 3)
        -WD DIR specifies output directory (default: current directory)
        -ID DIR specifies input directory (default: current directory)
"""
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
    if '-WD' in sys.argv:
        ind = sys.argv.index('-WD')
        dir_path = sys.argv[ind + 1]
    # interactive entry
    if '-i' in sys.argv:
        infile = input("Magic txt file for unpacking? ")
        dir_path = '.'
        input_dir_path = '.'
        # supply the defaults that are otherwise parsed in the non-interactive branch below
        overwrite, sep, data_model = True, False, 3
# non-interactive
else:
infile = pmag.get_named_arg("-f", reqd=True)
# if -O flag is present, overwrite is False
overwrite = pmag.get_flag_arg_from_sys("-O", true=False, false=True)
# if -sep flag is present, sep is True
sep = pmag.get_flag_arg_from_sys("-sep", true=True, false=False)
data_model = pmag.get_named_arg("-DM", default_val=3, reqd=False)
dir_path = pmag.get_named_arg("-WD", default_val=".", reqd=False)
input_dir_path = pmag.get_named_arg("-ID", default_val=".", reqd=False)
#if '-ID' not in sys.argv and '-WD' in sys.argv:
# input_dir_path = dir_path
if "-WD" not in sys.argv and "-ID" not in sys.argv:
input_dir_path = os.path.split(infile)[0]
if not input_dir_path:
input_dir_path = "."
ipmag.download_magic(infile, dir_path, input_dir_path, overwrite, True, data_model, sep) |
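Example invocations of the script above, a sketch using the options it parses (file and directory names are hypothetical):

download_magic.py -f magic_contribution.txt                 # unpack into the current directory
download_magic.py -f magic_contribution.txt -WD ./out -sep  # per-location subdirectories under ./out
download_magic.py -i                                        # prompt for the file name interactively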
java | public final IonTextWriterBuilder
withLstMinimizing(LstMinimizing minimizing)
{
IonTextWriterBuilder b = mutable();
b.setLstMinimizing(minimizing);
return b;
} |
java | public HierarchicalProperty getProperty(String name) throws RepositoryException
{
   for (HierarchicalProperty p : properties)
   {
      if (p.getStringName().equals(name))
         return p;
   }
   return null;
} |
python | def handle_or_else(self, orelse, test):
"""Handle the orelse part of an if or try node.
Args:
orelse(list[Node])
test(Node)
Returns:
The last nodes of the orelse branch.
"""
if isinstance(orelse[0], ast.If):
control_flow_node = self.visit(orelse[0])
# Prefix the if label with 'el'
control_flow_node.test.label = 'el' + control_flow_node.test.label
test.connect(control_flow_node.test)
return control_flow_node.last_nodes
else:
else_connect_statements = self.stmt_star_handler(
orelse,
prev_node_to_avoid=self.nodes[-1]
)
test.connect(else_connect_statements.first_statement)
return else_connect_statements.last_statements |
java | void rotate(Collection<AtomPair> pairs) {
// bond has already been tried in this phase so
// don't need to test again
Set<IBond> tried = new HashSet<>();
Pair:
for (AtomPair pair : pairs) {
for (IBond bond : pair.bndAt) {
                // only try each bond once per phase
                if (!tried.add(bond))
                    continue;
                // skip bonds in the fixed set
                if (bfix.contains(bond))
                    continue;
                // skip those we have found to probably be symmetric
                if (probablySymmetric.contains(bond))
                    continue;
// can't rotate these
if (bond.getOrder() != IBond.Order.SINGLE || bond.isInRing())
continue;
final IAtom beg = bond.getBegin();
final IAtom end = bond.getEnd();
final int begIdx = idxs.get(beg);
final int endIdx = idxs.get(end);
// terminal
if (adjList[begIdx].length == 1 || adjList[endIdx].length == 1)
continue;
int begPriority = beg.getProperty(AtomPlacer.PRIORITY);
int endPriority = end.getProperty(AtomPlacer.PRIORITY);
Arrays.fill(visited, false);
if (begPriority < endPriority) {
stackBackup.len = visitAdj(visited, stackBackup.xs, begIdx, endIdx);
// avoid moving fixed atoms
if (!afix.isEmpty()) {
final int begCnt = numFixedMoved(stackBackup.xs, stackBackup.len);
if (begCnt > 0) {
Arrays.fill(visited, false);
stackBackup.len = visitAdj(visited, stackBackup.xs, endIdx, begIdx);
final int endCnt = numFixedMoved(stackBackup.xs, stackBackup.len);
if (endCnt > 0)
continue;
}
}
}
else {
stackBackup.len = visitAdj(visited, stackBackup.xs, endIdx, begIdx);
// avoid moving fixed atoms
if (!afix.isEmpty()) {
final int endCnt = numFixedMoved(stackBackup.xs, stackBackup.len);
if (endCnt > 0) {
Arrays.fill(visited, false);
stackBackup.len = visitAdj(visited, stackBackup.xs, begIdx, endIdx);
final int begCnt = numFixedMoved(stackBackup.xs, stackBackup.len);
if (begCnt > 0)
continue;
}
}
}
double min = congestion.score();
backupCoords(backup, stackBackup);
reflect(stackBackup, beg, end);
congestion.update(visited, stackBackup.xs, stackBackup.len);
double delta = min - congestion.score();
// keep if decent improvement or improvement and resolves this overlap
if (delta > ROTATE_DELTA_THRESHOLD ||
(delta > 1 && congestion.contribution(pair.fst, pair.snd) < MIN_SCORE)) {
continue Pair;
} else {
// almost no difference from flipping... bond is probably symmetric
// mark to avoid in future iterations
if (Math.abs(delta) < 0.1)
probablySymmetric.add(bond);
// restore
restoreCoords(stackBackup, backup);
congestion.update(visited, stackBackup.xs, stackBackup.len);
congestion.score = min;
}
}
}
} |
java | private String getRequestBody(ServletRequest request) throws IOException {
    final StringBuilder sb = new StringBuilder();
    final BufferedReader reader = request.getReader();
    String line = reader.readLine();
    while (null != line) {
        sb.append(line);
        line = reader.readLine();
    }
    return sb.toString();
} |
java | public String getProperty(String key, String defaultValue) {
String property = configProperties.getProperty(key, defaultValue);
if (property != null) {
property = property.trim();
}
return property;
} |
python | def _get_param_values(self, name):
"""
Return the parameter by name as stored on the protocol
agent payload. This loads the data from the local cache
versus having to query the SMC for each parameter.
:param str name: name of param
        :rtype: dict or None (when no parameter matches)
"""
for param in self.data.get('paParameters', []):
for _pa_parameter, values in param.items():
if values.get('name') == name:
return values |