language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
---|---|
/**
 * Runs the given query against the default database.
 *
 * @param query the SQL query to execute
 * @param params positional parameters bound into the query
 * @return a {@link RowProcessor} over the query results
 */
public static RowProcessor find(String query, Object ... params) {
    return new DB(DB.DEFAULT_NAME).find(query, params);
}
java | public static void dump(ClassLoader cl) {
System.err.println("Dump Loaders:");
while (cl != null) {
System.err.println(" loader " + cl);
cl = cl.getParent();
}
} |
java | public void findMatch(ArrayList<String> speechResult) {
loadCadidateString();
for (String matchCandidate : speechResult) {
Locale localeDefault = mGvrContext.getActivity().getResources().getConfiguration().locale;
if (volumeUp.equals(matchCandidate)) {
startVolumeUp();
break;
} else if (volumeDown.toLowerCase(localeDefault).equals(matchCandidate.toLowerCase(localeDefault))) {
startVolumeDown();
break;
} else if (zoomIn.toLowerCase(localeDefault).equals(matchCandidate.toLowerCase(localeDefault))) {
startZoomIn();
break;
} else if (zoomOut.toLowerCase(localeDefault).equals(matchCandidate.toLowerCase(localeDefault))) {
startZoomOut();
break;
} else if (invertedColors.toLowerCase(localeDefault).equals(matchCandidate.toLowerCase(localeDefault))) {
startInvertedColors();
break;
} else if (talkBack.toLowerCase(localeDefault).equals(matchCandidate.toLowerCase(localeDefault))) {
enableTalkBack();
} else if (disableTalkBack.toLowerCase(localeDefault).equals(matchCandidate.toLowerCase(localeDefault))) {
disableTalkBack();
}
}
} |
/**
 * Returns the audio-transcription operation details when that is the active
 * oneof case ({@code detailsCase_ == 10}); otherwise returns the default
 * (empty) instance. Generated protobuf accessor.
 */
public com.google.cloud.datalabeling.v1beta1.LabelAudioTranscriptionOperationMetadataOrBuilder
    getAudioTranscriptionDetailsOrBuilder() {
  if (detailsCase_ == 10) {
    return (com.google.cloud.datalabeling.v1beta1.LabelAudioTranscriptionOperationMetadata)
        details_;
  }
  return com.google.cloud.datalabeling.v1beta1.LabelAudioTranscriptionOperationMetadata
      .getDefaultInstance();
}
def _get_axis_bounds(self, dim, bunch):
    """Return the min/max of an axis."""
    if dim not in self.attributes:
        # PC dimensions: use the common scaling.
        bound = 1. / self.scaling
        return (-bound, +bound)
    # Attribute: specified lim, or compute the min/max.
    vmin, vmax = bunch['lim']
    assert vmin is not None
    assert vmax is not None
    return vmin, vmax
/**
 * Renders the PICT image by replaying its opcodes onto the given graphics.
 *
 * @param pGraphics the target graphics context
 * @throws IOException if reading the PICT data from {@code imageInput} fails
 */
private void drawOnto(Graphics2D pGraphics) throws IOException {
    context = new QuickDrawContext(pGraphics);
    readPICTopcodes(imageInput);
    if (DEBUG) {
        System.out.println("Done reading PICT body!");
    }
}
/**
 * Adds the ID and JCR identifier columns (table index 2) to the given select
 * statement and left-joins the JCR store table on ID.
 *
 * @param _select the select statement to extend
 * @return 1 — meaning defined by the caller's contract; TODO confirm
 */
@Override
protected int add2Select(final SQLSelect _select)
{
    _select.column(2, "ID").column(2, JCRStoreResource.COLNAME_IDENTIFIER)
        .leftJoin(JCRStoreResource.TABLENAME_STORE, 2, "ID", 0, "ID");
    return 1;
}
/**
 * Queries the vacuum for its consumable status via the "get_consumable"
 * command.
 *
 * @return the parsed consumable status
 * @throws CommandExecutionException if the device response has no object at
 *         index 0 (INVALID_RESPONSE)
 */
public VacuumConsumableStatus consumableStatus() throws CommandExecutionException {
    JSONArray resp = sendToArray("get_consumable");
    // optJSONObject returns null when element 0 is absent or not an object.
    JSONObject stat = resp.optJSONObject(0);
    if (stat == null) throw new CommandExecutionException(CommandExecutionException.Error.INVALID_RESPONSE);
    return new VacuumConsumableStatus(stat);
}
def proj_l2ball(b, s, r, axes=None):
    r"""
    Project :math:`\mathbf{b}` into the :math:`\ell_2` ball of radius
    :math:`r` about :math:`\mathbf{s}`, i.e.
    :math:`\{ \mathbf{x} : \|\mathbf{x} - \mathbf{s} \|_2 \leq r \}`.
    Note that ``proj_l2ball(b, s, r)`` is equivalent to
    :func:`.prox.proj_l2` ``(b - s, r) + s``.

    Parameters
    ----------
    b : array_like
        Vector :math:`\mathbf{b}` to be projected
    s : array_like
        Centre of :math:`\ell_2` ball :math:`\mathbf{s}`
    r : float
        Radius of ball
    axes : sequence of ints, optional (default all axes)
        Axes over which to compute :math:`\ell_2` norms

    Returns
    -------
    x : ndarray
        Projection of :math:`\mathbf{b}` into ball
    """
    delta = b - s
    # Distance of b from the centre, per the selected axes.
    dist = np.sqrt(np.sum(delta**2, axis=axes, keepdims=True))
    # Unit direction from s towards b (zdivide maps 0/0 safely).
    direction = zdivide(delta, dist)
    # Inside the ball: keep b. Outside: snap to the surface point s + r*u.
    inside = (dist <= r) * b
    outside = (dist > r) * (s + r * direction)
    return np.asarray(inside + outside, b.dtype)
def last_datapoint(self, sid, epoch=False):
    """
    Return the most recent (timestamp, value) pair for a sensor.

    Parameters
    ----------
    sid : str
        SensorId
    epoch : bool
        default False
        If True return as epoch
        If False return as pd.Timestamp

    Returns
    -------
    pd.Timestamp | int, float
        ``(None, None)`` when no block exists for the sensor.
    """
    block = self._last_block(sid)
    if block is None:
        return None, None
    header = block['h']
    # The block header's 'tail' entry holds the latest (timestamp, value)
    # pair — presumably maintained on write; confirm against the writer.
    timestamp, value = header['tail']
    if not epoch:
        # Stored timestamps are epoch seconds in UTC; attach the tz info.
        timestamp = pd.Timestamp.utcfromtimestamp(timestamp)
        timestamp = timestamp.tz_localize('UTC')
    return timestamp, value
java | public StrBuilder insert(final int index, final char value) {
validateIndex(index);
ensureCapacity(size + 1);
System.arraycopy(buffer, index, buffer, index + 1, size - index);
buffer[index] = value;
size++;
return this;
} |
/**
 * Reads the next character from the underlying reader, updating the tracked
 * source position when position tracking is enabled.
 *
 * @return the character read, or {@code EOI} on end of stream / I/O error
 */
private int _readChar ()
{
  try
  {
    final int c = m_aReader.read ();
    if (m_bTrackPosition)
    {
      if (m_nBackupChars > 0)
      {
        // If previously a char was backed up, don't increase the position!
        m_nBackupChars--;
      }
      else
        m_aPos.updatePosition (c, m_nTabSize);
    }
    return c;
  }
  catch (final IOException ex)
  {
    // NOTE(review): the I/O error is swallowed and mapped to EOI — confirm
    // callers need not distinguish EOF from a read failure.
    return EOI;
  }
}
def filter_transcription_factor(stmts_in, **kwargs):
    """Filter out RegulateAmounts where subject is not a transcription factor.

    Parameters
    ----------
    stmts_in : list[indra.statements.Statement]
        A list of statements to filter.
    save : Optional[str]
        The name of a pickle file to save the results (stmts_out) into.

    Returns
    -------
    stmts_out : list[indra.statements.Statement]
        A list of filtered statements.
    """
    logger.info('Filtering %d statements to remove ' % len(stmts_in) +
                'amount regulations by non-transcription-factors...')
    path = os.path.dirname(os.path.abspath(__file__))
    tf_table = \
        read_unicode_csv(path + '/../resources/transcription_factors.csv')
    # Column 1 holds the gene name; the first row is a header. Use a set for
    # O(1) membership tests instead of the original list scan per statement.
    gene_names = {row[1] for row in list(tf_table)[1:]}
    stmts_out = []
    for st in stmts_in:
        if isinstance(st, RegulateAmount):
            # Keep amount regulations only when the subject is a known TF.
            if st.subj is not None and st.subj.name in gene_names:
                stmts_out.append(st)
        else:
            # Non-RegulateAmount statements pass through unchanged.
            stmts_out.append(st)
    logger.info('%d statements after filter...' % len(stmts_out))
    dump_pkl = kwargs.get('save')
    if dump_pkl:
        dump_statements(stmts_out, dump_pkl)
    return stmts_out
/**
 * Marshalls the given {@code ModelPackageContainerDefinition} fields into the
 * protocol marshaller (AWS SDK generated-style marshaller).
 *
 * @param modelPackageContainerDefinition the value to marshall (must not be null)
 * @param protocolMarshaller the marshaller to write into
 * @throws SdkClientException if the input is null or marshalling fails
 */
public void marshall(ModelPackageContainerDefinition modelPackageContainerDefinition, ProtocolMarshaller protocolMarshaller) {
    if (modelPackageContainerDefinition == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(modelPackageContainerDefinition.getContainerHostname(), CONTAINERHOSTNAME_BINDING);
        protocolMarshaller.marshall(modelPackageContainerDefinition.getImage(), IMAGE_BINDING);
        protocolMarshaller.marshall(modelPackageContainerDefinition.getImageDigest(), IMAGEDIGEST_BINDING);
        protocolMarshaller.marshall(modelPackageContainerDefinition.getModelDataUrl(), MODELDATAURL_BINDING);
        protocolMarshaller.marshall(modelPackageContainerDefinition.getProductId(), PRODUCTID_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
def do_delete(self, args):
    '''delete the entire contents of the current namespace'''
    namespace = self.config['namespace']
    if not args.assume_yes:
        # Destructive operation: require the user to type the namespace name
        # back as confirmation rather than a simple y/n.
        # NOTE(review): raw_input is Python 2 only — confirm this module
        # still targets Python 2.
        response = raw_input('Delete everything in {0!r}? Enter namespace: '
                             .format(namespace))
        if response != namespace:
            self.stdout.write('not deleting anything\n')
            return
    self.stdout.write('deleting namespace {0!r}\n'.format(namespace))
    self.task_master.clear()
def _clear(reason, idle_pool, using_pool, channel_pool, conn_id):
    """
    clear the bad connection
    :param reason: why the connection is being cleared (passed through)
    :param idle_pool: pool of connections not currently in use
    :param using_pool: pool of connections currently in use
    :param channel_pool: channels keyed by connection id
    :param conn_id: id of the connection to remove
    :return: ``reason`` when the connection was not idle (KeyError path),
        otherwise ``None``
    """
    with _lock():
        try:
            idle_pool.pop(conn_id)
            logger.info('a connection lost when not using')
        except KeyError:
            # Not in the idle pool: it may have been lost while actively in use.
            if using_pool.pop(conn_id, None):
                logger.warn('connection lost when using, should be handled later')
            return reason
        finally:
            # Always drop the associated channel, whichever pool it was in.
            channel_pool.pop(conn_id, None)
/**
 * Logs (at INIT/info level) that the VFS configuration has finished
 * initializing.
 */
public void initializeFinished() {
    if (CmsLog.INIT.isInfoEnabled()) {
        CmsLog.INIT.info(Messages.get().getBundle().key(Messages.INIT_VFS_CONFIG_FINISHED_0));
    }
}
java | public static String getPathToChild(Resource file, Resource dir) {
if (dir == null || !file.getResourceProvider().getScheme().equals(dir.getResourceProvider().getScheme())) return null;
boolean isFile = file.isFile();
String str = "/";
while (file != null) {
if (file.equals(dir)) {
if (isFile) return str.substring(0, str.length() - 1);
return str;
}
str = "/" + file.getName() + str;
file = file.getParentResource();
}
return null;
} |
/**
 * Feeds handshake bytes received from the peer into the internal handshaker.
 *
 * <p>Iterates the readable NIO views of {@code data}, tracks how many bytes
 * the handshaker actually consumed, and advances the ByteBuf reader index by
 * that amount.
 *
 * @param data buffer of bytes received from the peer
 * @return true when the handshaker reports it needs no more peer bytes
 * @throws GeneralSecurityException if the handshaker rejects the bytes
 */
boolean processBytesFromPeer(ByteBuf data) throws GeneralSecurityException {
    checkState(unwrapper != null, "protector already created");
    try (BufUnwrapper unwrapper = this.unwrapper) {
        int bytesRead = 0;
        boolean done = false;
        for (ByteBuffer nioBuffer : unwrapper.readableNioBuffers(data)) {
            if (!nioBuffer.hasRemaining()) {
                // This buffer has been fully read, continue to the next buffer.
                continue;
            }
            int prevPos = nioBuffer.position();
            done = internalHandshaker.processBytesFromPeer(nioBuffer);
            // Consumed bytes = how far the handshaker advanced this buffer.
            bytesRead += nioBuffer.position() - prevPos;
            if (done) {
                break;
            }
        }
        data.readerIndex(data.readerIndex() + bytesRead);
        return done;
    }
}
java | public Integer getAssistedQueryColumnCount(final String logicTableName) {
for (ShardingEncryptorStrategy each : shardingEncryptorStrategies.values()) {
int result = each.getAssistedQueryColumnCount(logicTableName);
if (result > 0) {
return result;
}
}
return 0;
} |
java | public EventSubscriptionEntity findMessageStartEventSubscriptionByNameAndTenantId(String messageName, String tenantId) {
Map<String, String> parameters = new HashMap<String, String>();
parameters.put("messageName", messageName);
parameters.put("tenantId", tenantId);
return (EventSubscriptionEntity) getDbEntityManager().selectOne("selectMessageStartEventSubscriptionByNameAndTenantId", parameters);
} |
def _ask_for_ledger_status(self, node_name: str, ledger_id):
    """
    Ask other node for LedgerStatus

    :param node_name: name of the node to request the status from
    :param ledger_id: id of the ledger whose status is requested
    """
    self.request_msg(LEDGER_STATUS, {f.LEDGER_ID.nm: ledger_id},
                     [node_name, ])
    logger.info("{} asking {} for ledger status of ledger {}".format(self, node_name, ledger_id))
/**
 * Builds a COS(...) math function expression over this number's argument and
 * records the invocation for later query construction.
 *
 * @return a new {@code JcNumber} representing the COS function call
 */
public JcNumber cos() {
    JcNumber ret = new JcNumber(null, this.argument,
            new FunctionInstance(FUNCTION.Math.COS, 1));
    QueryRecorder.recordInvocationConditional(this, "cos", ret);
    return ret;
}
def handle_delete_user(self, req):
    """Handles the DELETE v2/<account>/<user> call for deleting a user from an
    account.

    Can only be called by an account .admin.

    :param req: The swob.Request to process.
    :returns: swob.Response, 2xx on success.
    """
    # Validate path info; names starting with '.' are reserved.
    account = req.path_info_pop()
    user = req.path_info_pop()
    if req.path_info or not account or account[0] == '.' or not user or \
            user[0] == '.':
        return HTTPBadRequest(request=req)
    # if user to be deleted is reseller_admin, then requesting
    # user must be the super_admin
    is_reseller_admin = self.is_user_reseller_admin(req, account, user)
    if not is_reseller_admin and not req.credentials_valid:
        # if user to be deleted can't be found, return 404
        return HTTPNotFound(request=req)
    elif is_reseller_admin and not self.is_super_admin(req):
        return HTTPForbidden(request=req)
    if not self.is_account_admin(req, account):
        return self.denied_response(req)
    # Delete the user's existing token, if any.
    path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user))
    resp = self.make_pre_authed_request(
        req.environ, 'HEAD', path).get_response(self.app)
    if resp.status_int == 404:
        return HTTPNotFound(request=req)
    elif resp.status_int // 100 != 2:
        raise Exception('Could not obtain user details: %s %s' %
                        (path, resp.status))
    candidate_token = resp.headers.get('x-object-meta-auth-token')
    if candidate_token:
        # Token objects are sharded into .token_<last-char> containers.
        object_name = self._get_concealed_token(candidate_token)
        path = quote('/v1/%s/.token_%s/%s' %
                     (self.auth_account, object_name[-1], object_name))
        resp = self.make_pre_authed_request(
            req.environ, 'DELETE', path).get_response(self.app)
        if resp.status_int // 100 != 2 and resp.status_int != 404:
            raise Exception('Could not delete possibly existing token: '
                            '%s %s' % (path, resp.status))
    # Delete the user entry itself.
    path = quote('/v1/%s/%s/%s' % (self.auth_account, account, user))
    resp = self.make_pre_authed_request(
        req.environ, 'DELETE', path).get_response(self.app)
    if resp.status_int // 100 != 2 and resp.status_int != 404:
        raise Exception('Could not delete the user object: %s %s' %
                        (path, resp.status))
    return HTTPNoContent(request=req)
/**
 * Checks whether any node in any workspace uses the given node type, by
 * issuing a {@code SELECT * FROM [<type>] LIMIT 1} query across the entire
 * repository.
 *
 * @param nodeTypeName the name of the node type to look for
 * @return true when at least one node of that type exists; also true when
 *         the query fails, erring on the safe side
 * @throws InvalidQueryException if the generated query cannot be parsed
 */
boolean isNodeTypeInUse( Name nodeTypeName ) throws InvalidQueryException {
    String nodeTypeString = nodeTypeName.getString(context.getNamespaceRegistry());
    String expression = "SELECT * from [" + nodeTypeString + "] LIMIT 1";
    TypeSystem typeSystem = context.getValueFactories().getTypeSystem();
    // Parsing must be done now ...
    QueryCommand command = queryParser.parseQuery(expression, typeSystem);
    assert command != null : "Could not parse " + expression;
    Schemata schemata = getRepositorySchemata();
    // Now query the entire repository for any nodes that use this node type ...
    RepositoryCache repoCache = repository.repositoryCache();
    RepositoryQueryManager queryManager = repository.queryManager();
    Set<String> workspaceNames = repoCache.getWorkspaceNames();
    Map<String, NodeCache> overridden = null;
    NodeTypes nodeTypes = repository.nodeTypeManager().getNodeTypes();
    RepositoryIndexes indexDefns = repository.queryManager().getIndexes();
    CancellableQuery query = queryManager.query(context, repoCache, workspaceNames, overridden, command, schemata,
                                                indexDefns, nodeTypes, null, null);
    try {
        QueryResults result = query.execute();
        if (result.isEmpty()) return false;
        if (result.getRowCount() < 0) {
            // Row count is unknown; try to fetch the first row instead ...
            NodeSequence seq = result.getRows();
            Batch batch = seq.nextBatch();
            while (batch != null) {
                if (batch.hasNext()) return true;
                // It's not common for the first batch to be empty, but it's possible. So try the next batch ...
                batch = seq.nextBatch();
            }
            return false;
        }
        return result.getRowCount() > 0;
    } catch (RepositoryException e) {
        logger.error(e, JcrI18n.errorCheckingNodeTypeUsage, nodeTypeName, e.getLocalizedMessage());
        // Assume the type is in use when usage cannot be verified.
        return true;
    }
}
java | @Override
public void close () throws IOException
{
if (in != null)
{
in.close ();
in = null;
m_aBuf = null;
}
} |
def __retry_session(self, retries=10, backoff_factor=0.3,
                    status_forcelist=(500, 502, 504),
                    session=None):
    """
    Retry the connection using requests if it fails. Use this as a wrapper
    to request from datapoint
    """
    # requests.Session allows finer control, which is needed to use the
    # retrying code
    the_session = session or requests.Session()
    # The Retry object manages the actual retrying
    retry_policy = Retry(total=retries, read=retries, connect=retries,
                         backoff_factor=backoff_factor,
                         status_forcelist=status_forcelist)
    for scheme in ('http://', 'https://'):
        the_session.mount(scheme, HTTPAdapter(max_retries=retry_policy))
    return the_session
java | public static Map<String,String> order(Map<String, String> map){
HashMap<String, String> tempMap = new LinkedHashMap<String, String>();
List<Map.Entry<String, String>> infoIds = new ArrayList<Map.Entry<String, String>>( map.entrySet());
Collections.sort(infoIds, new Comparator<Map.Entry<String, String>>() {
public int compare(Map.Entry<String, String> o1,Map.Entry<String, String> o2) {
return (o1.getKey()).toString().compareTo(o2.getKey());
}
});
for (int i = 0; i < infoIds.size(); i++) {
Map.Entry<String, String> item = infoIds.get(i);
tempMap.put(item.getKey(), item.getValue());
}
return tempMap;
} |
def remember(self, request, username, **kw):
    """ Returns 'WWW-Authenticate' header with a value that should be used
    in 'Authorization' header.

    Returns ``None`` (implicitly) when no ``credentials_callback`` is
    configured.
    """
    if self.credentials_callback:
        token = self.credentials_callback(username, request)
        api_key = 'ApiKey {}:{}'.format(username, token)
        return [('WWW-Authenticate', api_key)]
java | public int estimateMaximumBytes() {
int count = this.size();
for (String key: _rsets.keySet()) {
CachedResultSet crs = _rsets.get(key);
if (crs.rows != null) {
count += crs.columns.length*crs.rows.size();
}
}
return count * 64;
} |
java | public void addActionImport(Class<?> cls) {
EdmActionImport actionImportAnnotation = cls.getAnnotation(EdmActionImport.class);
ActionImportImpl.Builder actionImportBuilder = new ActionImportImpl.Builder()
.setEntitySetName(actionImportAnnotation.entitySet())
.setActionName(actionImportAnnotation.namespace() + "." + actionImportAnnotation.action())
.setName(actionImportAnnotation.name())
.setJavaClass(cls);
actionImportBuilders.add(actionImportBuilder);
} |
java | private void setConfigEntityMapping(Map<String, Object> configProps) throws WIMException {
List<String> entityTypes = getSupportedEntityTypes();
String rdnProp;
String type = null;
entityConfigMap = new HashMap<String, String>();
for (int i = 0; i < entityTypes.size(); i++) {
type = entityTypes.get(i);
rdnProp = (getRDNProperties(type) == null) ? null : getRDNProperties(type)[0];
entityConfigMap.put(type, rdnProp);
}
if (entityConfigMap.get(Service.DO_LOGIN_ACCOUNT) == null && entityConfigMap.get(personAccountType) != null)
entityConfigMap.put(Service.DO_LOGIN_ACCOUNT, entityConfigMap.get(personAccountType));
if (tc.isDebugEnabled())
Tr.debug(tc, "setConfigEntityMapping entityConfigMap:" + entityConfigMap);
} |
/**
 * Dis-associates the given Xid from this XA resource (XA {@code end}) and
 * resets the current transaction state.
 *
 * @param xid the transaction branch identifier
 * @param flags the XA flags for the end call
 * @throws XAException with {@code XAER_NOTA} when the Xid is unknown
 */
public void end(Xid xid, int flags) throws XAException
{
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "end", new Object[]{"XID="+xid, _manager.xaFlagsToString(flags)});
    try
    {
        _manager.end(new PersistentTranId(xid), flags);
        // Reset our instance variables.
        _currentTran = null;
        _currentPtid = null;
    }
    catch (XidUnknownException xue)
    {
        com.ibm.ws.ffdc.FFDCFilter.processException(xue, "com.ibm.ws.sib.msgstore.transactions.MSDelegatingXAResource.end", "1:375:1.51.1.7", this);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) SibTr.event(this, tc, "Cannot dis-associate from this Xid. It is unknown!", xue);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "end");
        // Map the internal exception to the XA-mandated error code, keeping
        // the original as the cause.
        XAException xaException = new XAException(XAException.XAER_NOTA);
        xaException.initCause(xue);
        throw xaException;
    }
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "end");
}
def reinit(self):
    """Use carefully to reset the episode count to 0.

    Opens a fresh TCP connection to the server, performs the hello
    handshake, sends the <Init> message and returns whether the server
    acknowledged with a non-zero status.

    :return: True when the server replied with a non-zero status word.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.connect((self.server, self.port))
        self._hello(sock)
        comms.send_message(sock, ("<Init>" + self._get_token() + "</Init>").encode())
        reply = comms.recv_message(sock)
    finally:
        # FIX: the socket leaked if connect/_hello/send/recv raised before
        # the original unconditional close() was reached.
        sock.close()
    # Reply is a single network-order unsigned 32-bit status word.
    ok, = struct.unpack('!I', reply)
    return ok != 0
/**
 * Formats a BigDecimal with the given pattern by delegating to
 * {@code format(double, String)}.
 *
 * <p>NOTE(review): {@code doubleValue()} silently loses precision for values
 * outside double's exactly-representable range — confirm callers never rely
 * on full BigDecimal precision, or format via
 * {@code new DecimalFormat(pattern).format(number)} instead.
 */
public static String format(BigDecimal number, String pattern) {
    return format(number.doubleValue(), pattern);
}
def path_exists(self, dest, weight=None):
    """Return whether there is a path leading from me to ``dest``.

    With ``weight``, only consider edges that have a stat by the
    given name.

    Raise ``ValueError`` if ``dest`` is not a node in my character
    or the name of one.
    """
    try:
        return bool(self.shortest_path_length(dest, weight))
    except KeyError:
        # No path found: shortest_path_length raised KeyError.
        # NOTE(review): the docstring promises ValueError for unknown nodes —
        # confirm shortest_path_length distinguishes that case from "no path".
        return False
def order_market_sell(self, **params):
    """Send in a new market sell order.

    :param symbol: required
    :type symbol: str
    :param quantity: required
    :type quantity: decimal
    :param newClientOrderId: A unique id for the order. Automatically generated if not sent.
    :type newClientOrderId: str
    :param newOrderRespType: Set the response JSON. ACK, RESULT, or FULL; default: RESULT.
    :type newOrderRespType: str
    :param recvWindow: the number of milliseconds the request is valid for
    :type recvWindow: int
    :returns: API response

    See order endpoint for full response options

    :raises: BinanceRequestException, BinanceAPIException, BinanceOrderException, BinanceOrderMinAmountException, BinanceOrderMinPriceException, BinanceOrderMinTotalException, BinanceOrderUnknownSymbolException, BinanceOrderInactiveSymbolException
    """
    # Force the side to SELL and delegate to the generic market-order helper.
    params['side'] = self.SIDE_SELL
    return self.order_market(**params)
/**
 * Populates each tab's content with timestamped explanatory text just before
 * painting, so the render time of each tab mode (CLIENT, SERVER, LAZY,
 * DYNAMIC, EAGER) can be observed.
 *
 * @param request the current request being responded to
 */
@Override
protected void preparePaintComponent(final Request request) {
    explanationWithTimeStamp.setText("The time the page was rendered was " + (new Date()).toString());
    tabset1TabClient.setContent(new ExplanatoryText(
            "This content was present when the page first rendered at " + (new Date()).toString()));
    tabset1TabServer.setContent(new ExplanatoryText(
            "This is the content of tab two. It should be noted that mode SERVER is deprecated.\nThis mode poses a"
            + " number of usability problems and should not be used.\n This content was created at "
            + (new Date()).toString()));
    tabset1TabLazy.setContent(new ExplanatoryText(
            "This tab content is rendered when the tab opens then remains static. Check the date stamp: "
            + (new Date()).toString()));
    tabset1TabDynamic.setContent(new ExplanatoryText(
            "This tab content refreshes each time it is opened. Check the date stamp: "
            + (new Date()).toString()));
    tabset1TabEager.setContent(new ExplanatoryText(
            "This tab content is fetched once asynchronously then remains static. Check the date stamp: "
            + (new Date()).toString()));
}
/**
 * Lists the web apps using the given hybrid connection, blocking on the
 * async single-page call and returning a PagedList that lazily fetches
 * subsequent pages.
 *
 * @param resourceGroupName the resource group name
 * @param name the app service plan / site name
 * @param namespaceName the relay namespace name
 * @param relayName the hybrid connection relay name
 * @return a paged list of web app identifiers
 */
public PagedList<String> listWebAppsByHybridConnection(final String resourceGroupName, final String name, final String namespaceName, final String relayName) {
    ServiceResponse<Page<String>> response = listWebAppsByHybridConnectionSinglePageAsync(resourceGroupName, name, namespaceName, relayName).toBlocking().single();
    return new PagedList<String>(response.body()) {
        @Override
        public Page<String> nextPage(String nextPageLink) {
            return listWebAppsByHybridConnectionNextSinglePageAsync(nextPageLink).toBlocking().single().body();
        }
    };
}
def _perform_system_check(self):
    """
    Perform a system check to define if we need to throttle to handle
    all the incoming messages
    """
    if Global.CONFIG_MANAGER.tracing_mode:
        Global.LOGGER.debug("performing a system check")
    now = datetime.datetime.now()
    sent = Global.MESSAGE_DISPATCHER.dispatched
    received = self.fetched
    # Backlog = messages dispatched but not yet fetched by this consumer.
    queue_length = sent - received
    message_sleep_interval = Global.CONFIG_MANAGER.message_fetcher_sleep_interval
    if Global.CONFIG_MANAGER.show_stats:
        # Emit stats at most once per stats_timeout seconds.
        if (now - self.last_stats_check_date).total_seconds() > Global.CONFIG_MANAGER.stats_timeout:
            self.last_stats_check_date = now
            stats_string = f"showing stats\n--- [STATS] ---\nMessage Sent: {sent}\nMessage Received: {received}\nMessage Sleep Interval = {message_sleep_interval}\nQueue length = {queue_length}\n--- [ END ] ---"
            Global.LOGGER.info(stats_string)
    # if we are accumulating messages, or we have processed at least 5000 messages
    # since last check, we need to speed up the process
    messages_limit_reached = sent - self.last_queue_check_count > Global.CONFIG_MANAGER.messages_dispatched_for_system_check
    queue_limit_reached = queue_length > Global.CONFIG_MANAGER.queue_length_for_system_check
    time_limit_since_last_check_is_over = (now - self.last_queue_check_date).total_seconds() > Global.CONFIG_MANAGER.seconds_between_queue_check
    if not Global.CONFIG_MANAGER.fixed_message_fetcher_interval:
        if (messages_limit_reached) or (queue_limit_reached and time_limit_since_last_check_is_over):
            cause = "messages limit reached" if messages_limit_reached else "queue limit reached"
            Global.LOGGER.debug(f"triggering the throttle function due to {cause}")
            self._adapt_sleep_interval(sent, received, queue_length, now)
def container(dec):
    """Meta-decorator (for decorating decorators).

    The returned decorator behaves exactly like ``dec``, but additionally
    exposes the original (undecorated) function on the result as the
    ``orig_func`` attribute.

    :param dec: Decorator to decorate
    :type dec: function
    :returns: Decorated decorator
    """
    # Credits: http://stackoverflow.com/a/1167248/1798683
    @wraps(dec)
    def meta_decorator(func):
        wrapped = dec(func)
        wrapped.orig_func = func
        return wrapped
    return meta_decorator
python | def _get_resolution(age, values):
"""
Calculates the resolution (res)
Thanks Deborah!
"""
res = []
try:
# Get the nan index from the values and remove from age
# age2 = age[np.where(~np.isnan(values))[0]]
# res = np.diff(age2)
# Make sure that age and values are numpy arrays
# age = np.array(age, dtype=float)
# values = np.array(values, dtype=float)
# Get the nan index from the values and remove from age
age2 = age[np.where(~np.isnan(values))[0]]
res = np.diff(age2)
except IndexError as e:
print("get_resolution: IndexError: {}".format(e))
except Exception as e:
logger_inferred_data.warn("get_resolution: Exception: {}".format(e))
return res |
def divide_works(self, corpus):
    """Use the work-breaking option.

    Converts every original TLG/PHI5 text file into per-work files under
    the cltk_data individual_works directory.

    :param corpus: which corpus to process, either 'tlg' or 'phi5'

    TODO: Maybe incorporate this into ``convert_corpus()``
    TODO: Write test for this
    """
    if corpus == 'tlg':
        orig_dir_rel = '~/cltk_data/originals/tlg'
        works_dir_rel = '~/cltk_data/greek/text/tlg/individual_works'
        file_prefix = 'TLG'
        latin = False
    elif corpus == 'phi5':
        orig_dir_rel = '~/cltk_data/originals/phi5'
        works_dir_rel = '~/cltk_data/latin/text/phi5/individual_works'
        file_prefix = 'LAT'
        latin = True  # this is for the optional TLGU argument to convert()
    orig_dir = os.path.expanduser(orig_dir_rel)
    works_dir = os.path.expanduser(works_dir_rel)
    if not os.path.exists(works_dir):
        os.makedirs(works_dir)
    files = os.listdir(orig_dir)
    # Only corpus text files, e.g. TLG....TXT / LAT....TXT.
    texts = [x for x in files if x.endswith('.TXT') and x.startswith(file_prefix)]
    for file in texts:
        orig_file_path = os.path.join(orig_dir, file)
        new_file_path = os.path.join(works_dir, file)
        try:
            self.convert(orig_file_path, new_file_path, divide_works=True, latin=latin)
            logger.info('Writing files at %s to %s.', orig_file_path, works_dir)
        except Exception as err:
            # Best-effort: log and continue with the remaining files.
            logger.error('Failed to convert files: %s.', err)
/**
 * Sets the minimum trace time threshold both as the {@code MIN_TIME} system
 * property and on {@code TimerTrace.mintime}.
 *
 * @param mintime the minimum time threshold — unit defined by TimerTrace;
 *        TODO confirm (milliseconds?)
 */
public static void setMinTime(int mintime) {
    System.setProperty(MIN_TIME, String.valueOf(mintime));
    TimerTrace.mintime = mintime;
}
def panic(self, *args):
    """
    Creates a fatal error and exit

    :param args: message parts forwarded to the error reporter
    """
    self._err("fatal", *args)
    # Exit only in normal runs; in test mode (test_errs_mode exactly False
    # skips exit) the process must survive so tests can inspect the error.
    if self.test_errs_mode is False: # pragma: no cover
        sys.exit(1)
def signin_redirect(redirect=None, user=None):
    """
    Redirect user after successful sign in.

    First looks for a ``requested_redirect``. If not supplied will fall-back to
    the user specific account page. If all fails, will fall-back to the standard
    Django ``LOGIN_REDIRECT_URL`` setting. Returns a string defining the URI to
    go next.

    :param redirect:
        A value normally supplied by ``next`` form field. Gets preference
        before the default view which requires the user.
    :param user:
        A ``User`` object specifying the user who has just signed in.

    :return: String containing the URI to redirect to.
    """
    # Guard-clause style: explicit redirect wins, then user page, then default.
    if redirect:
        return redirect
    if user is not None:
        return userena_settings.USERENA_SIGNIN_REDIRECT_URL % \
            {'username': user.username}
    return settings.LOGIN_REDIRECT_URL
/**
 * Lists the firewall rules of the given account as an observable stream of
 * pages, unwrapping the service response bodies.
 *
 * @param resourceGroupName the resource group name
 * @param accountName the account name
 * @return an observable emitting pages of firewall rules
 */
public Observable<Page<FirewallRuleInner>> listFirewallRulesAsync(final String resourceGroupName, final String accountName) {
    return listFirewallRulesWithServiceResponseAsync(resourceGroupName, accountName)
        .map(new Func1<ServiceResponse<Page<FirewallRuleInner>>, Page<FirewallRuleInner>>() {
            @Override
            public Page<FirewallRuleInner> call(ServiceResponse<Page<FirewallRuleInner>> response) {
                return response.body();
            }
        });
}
/**
 * Lazily creates and returns the singleton instance.
 *
 * <p>NOTE(review): this is double-checked locking; it is only correct if the
 * {@code instance} field is declared {@code volatile} — confirm the field
 * declaration (not visible here). Also note that {@code path} is only
 * honored on the very first call; later callers passing a different path
 * silently receive the originally-created instance.
 *
 * @param path optional configuration path used only on first creation
 * @return the singleton deriver instance
 */
public static BinaryAnnotationMappingDeriver getInstance(String path) {
    if (instance == null) {
        synchronized (LOCK) {
            if (instance == null) {
                instance = path == null ? new BinaryAnnotationMappingDeriver() :
                        new BinaryAnnotationMappingDeriver(path);
            }
        }
    }
    return instance;
}
def export_by_tag_csv(ekey, dstore):
    """
    Export one CSV file per statistic extracted for the given tag.

    :param ekey: export key, i.e. a pair (datastore key, fmt)
    :param dstore: datastore object
    :returns: list of file names written
    """
    token, tag = ekey[0].split('/')
    data = extract(dstore, token + '/' + tag)
    fnames = []
    writer = writers.CsvWriter(fmt=writers.FIVEDIGITS)
    for stat, arr in data:
        # '/' is not filesystem-safe in file names, so flatten it to '-'.
        tup = (ekey[0].replace('/', '-'), stat, ekey[1])
        path = '%s-%s.%s' % tup
        fname = dstore.export_path(path)
        writer.save(arr, fname)
        fnames.append(fname)
    return fnames
/**
 * Removes the given segments from this outbound transfer; if no segments
 * remain afterwards, the whole transfer is cancelled.
 *
 * @param cancelledSegments segments to stop transferring
 */
public void cancelSegments(IntSet cancelledSegments) {
    if (segments.removeAll(cancelledSegments)) {
        if (trace) {
            log.tracef("Cancelling outbound transfer to node %s, segments %s (remaining segments %s)",
                       destination, cancelledSegments, segments);
        }
        entriesBySegment.keySet().removeAll(cancelledSegments); // here we do not update accumulatedEntries but this inaccuracy does not cause any harm
        if (segments.isEmpty()) {
            cancel();
        }
    }
}
/**
 * Unpacks {@code len} bit-packed longs of the given bit width from the input
 * stream into {@code buffer} starting at {@code offset}.
 *
 * <p>Dispatches to a specialized routine for the common power-of-two and
 * byte-multiple widths; any other width falls back to the generic unpacker.
 *
 * @param buffer destination array
 * @param offset first index written in {@code buffer}
 * @param len number of values to unpack (at most 512)
 * @param bitSize bit width of each packed value
 * @param input stream to read packed bytes from
 * @throws IOException if reading from the stream fails
 */
public void unpack(long[] buffer, int offset, int len, int bitSize, InputStream input)
        throws IOException
{
    checkArgument(len <= MAX_BUFFERED_POSITIONS, "Expected ORC files to have runs of at most 512 bit packed longs");
    switch (bitSize) {
        case 1:
            unpack1(buffer, offset, len, input);
            break;
        case 2:
            unpack2(buffer, offset, len, input);
            break;
        case 4:
            unpack4(buffer, offset, len, input);
            break;
        case 8:
            unpack8(buffer, offset, len, input);
            break;
        case 16:
            unpack16(buffer, offset, len, input);
            break;
        case 24:
            unpack24(buffer, offset, len, input);
            break;
        case 32:
            unpack32(buffer, offset, len, input);
            break;
        case 40:
            unpack40(buffer, offset, len, input);
            break;
        case 48:
            unpack48(buffer, offset, len, input);
            break;
        case 56:
            unpack56(buffer, offset, len, input);
            break;
        case 64:
            unpack64(buffer, offset, len, input);
            break;
        default:
            unpackGeneric(buffer, offset, len, bitSize, input);
    }
}
java | private void executeInternalBatch(int size) throws SQLException {
executeQueryPrologue(true);
results = new Results(this, 0, true, size, false, resultSetScrollType,
resultSetConcurrency, autoGeneratedKeys, protocol.getAutoIncrementIncrement());
if (protocol
.executeBatchClient(protocol.isMasterConnection(), results, prepareResult, parameterList,
hasLongData)) {
return;
}
//send query one by one, reading results for each query before sending another one
SQLException exception = null;
if (queryTimeout > 0) {
for (int batchQueriesCount = 0; batchQueriesCount < size; batchQueriesCount++) {
protocol.stopIfInterrupted();
try {
protocol.executeQuery(protocol.isMasterConnection(), results, prepareResult,
parameterList.get(batchQueriesCount));
} catch (SQLException e) {
if (options.continueBatchOnError) {
exception = e;
} else {
throw e;
}
}
}
} else {
for (int batchQueriesCount = 0; batchQueriesCount < size; batchQueriesCount++) {
try {
protocol.executeQuery(protocol.isMasterConnection(), results, prepareResult,
parameterList.get(batchQueriesCount));
} catch (SQLException e) {
if (options.continueBatchOnError) {
exception = e;
} else {
throw e;
}
}
}
}
if (exception != null) {
throw exception;
}
} |
/**
 * Copies the axial (aX_) effort components for every element into the axial
 * force arrays, for both element ends (i and j).
 *
 * @param hi index of the load hypothesis/case to read — TODO confirm
 */
public void EBEsAssignAxialForces(int hi){
    AxialForcei_ = new double[numberOfElements_];
    AxialForcej_ = new double[numberOfElements_];
    for(int el=0;el<numberOfElements_;el++){
        AxialForcei_[el] = Efforti_[aX_][el][hi];
        AxialForcej_[el] = Effortj_[aX_][el][hi];
    }
}
/**
 * Determines whether the current thread may acquire this bean's lock, based
 * on thread affinity and transaction enlistment (see inline comments for the
 * individual cases).
 *
 * @param methodContext the current EJB method context
 * @param tx the transaction attempting to lock the bean
 * @return true when the lock may be granted to the current thread
 */
boolean eligibleForLock(EJSDeployedSupport methodContext, ContainerTx tx) // d671368
{
    final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
    // If the bean is active on a thread and the threads match, then allow
    // the bean to be locked. This is a reentrant call, and transaction
    // enlistment will fail, as required.
    if (ivActiveOnThread != null) {
        if (isTraceOn && tc.isDebugEnabled())
            Tr.debug(tc, "eligibleForLock : ActiveOnThread : " + (ivActiveOnThread == Thread.currentThread()));
        return ivActiveOnThread == Thread.currentThread();
    }
    // If the bean is not active on a thread, then it has been unlocked
    // and is eligible for use on any thread. (Note that unlock will not
    // actually unlock if the bean is in a method and is switching between
    // local and global transactions for bean managed.) d648385 F743-22462.CR
    //
    // And, the bean is not in a transaction or the transactions match, then
    // the bean lock may be granted.
    //
    // Note that the transactions won't match if the bean is enlisted in a
    // sticky transaction, but the method on the current thread was started
    // and created a local tran prior to the sticky global being started.
    // For this scenario, don't grant the lock... this thread will wait
    // until the sticky transaction commits.
    if (currentTx == null || currentTx == tx) {
        if (isTraceOn && tc.isDebugEnabled())
            Tr.debug(tc, "eligibleForLock : CurrentTx : true");
        return true;
    }
    // Finally, if the bean is not active on a thread, but it is enlisted in
    // a transaction that doesn't match, then it seems that it is not eligible.
    // And, if the bean's transaction was suspended on the current thread, and
    // access timeout is -1, then a deadlock will occur, which would normally
    // be an application problem. However, the spec requires that a call to
    // the 2.1 component remove result in an exception if the bean is enlisted
    // in a transaction. So check the current method context to see if the
    // bean's transaction was suspended and if so, report eligible. A reentrant
    // failure will be reported during enlistment. d704504
    UOWCookie uowCookie = methodContext.uowCookie;
    if (uowCookie != null) {
        if (currentTx.ivTxKey.equals(uowCookie.getSuspendedTransactionalUOW())) {
            if (isTraceOn && tc.isDebugEnabled())
                Tr.debug(tc, "eligibleForLock : SuspendedTx : true");
            return true;
        }
    }
    if (isTraceOn && tc.isDebugEnabled())
        Tr.debug(tc, "eligibleForLock : false");
    return false;
}
python | def unregisterHandler(self, fh):
"""
Unregister a file descriptor. Clean data, if such operation has been scheduled.
Parameters
----------
fh : int
File descriptor.
"""
try:
self.fds.remove(fh)
except KeyError:
pass
self.lock.acquire()
try:
self.data.rollover() # rollover data on close.
finally:
self.lock.release()
if self.closing and not self.fds:
self.data.close() |
python | def cli(ctx, project_dir):
"""Verify the verilog code."""
exit_code = SCons(project_dir).verify()
ctx.exit(exit_code) |
java | public void bulkAction(List<SQSMessageIdentifier> messageIdentifierList, int indexOfMessage)
throws JMSException {
assert indexOfMessage > 0;
assert indexOfMessage <= messageIdentifierList.size();
Map<String, List<String>> receiptHandleWithSameQueueUrl = new HashMap<String, List<String>>();
// Add all messages up to and including requested message into Map.
// Map contains key as queueUrl and value as list receiptHandles from
// that queueUrl.
for (int i = 0; i < indexOfMessage; i++) {
SQSMessageIdentifier messageIdentifier = messageIdentifierList.get(i);
String queueUrl = messageIdentifier.getQueueUrl();
List<String> receiptHandles = receiptHandleWithSameQueueUrl.get(queueUrl);
// if value of queueUrl is null create new list.
if (receiptHandles == null) {
receiptHandles = new ArrayList<String>();
receiptHandleWithSameQueueUrl.put(queueUrl, receiptHandles);
}
// add receiptHandle to the list.
receiptHandles.add(messageIdentifier.getReceiptHandle());
// Once there are 10 messages in messageBatch, apply the batch action
if (receiptHandles.size() == SQSMessagingClientConstants.MAX_BATCH) {
action(queueUrl, receiptHandles);
receiptHandles.clear();
}
}
// Flush rest of messages in map.
for (Entry<String, List<String>> entry : receiptHandleWithSameQueueUrl.entrySet()) {
action(entry.getKey(), entry.getValue());
}
} |
java | public java.rmi.Remote getPort(Class serviceEndpointInterface) throws javax.xml.rpc.ServiceException {
try {
if (com.google.api.ads.adwords.axis.v201809.o.TrafficEstimatorServiceInterface.class.isAssignableFrom(serviceEndpointInterface)) {
com.google.api.ads.adwords.axis.v201809.o.TrafficEstimatorServiceSoapBindingStub _stub = new com.google.api.ads.adwords.axis.v201809.o.TrafficEstimatorServiceSoapBindingStub(new java.net.URL(TrafficEstimatorServiceInterfacePort_address), this);
_stub.setPortName(getTrafficEstimatorServiceInterfacePortWSDDServiceName());
return _stub;
}
}
catch (java.lang.Throwable t) {
throw new javax.xml.rpc.ServiceException(t);
}
throw new javax.xml.rpc.ServiceException("There is no stub implementation for the interface: " + (serviceEndpointInterface == null ? "null" : serviceEndpointInterface.getName()));
} |
python | def update(self, repo_dir, **kwargs):
"""This function updates an existing checkout of source code."""
del kwargs
rev = self._args.get("revision")
if rev:
return [{"args": ["git", "checkout", rev], "cwd": repo_dir}] + _ff_command(
rev, repo_dir
)
return None |
python | def tagger(self):
"""
A property to link into IntentEngine's intent_parsers.
Warning: this is only for backwards compatiblility and should not be used if you
intend on using domains.
Return: the domains intent_parsers from its IntentEngine
"""
domain = 0
if domain not in self.domains:
self.register_domain(domain=domain)
return self.domains[domain].tagger |
java | public static Bridge get(final String id) throws Exception {
final BandwidthClient client = BandwidthClient.getInstance();
return get(client, id);
} |
java | public OperationExecutionRecord measureAfter() {
// measure after
final long tout = TIME.getTime();
OperationExecutionRecord record = new OperationExecutionRecord(signature, sessionId, traceId, tin, tout, hostname, eoi, ess);
CTRLINST.newMonitoringRecord(record);
// cleanup
if (entrypoint) {
CFREGISTRY.unsetThreadLocalTraceId();
CFREGISTRY.unsetThreadLocalEOI();
CFREGISTRY.unsetThreadLocalESS();
} else {
CFREGISTRY.storeThreadLocalESS(ess); // next operation is ess
}
return record;
} |
python | def get_file_perms(self,
filename,
note=None,
loglevel=logging.DEBUG):
"""Returns the permissions of the file on the target as an octal
string triplet.
@param filename: Filename to get permissions of.
@param note: See send()
@type filename: string
@rtype: string
"""
shutit = self.shutit
shutit.handle_note(note)
cmd = ' command stat -c %a ' + filename
self.send(ShutItSendSpec(self,
send=' ' + cmd,
check_exit=False,
echo=False,
loglevel=loglevel,
ignore_background=True))
res = shutit.match_string(self.pexpect_child.before, '([0-9][0-9][0-9])')
shutit.handle_note_after(note=note)
return res |
java | private double determinant3x3(double t00, double t01, double t02,
double t10, double t11, double t12,
double t20, double t21, double t22) {
return (t00 * (t11 * t22 - t12 * t21) +
t01 * (t12 * t20 - t10 * t22) +
t02 * (t10 * t21 - t11 * t20));
} |
java | protected Boolean _hasSideEffects(XIfExpression expression, ISideEffectContext context) {
if (hasSideEffects(expression.getIf(), context)) {
return true;
}
final Map<String, List<XExpression>> buffer1 = context.createVariableAssignmentBufferForBranch();
if (hasSideEffects(expression.getThen(), context.branch(buffer1))) {
return true;
}
final Map<String, List<XExpression>> buffer2 = context.createVariableAssignmentBufferForBranch();
if (hasSideEffects(expression.getElse(), context.branch(buffer2))) {
return true;
}
context.mergeBranchVariableAssignments(Arrays.asList(buffer1, buffer2));
return false;
} |
python | def parse_delay(data):
"""
Prase the delay
"""
# parse data from the details view
rsp = requests.get(data['details'])
soup = BeautifulSoup(rsp.text, "html.parser")
# get departure delay
delay_departure_raw = soup.find('div', class_="routeStart").find('span', class_=["delay", "delayOnTime"])
if delay_departure_raw:
delay_departure = calculate_delay(data['departure'],
delay_departure_raw.text)
else:
delay_departure = 0
# get arrival delay
delay_arrival_raw = soup.find('div', class_=["routeEnd","routeEndAdditional"]).find('span', class_=["delay", "delayOnTime"])
if delay_arrival_raw:
delay_arrival = calculate_delay(data['arrival'],
delay_arrival_raw.text)
else:
delay_arrival = 0
# save the parsed data
if delay_departure + delay_arrival == 0:
data['ontime'] = True
else:
data['ontime'] = False
data['delay'] = {
'delay_departure': int(delay_departure),
'delay_arrival': int(delay_arrival)
}
# TODO: this should not be hardcoded!
data['canceled'] = False
return data |
python | def addColumn(self, columnName, dtype, defaultValue):
"""Adds a column with the given parameters to the underlying model
This method is also a slot.
If no model is set, nothing happens.
Args:
columnName (str): The name of the new column.
dtype (numpy.dtype): The datatype of the new column.
defaultValue (object): Fill the column with this value.
"""
model = self.tableView.model()
if model is not None:
model.addDataFrameColumn(columnName, dtype, defaultValue)
self.addColumnButton.setChecked(False) |
java | @Override
@SuppressWarnings("fallthrough")
public Node optimizeSubtree(Node node) {
switch (node.getToken()) {
case ASSIGN_SUB:
return reduceSubstractionAssignment(node);
case TRUE:
case FALSE:
return reduceTrueFalse(node);
case NEW:
node = tryFoldStandardConstructors(node);
if (!node.isCall()) {
return node;
}
// Fall through on purpose because tryFoldStandardConstructors() may
// convert a NEW node into a CALL node
case CALL:
Node result = tryFoldLiteralConstructor(node);
if (result == node) {
result = tryFoldSimpleFunctionCall(node);
if (result == node) {
result = tryFoldImmediateCallToBoundFunction(node);
}
}
return result;
case RETURN:
return tryReduceReturn(node);
case COMMA:
// TODO(b/63630312): should flatten an entire comma expression in a single pass.
return trySplitComma(node);
case NAME:
return tryReplaceUndefined(node);
case ARRAYLIT:
return tryMinimizeArrayLiteral(node);
case GETPROP:
return tryMinimizeWindowRefs(node);
case TEMPLATELIT:
return tryTurnTemplateStringsToStrings(node);
case MUL:
case AND:
case OR:
case BITOR:
case BITXOR:
case BITAND:
return tryRotateAssociativeOperator(node);
default:
return node; //Nothing changed
}
} |
java | public static void closeScope(Object name) {
//we remove the scope first, so that other threads don't see it, and see the next snapshot of the tree
ScopeNode scope = (ScopeNode) MAP_KEY_TO_SCOPE.remove(name);
if (scope != null) {
ScopeNode parentScope = scope.getParentScope();
if (parentScope != null) {
parentScope.removeChild(scope);
} else {
ConfigurationHolder.configuration.onScopeForestReset();
}
removeScopeAndChildrenFromMap(scope);
}
} |
python | def create(cls, data=None, api_key=None, endpoint=None, add_headers=None,
data_key=None, response_data_key=None, method='POST', **kwargs):
"""
Create an instance of the Entity model by calling to the API endpoint.
This ensures that server knows about the creation before returning
the class instance.
NOTE: The server must return a response with the schema containing
the entire entity value. A True or False response is no bueno.
"""
inst = cls(api_key=api_key)
if data_key is None:
data_key = cls.sanitize_ep(cls.get_endpoint())
if response_data_key is None:
response_data_key = cls.sanitize_ep(cls.get_endpoint())
body = {}
body[data_key] = data
if endpoint is None:
endpoint = cls.get_endpoint()
inst._set(cls._parse(inst.request(method,
endpoint=endpoint,
data=body,
query_params=kwargs,
add_headers=add_headers,
),
key=response_data_key))
return inst |
python | def step2(self, pub_key, salt):
"""Second authentication step."""
self._check_initialized()
pk_str = binascii.hexlify(pub_key).decode()
salt = binascii.hexlify(salt).decode()
self.client_session_key, _, _ = self.session.process(pk_str, salt)
_LOGGER.debug('Client session key: %s', self.client_session_key)
# Generate client public and session key proof.
client_public = self.session.public
client_session_key_proof = self.session.key_proof
_LOGGER.debug('Client public: %s, proof: %s',
client_public, client_session_key_proof)
if not self.session.verify_proof(self.session.key_proof_hash):
raise AuthenticationError('proofs do not match (mitm?)')
return client_public, client_session_key_proof |
python | def radial_symmetry(mesh):
"""
Check whether a mesh has rotational symmetry.
Returns
-----------
symmetry : None or str
None No rotational symmetry
'radial' Symmetric around an axis
'spherical' Symmetric around a point
axis : None or (3,) float
Rotation axis or point
section : None or (3, 2) float
If radial symmetry provide vectors
to get cross section
"""
# if not a volume this is meaningless
if not mesh.is_volume:
return None, None, None
# the sorted order of the principal components of inertia (3,) float
order = mesh.principal_inertia_components.argsort()
# we are checking if a geometry has radial symmetry
# if 2 of the PCI are equal, it is a revolved 2D profile
# if 3 of the PCI (all of them) are equal it is a sphere
# thus we take the diff of the sorted PCI, scale it as a ratio
# of the largest PCI, and then scale to the tolerance we care about
# if tol is 1e-3, that means that 2 components are identical if they
# are within .1% of the maximum PCI.
diff = np.abs(np.diff(mesh.principal_inertia_components[order]))
diff /= np.abs(mesh.principal_inertia_components).max()
# diffs that are within tol of zero
diff_zero = (diff / 1e-3).astype(int) == 0
if diff_zero.all():
# this is the case where all 3 PCI are identical
# this means that the geometry is symmetric about a point
# examples of this are a sphere, icosahedron, etc
axis = mesh.principal_inertia_vectors[0]
section = mesh.principal_inertia_vectors[1:]
return 'spherical', axis, section
elif diff_zero.any():
# this is the case for 2/3 PCI are identical
# this means the geometry is symmetric about an axis
# probably a revolved 2D profile
# we know that only 1/2 of the diff values are True
# if the first diff is 0, it means if we take the first element
# in the ordered PCI we will have one of the non- revolve axis
# if the second diff is 0, we take the last element of
# the ordered PCI for the section axis
# if we wanted the revolve axis we would just switch [0,-1] to
# [-1,0]
# since two vectors are the same, we know the middle
# one is one of those two
section_index = order[np.array([[0, 1],
[1, -1]])[diff_zero]].flatten()
section = mesh.principal_inertia_vectors[section_index]
# we know the rotation axis is the sole unique value
# and is either first or last of the sorted values
axis_index = order[np.array([-1, 0])[diff_zero]][0]
axis = mesh.principal_inertia_vectors[axis_index]
return 'radial', axis, section
return None, None, None |
java | public static void sendRedirect(
HttpServletResponse response,
String location,
int status
) throws IllegalStateException, IOException {
// Response must not be committed
if(response.isCommitted()) throw new IllegalStateException("Unable to redirect: Response already committed");
response.setHeader("Location", location);
response.sendError(status);
} |
java | public ListObjectsResponse listObjects(String bucketName, String prefix) {
return this.listObjects(new ListObjectsRequest(bucketName, prefix));
} |
python | def _handle_read_chunk(self):
"""Some data can be read"""
new_data = b''
buffer_length = len(self.read_buffer)
try:
while buffer_length < self.MAX_BUFFER_SIZE:
try:
piece = self.recv(4096)
except OSError as e:
if e.errno == errno.EAGAIN:
# End of the available data
break
elif e.errno == errno.EIO and new_data:
# Hopefully we could read an error message before the
# actual termination
break
else:
raise
if not piece:
# A closed connection is indicated by signaling a read
# condition, and having recv() return 0.
break
new_data += piece
buffer_length += len(piece)
finally:
new_data = new_data.replace(b'\r', b'\n')
self.read_buffer += new_data
return new_data |
java | @Nullable
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
public static IBinder optBinder(@Nullable Bundle bundle, @Nullable String key) {
return optBinder(bundle, key, null);
} |
java | public XObject execute(
XPathContext xctxt, int currentNode, DTM dtm, int expType)
throws javax.xml.transform.TransformerException
{
// For now, the current node is already pushed.
return execute(xctxt);
} |
java | @Override
public void setID(String id) {
String oldID = this.id;
this.id = prepareForAssignment(this.id, id);
this.registerOwnID(oldID, this.id);
} |
java | private void writeHeader(long fileLength, int elementCount, long firstPosition, long lastPosition)
throws IOException {
raf.seek(0);
if (versioned) {
writeInt(buffer, 0, VERSIONED_HEADER);
writeLong(buffer, 4, fileLength);
writeInt(buffer, 12, elementCount);
writeLong(buffer, 16, firstPosition);
writeLong(buffer, 24, lastPosition);
raf.write(buffer, 0, 32);
return;
}
// Legacy queue header.
writeInt(buffer, 0, (int) fileLength); // Signed, so leading bit is always 0 aka legacy.
writeInt(buffer, 4, elementCount);
writeInt(buffer, 8, (int) firstPosition);
writeInt(buffer, 12, (int) lastPosition);
raf.write(buffer, 0, 16);
} |
python | def fetch():
"""
Downloads the Lenz, Hensley & Doré (2017) dust map, placing it in the
default :obj:`dustmaps` data directory.
"""
doi = '10.7910/DVN/AFJNWJ'
fname = os.path.join(
data_dir(),
'lenz2017',
'ebv_lhd.hpx.fits')
fetch_utils.dataverse_download_doi(
doi, fname,
file_requirements={'filename': 'ebv_lhd.hpx.fits'}) |
java | private static String findJavaVersionString(MavenProject project) {
while (project != null) {
String target = project.getProperties().getProperty("maven.compiler.target");
if (target != null) {
return target;
}
for (org.apache.maven.model.Plugin plugin : CompoundList.of(project.getBuildPlugins(), project.getPluginManagement().getPlugins())) {
if ("maven-compiler-plugin".equals(plugin.getArtifactId())) {
if (plugin.getConfiguration() instanceof Xpp3Dom) {
Xpp3Dom node = ((Xpp3Dom) plugin.getConfiguration()).getChild("target");
if (node != null) {
return node.getValue();
}
}
}
}
project = project.getParent();
}
return null;
} |
java | public UI wrapLayout(Layout layout) {
// TODO: add a header to switch the style, etc
// TODO: add bookmark to set the style
if (server.getConfig().isDevelopmentHeader()) {
final VerticalSplitPanel mainLayout = new VerticalSplitPanel();
mainLayout.setSizeFull();
mainLayout.setSplitPosition(SPLIT_POSITION, Sizeable.Unit.PIXELS);
mainLayout.setLocked(true);
final DevApplicationHeader header = new DevApplicationHeader(server);
header.setSpacing(true);
mainLayout.setFirstComponent(header);
mainLayout.setSecondComponent(layout);
return new DevUI(mainLayout);
} else {
return new DevUI(layout);
}
} |
java | public static <T> TypeChecker<Collection<? extends T>> tCollection(TypeChecker<? extends T> elementChecker) {
return new CollectionTypeChecker<>(Collection.class, elementChecker);
} |
java | public void setPOMVersion(final String pomVersion) {
if (pomVersion == null && this.pomVersion == null) {
return;
} else if (pomVersion == null) {
removeChild(this.pomVersion);
this.pomVersion = null;
} else if (this.pomVersion == null) {
this.pomVersion = new KeyValueNode<String>(CommonConstants.CS_MAVEN_POM_VERSION_TITLE, pomVersion);
appendChild(this.pomVersion, false);
} else {
this.pomVersion.setValue(pomVersion);
}
} |
java | public static int count(Connection connection, String sql,
String[] selectionArgs) {
if (!sql.toLowerCase().contains(" count(*) ")) {
int index = sql.toLowerCase().indexOf(" from ");
if (index == -1) {
return -1;
}
sql = "select count(*)" + sql.substring(index);
}
int count = querySingleInteger(connection, sql, selectionArgs, true);
return count;
} |
python | def _add_study_provenance(
self,
phenotyping_center,
colony,
project_fullname,
pipeline_name,
pipeline_stable_id,
procedure_stable_id,
procedure_name,
parameter_stable_id,
parameter_name,
statistical_method,
resource_name,
row_num
):
"""
:param phenotyping_center: str, from self.files['all']
:param colony: str, from self.files['all']
:param project_fullname: str, from self.files['all']
:param pipeline_name: str, from self.files['all']
:param pipeline_stable_id: str, from self.files['all']
:param procedure_stable_id: str, from self.files['all']
:param procedure_name: str, from self.files['all']
:param parameter_stable_id: str, from self.files['all']
:param parameter_name: str, from self.files['all']
:param statistical_method: str, from self.files['all']
:param resource_name: str, from self.files['all']
:return: study bnode
"""
provenance_model = Provenance(self.graph)
model = Model(self.graph)
# Add provenance
# A study is a blank node equal to its parts
study_bnode = self.make_id("{0}{1}{2}{3}{4}{5}{6}{7}".format(
phenotyping_center, colony, project_fullname, pipeline_stable_id,
procedure_stable_id, parameter_stable_id, statistical_method,
resource_name), '_')
model.addIndividualToGraph(
study_bnode, None, self.globaltt['study'])
# List of nodes linked to study with has_part property
study_parts = []
# Add study parts
model.addIndividualToGraph(self.resolve(procedure_stable_id), procedure_name)
study_parts.append(self.resolve(procedure_stable_id))
study_parts.append(self.resolve(statistical_method))
provenance_model.add_study_parts(study_bnode, study_parts)
# Add parameter/measure statement: study measures parameter
parameter_label = "{0} ({1})".format(parameter_name, procedure_name)
logging.info("Adding Provenance")
model.addIndividualToGraph(
self.resolve(parameter_stable_id), parameter_label)
provenance_model.add_study_measure(
study_bnode, self.resolve(parameter_stable_id))
# Add Colony
colony_bnode = self.make_id("{0}".format(colony), '_')
model.addIndividualToGraph(colony_bnode, colony)
# Add study agent
model.addIndividualToGraph(
self.resolve(phenotyping_center), phenotyping_center,
self.globaltt['organization'])
# self.graph
model.addTriple(
study_bnode, self.globaltt['has_agent'], self.resolve(phenotyping_center))
# add pipeline and project
model.addIndividualToGraph(
self.resolve(pipeline_stable_id), pipeline_name)
# self.graph
model.addTriple(
study_bnode, self.globaltt['part_of'], self.resolve(pipeline_stable_id))
model.addIndividualToGraph(
self.resolve(project_fullname), project_fullname, self.globaltt['project'])
# self.graph
model.addTriple(
study_bnode, self.globaltt['part_of'], self.resolve(project_fullname))
return study_bnode |
java | public void terminate(boolean delay)
{
// Overload the value specified at pipe creation.
this.delay = delay;
// If terminate was already called, we can ignore the duplicit invocation.
if (state == State.TERM_REQ_SENT_1 || state == State.TERM_REQ_SENT_2) {
return;
}
// If the pipe is in the final phase of async termination, it's going to
// closed anyway. No need to do anything special here.
else if (state == State.TERM_ACK_SENT) {
return;
}
// The simple sync termination case. Ask the peer to terminate and wait
// for the ack.
else if (state == State.ACTIVE) {
sendPipeTerm(peer);
state = State.TERM_REQ_SENT_1;
}
// There are still pending messages available, but the user calls
// 'terminate'. We can act as if all the pending messages were read.
else if (state == State.WAITING_FOR_DELIMITER && !this.delay) {
outpipe = null;
sendPipeTermAck(peer);
state = State.TERM_ACK_SENT;
}
// If there are pending messages still available, do nothing.
else if (state == State.WAITING_FOR_DELIMITER) {
// do nothing
}
// We've already got delimiter, but not term command yet. We can ignore
// the delimiter and ack synchronously terminate as if we were in
// active state.
else if (state == State.DELIMITER_RECEIVED) {
sendPipeTerm(peer);
state = State.TERM_REQ_SENT_1;
}
// There are no other states.
else {
assert (false);
}
// Stop outbound flow of messages.
outActive = false;
if (outpipe != null) {
// Drop any unfinished outbound messages.
rollback();
// Write the delimiter into the pipe. Note that watermarks are not
// checked; thus the delimiter can be written even when the pipe is full.
Msg msg = new Msg();
msg.initDelimiter();
outpipe.write(msg, false);
flush();
}
} |
java | @Override
public boolean cancel() {
if (Config.DEBUG) {
Log.v(TAG, "cancel() " + getName());
}
return mInfo.queue.cancel(getName());
} |
python | def load_metadata(self, data_dir, feature_name=None):
"""See base class for details."""
# Restore names if defined
names_filepath = _get_names_filepath(data_dir, feature_name)
if tf.io.gfile.exists(names_filepath):
self.names = _load_names_from_file(names_filepath) |
python | def validate_page(ctx, param, value):
"""Ensure that a valid value for page is chosen."""
# pylint: disable=unused-argument
if value == 0:
raise click.BadParameter(
"Page is not zero-based, please set a value to 1 or higher.", param=param
)
return value |
python | def print_huffman_code_cwl(code,p,v):
""" code - code dictionary with symbol -> code map, p, v is probability map """
cwl = 0.0
for k,_v in code.items():
print(u"%s -> %s"%(k,_v))
cwl += p[v.index(k)]*len(_v)
print(u"cwl = %g"%cwl)
return cwl,code.values() |
java | public static void write(final String data, final Writer output) throws IOException {
if (data != null) {
output.write(data);
}
} |
python | def get_keys_from_value(self, value):
"""
Gets the keys from given value.
:param value: Value.
:type value: object
:return: Keys.
:rtype: object
"""
return [key for key, data in self.iteritems() if data == value] |
java | public BoxUser.Info getInfo(String... fields) {
URL url;
if (fields.length > 0) {
String queryString = new QueryStringBuilder().appendParam("fields", fields).toString();
url = USER_URL_TEMPLATE.buildWithQuery(this.getAPI().getBaseURL(), queryString, this.getID());
} else {
url = USER_URL_TEMPLATE.build(this.getAPI().getBaseURL(), this.getID());
}
BoxAPIRequest request = new BoxAPIRequest(this.getAPI(), url, "GET");
BoxJSONResponse response = (BoxJSONResponse) request.send();
JsonObject jsonObject = JsonObject.readFrom(response.getJSON());
return new Info(jsonObject);
} |
python | def _finalize_axis(self, key, **kwargs):
"""
Extends the ElementPlot _finalize_axis method to set appropriate
labels, and axes options for 3D Plots.
"""
axis = self.handles['axis']
self.handles['fig'].set_frameon(False)
axis.grid(self.show_grid)
axis.view_init(elev=self.elevation, azim=self.azimuth)
axis.dist = self.distance
if self.xaxis is None:
axis.w_xaxis.line.set_lw(0.)
axis.w_xaxis.label.set_text('')
if self.yaxis is None:
axis.w_yaxis.line.set_lw(0.)
axis.w_yaxis.label.set_text('')
if self.zaxis is None:
axis.w_zaxis.line.set_lw(0.)
axis.w_zaxis.label.set_text('')
if self.disable_axes:
axis.set_axis_off()
if mpl_version <= '1.5.9':
axis.set_axis_bgcolor(self.bgcolor)
else:
axis.set_facecolor(self.bgcolor)
return super(Plot3D, self)._finalize_axis(key, **kwargs) |
java | public void disableComputeNodeScheduling(String poolId, String nodeId, DisableComputeNodeSchedulingOption nodeDisableSchedulingOption, Iterable<BatchClientBehavior> additionalBehaviors) throws BatchErrorException, IOException {
ComputeNodeDisableSchedulingOptions options = new ComputeNodeDisableSchedulingOptions();
BehaviorManager bhMgr = new BehaviorManager(this.customBehaviors(), additionalBehaviors);
bhMgr.applyRequestBehaviors(options);
this.parentBatchClient.protocolLayer().computeNodes().disableScheduling(poolId, nodeId, nodeDisableSchedulingOption, options);
} |
java | public Query constrainedBy( Constraint constraint ) {
return new Query(source, constraint, orderings(), columns, getLimits(), distinct);
} |
python | def check_valid(number, input_base=10):
"""
Checks if there is an invalid digit in the input number.
Args:
number: An number in the following form:
(int, int, int, ... , '.' , int, int, int)
(iterable container) containing positive integers of the input base
input_base(int): The base of the input number.
Returns:
bool, True if all digits valid, else False.
Examples:
>>> check_valid((1,9,6,'.',5,1,6), 12)
True
>>> check_valid((8,1,15,9), 15)
False
"""
for n in number:
if n in (".", "[", "]"):
continue
elif n >= input_base:
if n == 1 and input_base == 1:
continue
else:
return False
return True |
java | private String createMailToLink(String to, String bcc, String cc,
String subject, String body) {
Validate.notNull(to, "You must define a to-address");
final StringBuilder urlBuilder = new StringBuilder("mailto:");
addEncodedValue(urlBuilder, "to", to);
if (bcc != null || cc != null || subject != null || body != null) {
urlBuilder.append('?');
}
addEncodedValue(urlBuilder, "bcc", bcc);
addEncodedValue(urlBuilder, "cc", cc);
addEncodedValue(urlBuilder, "subject", subject);
if (body != null) {
addEncodedValue(urlBuilder, "body", body.replace("$NL$", "\r\n"));
}
return urlBuilder.toString();
} |
java | public static <T extends java.util.Date> T setSeconds(final T date, final int amount) {
return set(date, Calendar.SECOND, amount);
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.