language | func_code_string |
---|---|
java | private String stripPrefix(final String input, final String searchString) {
return input.substring(searchString.length()).trim();
} |
java | @Override
protected void visitLogNode(LogNode node) {
if (isComputableAsJsExprsVisitor.execOnChildren(node)) {
List<Expression> logMsgChunks = genJsExprsVisitor.execOnChildren(node);
jsCodeBuilder.append(WINDOW_CONSOLE_LOG.call(CodeChunkUtils.concatChunks(logMsgChunks)));
} else {
// Must build log msg in a local var logMsg_s##.
String outputVarName = "logMsg_s" + node.getId();
jsCodeBuilder.pushOutputVar(outputVarName);
visitChildren(node);
jsCodeBuilder.popOutputVar();
jsCodeBuilder.append(WINDOW_CONSOLE_LOG.call(id(outputVarName)));
}
} |
python | def fit(args, network, data_loader, **kwargs):
"""
train a model
args : argparse returns
network : the symbol definition of the neural network
data_loader : function that returns the train and val data iterators
"""
# kvstore
kv = mx.kvstore.create(args.kv_store)
if args.gc_type != 'none':
kv.set_gradient_compression({'type': args.gc_type,
'threshold': args.gc_threshold})
if args.profile_server_suffix:
mx.profiler.set_config(filename=args.profile_server_suffix, profile_all=True, profile_process='server')
mx.profiler.set_state(state='run', profile_process='server')
if args.profile_worker_suffix:
if kv.num_workers > 1:
filename = 'rank' + str(kv.rank) + '_' + args.profile_worker_suffix
else:
filename = args.profile_worker_suffix
mx.profiler.set_config(filename=filename, profile_all=True, profile_process='worker')
mx.profiler.set_state(state='run', profile_process='worker')
# logging
head = '%(asctime)-15s Node[' + str(kv.rank) + '] %(message)s'
logging.basicConfig(level=logging.DEBUG, format=head)
logging.info('start with arguments %s', args)
epoch_size = get_epoch_size(args, kv)
# data iterators
(train, val) = data_loader(args, kv)
if 'dist' in args.kv_store and 'async' not in args.kv_store:
logging.info('Resizing training data to %d batches per machine', epoch_size)
# resize train iter to ensure each machine has same number of batches per epoch
# if not, dist_sync can hang at the end with one machine waiting for other machines
train = mx.io.ResizeIter(train, epoch_size)
if args.test_io:
tic = time.time()
for i, batch in enumerate(train):
if isinstance(batch, list):
for b in batch:
for j in b.data:
j.wait_to_read()
else:
for j in batch.data:
j.wait_to_read()
if (i + 1) % args.disp_batches == 0:
logging.info('Batch [%d]\tSpeed: %.2f samples/sec', i,
args.disp_batches * args.batch_size / (time.time() - tic))
tic = time.time()
return
# load model
if 'arg_params' in kwargs and 'aux_params' in kwargs:
arg_params = kwargs['arg_params']
aux_params = kwargs['aux_params']
else:
sym, arg_params, aux_params = _load_model(args, kv.rank)
if sym is not None:
assert sym.tojson() == network.tojson()
# save model
checkpoint = _save_model(args, kv.rank)
# devices for training
devs = mx.cpu() if args.gpus is None or args.gpus == "" else [
mx.gpu(int(i)) for i in args.gpus.split(',')]
# learning rate
lr, lr_scheduler = _get_lr_scheduler(args, kv)
# create model
model = mx.mod.Module(
context=devs,
symbol=network
)
optimizer_params = {
'learning_rate': lr,
'wd': args.wd,
'lr_scheduler': lr_scheduler,
'multi_precision': True}
# Only a limited number of optimizers have a 'momentum' property
has_momentum = {'sgd', 'dcasgd', 'nag', 'signum', 'lbsgd'}
if args.optimizer in has_momentum:
optimizer_params['momentum'] = args.mom
monitor = mx.mon.Monitor(
args.monitor, pattern=".*") if args.monitor > 0 else None
# A limited number of optimizers have a warmup period
has_warmup = {'lbsgd', 'lbnag'}
if args.optimizer in has_warmup:
nworkers = kv.num_workers
if epoch_size < 1:
epoch_size = 1
macrobatch_size = args.macrobatch_size
if macrobatch_size < args.batch_size * nworkers:
macrobatch_size = args.batch_size * nworkers
batch_scale = math.ceil(
float(macrobatch_size) / args.batch_size / nworkers)
optimizer_params['updates_per_epoch'] = epoch_size
optimizer_params['begin_epoch'] = args.load_epoch if args.load_epoch else 0
optimizer_params['batch_scale'] = batch_scale
optimizer_params['warmup_strategy'] = args.warmup_strategy
optimizer_params['warmup_epochs'] = args.warmup_epochs
optimizer_params['num_epochs'] = args.num_epochs
if args.initializer == 'default':
if args.network == 'alexnet':
# AlexNet will not converge using Xavier
initializer = mx.init.Normal()
# VGG tends not to converge using Xavier-Gaussian
elif args.network and 'vgg' in args.network:
initializer = mx.init.Xavier()
else:
initializer = mx.init.Xavier(
rnd_type='gaussian', factor_type="in", magnitude=2)
# initializer = mx.init.Xavier(factor_type="in", magnitude=2.34),
elif args.initializer == 'xavier':
initializer = mx.init.Xavier()
elif args.initializer == 'msra':
initializer = mx.init.MSRAPrelu()
elif args.initializer == 'orthogonal':
initializer = mx.init.Orthogonal()
elif args.initializer == 'normal':
initializer = mx.init.Normal()
elif args.initializer == 'uniform':
initializer = mx.init.Uniform()
elif args.initializer == 'one':
initializer = mx.init.One()
elif args.initializer == 'zero':
initializer = mx.init.Zero()
else:
raise ValueError('Unknown initializer: {}'.format(args.initializer))
# evaluation metrics
eval_metrics = ['accuracy']
if args.top_k > 0:
eval_metrics.append(mx.metric.create(
'top_k_accuracy', top_k=args.top_k))
supported_loss = ['ce', 'nll_loss']
if len(args.loss) > 0:
# ce or nll loss is only applicable to softmax output
loss_type_list = args.loss.split(',')
if 'softmax_output' in network.list_outputs():
for loss_type in loss_type_list:
loss_type = loss_type.strip()
if loss_type == 'nll':
loss_type = 'nll_loss'
if loss_type not in supported_loss:
logging.warning(loss_type + ' is not a valid loss type, only cross-entropy or ' \
'negative log-likelihood loss is supported!')
else:
eval_metrics.append(mx.metric.create(loss_type))
else:
logging.warning("The output is not softmax_output, loss argument will be skipped!")
# callbacks that run after each batch
batch_end_callbacks = [mx.callback.Speedometer(
args.batch_size, args.disp_batches)]
if 'batch_end_callback' in kwargs:
cbs = kwargs['batch_end_callback']
batch_end_callbacks += cbs if isinstance(cbs, list) else [cbs]
# run
model.fit(train,
begin_epoch=args.load_epoch if args.load_epoch else 0,
num_epoch=args.num_epochs,
eval_data=val,
eval_metric=eval_metrics,
kvstore=kv,
optimizer=args.optimizer,
optimizer_params=optimizer_params,
initializer=initializer,
arg_params=arg_params,
aux_params=aux_params,
batch_end_callback=batch_end_callbacks,
epoch_end_callback=checkpoint,
allow_missing=True,
monitor=monitor)
if args.profile_server_suffix:
mx.profiler.set_state(state='stop', profile_process='server')
if args.profile_worker_suffix:
mx.profiler.set_state(state='stop', profile_process='worker') |
python | def get_vcs_details_output_vcs_details_node_vcs_mode(self, **kwargs):
"""Auto Generated Code
"""
get_vcs_details = ET.Element("get_vcs_details")
config = get_vcs_details
output = ET.SubElement(get_vcs_details, "output")
vcs_details = ET.SubElement(output, "vcs-details")
node_vcs_mode = ET.SubElement(vcs_details, "node-vcs-mode")
node_vcs_mode.text = kwargs.pop('node_vcs_mode')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
java | @Override
public int read() throws IOException, StreamIntegrityException {
if (trailerIn == null) {
initializeStream();
}
int result = pushbackInputStream.read();
return completeRead(result);
} |
python | def guess_github_repo():
"""
Guesses the github repo for the current directory
Returns False if no guess can be made.
"""
p = subprocess.run(['git', 'ls-remote', '--get-url', 'origin'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False)
if p.stderr or p.returncode:
return False
url = p.stdout.decode('utf-8').strip()
m = GIT_URL.fullmatch(url)
if not m:
return False
return m.group(1) |
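`GIT_URL` is a module-level regex not shown in this row; a plausible stand-in (an assumption for illustration, not the original pattern) that captures `owner/repo` from common GitHub remotes:

```python
import re

# Hypothetical stand-in for the module's GIT_URL pattern; matches HTTPS and
# SSH GitHub remotes and captures "owner/repo" as group 1.
GIT_URL = re.compile(r'(?:git@github\.com:|https://github\.com/)([\w.-]+/[\w.-]+?)(?:\.git)?')

print(GIT_URL.fullmatch('git@github.com:psf/requests.git').group(1))  # psf/requests
```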
python | def _context_callbacks(app, key, original_context=_CONTEXT_MISSING):
"""Register the callbacks we need to properly pop and push the
app-local context for a component.
Args:
app (flask.Flask): The app who this context belongs to. This is the
only sender our Blinker signal will listen to.
key (str): The key on ``_CONTEXT_LOCALS`` that this app's context
listens to.
Kwargs:
original_context (dict): The original context present whenever
these callbacks were registered. We will restore the context to
this value whenever the app context gets popped.
Returns:
(function, function): A two-element tuple of the dynamic functions
we generated as appcontext callbacks. The first element is the
callback for ``appcontext_pushed`` (i.e., get and store the
current context) and the second element is the callback for
``appcontext_popped`` (i.e., restore the current context to
its original value).
"""
def _get_context(dummy_app):
"""Set the context proxy so that it points to a specific context.
"""
_CONTEXT_LOCALS.context = _CONTEXT_LOCALS(key) # pylint: disable=assigning-non-slot
def _clear_context(dummy_app):
"""Remove the context proxy that points to a specific context and
restore the original context, if there was one.
"""
try:
del _CONTEXT_LOCALS.context
except AttributeError:
pass
if original_context is not _CONTEXT_MISSING:
setattr(_CONTEXT_LOCALS, key, original_context)
# store for later so Blinker doesn't remove these listeners and so we
# don't add them twice
_CONTEXT_CALLBACK_MAP[app] = (_get_context, _clear_context)
# and listen for any app context changes
appcontext_pushed.connect(_get_context, app)
appcontext_popped.connect(_clear_context, app)
return (_get_context, _clear_context) |
python | def _python_installed(ret, python, user=None):
'''
Check to see if given python is installed.
'''
default = __salt__['pyenv.default'](runas=user)
for version in __salt__['pyenv.versions'](user):
if version == python:
ret['result'] = True
ret['comment'] = 'Requested python exists.'
ret['default'] = default == python
break
return ret |
python | def _to_add(self, **kwargs):
'''
Used for info1.
'''
if 'catid' in kwargs:
catid = kwargs['catid']
return self._to_add_with_category(catid)
else:
if 'uid' in kwargs and MPost.get_by_uid(kwargs['uid']):
# todo:
# self.redirect('/{0}/edit/{1}'.format(self.app_url_name, uid))
uid = kwargs['uid']
else:
uid = ''
self.render('post_{0}/post_add.html'.format(self.kind),
tag_infos=MCategory.query_all(by_order=True, kind=self.kind),
userinfo=self.userinfo,
kwd={'uid': uid, }) |
java | public void index(final ByteBuffer key, final ColumnFamily columnFamily, final long timestamp) {
if (indexQueue == null) {
indexInner(key, columnFamily, timestamp);
} else {
indexQueue.submitAsynchronous(key, new Runnable() {
@Override
public void run() {
indexInner(key, columnFamily, timestamp);
}
});
}
} |
java | public GISModel getModel () throws java.io.IOException {
checkModelType();
int correctionConstant = getCorrectionConstant();
double correctionParam = getCorrectionParameter();
String[] outcomeLabels = getOutcomes();
int[][] outcomePatterns = getOutcomePatterns();
String[] predLabels = getPredicates();
Context[] params = getParameters(outcomePatterns);
return new GISModel(params,
predLabels,
outcomeLabels,
correctionConstant,
correctionParam);
} |
python | def delaunay_3d(dataset, alpha=0, tol=0.001, offset=2.5):
"""Constructs a 3D Delaunay triangulation of the mesh.
This helps smooth out a rugged mesh.
Parameters
----------
alpha : float, optional
Distance value to control output of this filter. For a non-zero
alpha value, only verts, edges, faces, or tetra contained within
the circumsphere (of radius alpha) will be output. Otherwise, only
tetrahedra will be output.
tol : float, optional
Tolerance to control discarding of closely spaced points.
This tolerance is specified as a fraction of the diagonal length
of the bounding box of the points.
offset : float, optional
Multiplier to control the size of the initial, bounding Delaunay
triangulation.
"""
alg = vtk.vtkDelaunay3D()
alg.SetInputData(dataset)
alg.SetAlpha(alpha)
alg.SetTolerance(tol)
alg.SetOffset(offset)
alg.Update()
return _get_output(alg) |
java | public void delete(String resourceGroupName, String registryName) {
deleteWithServiceResponseAsync(resourceGroupName, registryName).toBlocking().single().body();
} |
java | @Override
public Connection connect(String url, Properties info) throws SQLException {
if (url == null) {
throw new SQLException("url is null");
}
// get defaults
Properties defaults;
if (!url.startsWith("jdbc:postgresql:")) {
return null;
}
try {
defaults = getDefaultProperties();
} catch (IOException ioe) {
throw new PSQLException(GT.tr("Error loading default settings from driverconfig.properties"),
PSQLState.UNEXPECTED_ERROR, ioe);
}
// override defaults with provided properties
Properties props = new Properties(defaults);
if (info != null) {
Set<String> e = info.stringPropertyNames();
for (String propName : e) {
String propValue = info.getProperty(propName);
if (propValue == null) {
throw new PSQLException(
GT.tr("Properties for the driver contains a non-string value for the key ")
+ propName,
PSQLState.UNEXPECTED_ERROR);
}
props.setProperty(propName, propValue);
}
}
// parse URL and add more properties
if ((props = parseURL(url, props)) == null) {
return null;
}
try {
// Setup java.util.logging.Logger using connection properties.
setupLoggerFromProperties(props);
LOGGER.log(Level.FINE, "Connecting with URL: {0}", url);
// Enforce login timeout, if specified, by running the connection
// attempt in a separate thread. If we hit the timeout without the
// connection completing, we abandon the connection attempt in
// the calling thread, but the separate thread will keep trying.
// Eventually, the separate thread will either fail or complete
// the connection; at that point we clean up the connection if
// we managed to establish one after all. See ConnectThread for
// more details.
long timeout = timeout(props);
if (timeout <= 0) {
return makeConnection(url, props);
}
ConnectThread ct = new ConnectThread(url, props);
Thread thread = new Thread(ct, "PostgreSQL JDBC driver connection thread");
thread.setDaemon(true); // Don't prevent the VM from shutting down
thread.start();
return ct.getResult(timeout);
} catch (PSQLException ex1) {
LOGGER.log(Level.FINE, "Connection error: ", ex1);
// re-throw the exception, otherwise it will be caught next, and a
// org.postgresql.unusual error will be returned instead.
throw ex1;
} catch (java.security.AccessControlException ace) {
throw new PSQLException(
GT.tr(
"Your security policy has prevented the connection from being attempted. You probably need to grant the connect java.net.SocketPermission to the database server host and port that you wish to connect to."),
PSQLState.UNEXPECTED_ERROR, ace);
} catch (Exception ex2) {
LOGGER.log(Level.FINE, "Unexpected connection error: ", ex2);
throw new PSQLException(
GT.tr(
"Something unusual has occurred to cause the driver to fail. Please report this exception."),
PSQLState.UNEXPECTED_ERROR, ex2);
}
} |
java | public final EObject ruleXConditionalExpression() throws RecognitionException {
EObject current = null;
Token lv_conditionalExpression_2_0=null;
Token otherlv_4=null;
EObject this_XOrExpression_0 = null;
EObject lv_then_3_0 = null;
EObject lv_else_5_0 = null;
enterRule();
try {
// InternalPureXbase.g:792:2: ( (this_XOrExpression_0= ruleXOrExpression ( ( ( '?' )=> ( () ( (lv_conditionalExpression_2_0= '?' ) ) ) ) ( (lv_then_3_0= ruleXExpression ) ) ( ( ( ':' )=>otherlv_4= ':' ) ( (lv_else_5_0= ruleXExpression ) ) )? )? ) )
// InternalPureXbase.g:793:2: (this_XOrExpression_0= ruleXOrExpression ( ( ( '?' )=> ( () ( (lv_conditionalExpression_2_0= '?' ) ) ) ) ( (lv_then_3_0= ruleXExpression ) ) ( ( ( ':' )=>otherlv_4= ':' ) ( (lv_else_5_0= ruleXExpression ) ) )? )? )
{
// InternalPureXbase.g:793:2: (this_XOrExpression_0= ruleXOrExpression ( ( ( '?' )=> ( () ( (lv_conditionalExpression_2_0= '?' ) ) ) ) ( (lv_then_3_0= ruleXExpression ) ) ( ( ( ':' )=>otherlv_4= ':' ) ( (lv_else_5_0= ruleXExpression ) ) )? )? )
// InternalPureXbase.g:794:3: this_XOrExpression_0= ruleXOrExpression ( ( ( '?' )=> ( () ( (lv_conditionalExpression_2_0= '?' ) ) ) ) ( (lv_then_3_0= ruleXExpression ) ) ( ( ( ':' )=>otherlv_4= ':' ) ( (lv_else_5_0= ruleXExpression ) ) )? )?
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXConditionalExpressionAccess().getXOrExpressionParserRuleCall_0());
}
pushFollow(FOLLOW_15);
this_XOrExpression_0=ruleXOrExpression();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
current = this_XOrExpression_0;
afterParserOrEnumRuleCall();
}
// InternalPureXbase.g:802:3: ( ( ( '?' )=> ( () ( (lv_conditionalExpression_2_0= '?' ) ) ) ) ( (lv_then_3_0= ruleXExpression ) ) ( ( ( ':' )=>otherlv_4= ':' ) ( (lv_else_5_0= ruleXExpression ) ) )? )?
int alt19=2;
int LA19_0 = input.LA(1);
if ( (LA19_0==21) ) {
int LA19_1 = input.LA(2);
if ( (synpred8_InternalPureXbase()) ) {
alt19=1;
}
}
switch (alt19) {
case 1 :
// InternalPureXbase.g:803:4: ( ( '?' )=> ( () ( (lv_conditionalExpression_2_0= '?' ) ) ) ) ( (lv_then_3_0= ruleXExpression ) ) ( ( ( ':' )=>otherlv_4= ':' ) ( (lv_else_5_0= ruleXExpression ) ) )?
{
// InternalPureXbase.g:803:4: ( ( '?' )=> ( () ( (lv_conditionalExpression_2_0= '?' ) ) ) )
// InternalPureXbase.g:804:5: ( '?' )=> ( () ( (lv_conditionalExpression_2_0= '?' ) ) )
{
// InternalPureXbase.g:805:5: ( () ( (lv_conditionalExpression_2_0= '?' ) ) )
// InternalPureXbase.g:806:6: () ( (lv_conditionalExpression_2_0= '?' ) )
{
// InternalPureXbase.g:806:6: ()
// InternalPureXbase.g:807:7:
{
if ( state.backtracking==0 ) {
current = forceCreateModelElementAndSet(
grammarAccess.getXConditionalExpressionAccess().getXIfExpressionIfAction_1_0_0_0(),
current);
}
}
// InternalPureXbase.g:813:6: ( (lv_conditionalExpression_2_0= '?' ) )
// InternalPureXbase.g:814:7: (lv_conditionalExpression_2_0= '?' )
{
// InternalPureXbase.g:814:7: (lv_conditionalExpression_2_0= '?' )
// InternalPureXbase.g:815:8: lv_conditionalExpression_2_0= '?'
{
lv_conditionalExpression_2_0=(Token)match(input,21,FOLLOW_3); if (state.failed) return current;
if ( state.backtracking==0 ) {
newLeafNode(lv_conditionalExpression_2_0, grammarAccess.getXConditionalExpressionAccess().getConditionalExpressionQuestionMarkKeyword_1_0_0_1_0());
}
if ( state.backtracking==0 ) {
if (current==null) {
current = createModelElement(grammarAccess.getXConditionalExpressionRule());
}
setWithLastConsumed(current, "conditionalExpression", true, "?");
}
}
}
}
}
// InternalPureXbase.g:829:4: ( (lv_then_3_0= ruleXExpression ) )
// InternalPureXbase.g:830:5: (lv_then_3_0= ruleXExpression )
{
// InternalPureXbase.g:830:5: (lv_then_3_0= ruleXExpression )
// InternalPureXbase.g:831:6: lv_then_3_0= ruleXExpression
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXConditionalExpressionAccess().getThenXExpressionParserRuleCall_1_1_0());
}
pushFollow(FOLLOW_16);
lv_then_3_0=ruleXExpression();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
if (current==null) {
current = createModelElementForParent(grammarAccess.getXConditionalExpressionRule());
}
set(
current,
"then",
lv_then_3_0,
"org.eclipse.xtext.xbase.Xbase.XExpression");
afterParserOrEnumRuleCall();
}
}
}
// InternalPureXbase.g:848:4: ( ( ( ':' )=>otherlv_4= ':' ) ( (lv_else_5_0= ruleXExpression ) ) )?
int alt18=2;
int LA18_0 = input.LA(1);
if ( (LA18_0==22) ) {
int LA18_1 = input.LA(2);
if ( (synpred9_InternalPureXbase()) ) {
alt18=1;
}
}
switch (alt18) {
case 1 :
// InternalPureXbase.g:849:5: ( ( ':' )=>otherlv_4= ':' ) ( (lv_else_5_0= ruleXExpression ) )
{
// InternalPureXbase.g:849:5: ( ( ':' )=>otherlv_4= ':' )
// InternalPureXbase.g:850:6: ( ':' )=>otherlv_4= ':'
{
otherlv_4=(Token)match(input,22,FOLLOW_3); if (state.failed) return current;
if ( state.backtracking==0 ) {
newLeafNode(otherlv_4, grammarAccess.getXConditionalExpressionAccess().getColonKeyword_1_2_0());
}
}
// InternalPureXbase.g:856:5: ( (lv_else_5_0= ruleXExpression ) )
// InternalPureXbase.g:857:6: (lv_else_5_0= ruleXExpression )
{
// InternalPureXbase.g:857:6: (lv_else_5_0= ruleXExpression )
// InternalPureXbase.g:858:7: lv_else_5_0= ruleXExpression
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXConditionalExpressionAccess().getElseXExpressionParserRuleCall_1_2_1_0());
}
pushFollow(FOLLOW_2);
lv_else_5_0=ruleXExpression();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
if (current==null) {
current = createModelElementForParent(grammarAccess.getXConditionalExpressionRule());
}
set(
current,
"else",
lv_else_5_0,
"org.eclipse.xtext.xbase.Xbase.XExpression");
afterParserOrEnumRuleCall();
}
}
}
}
break;
}
}
break;
}
}
}
if ( state.backtracking==0 ) {
leaveRule();
}
}
catch (RecognitionException re) {
recover(input,re);
appendSkippedTokens();
}
finally {
}
return current;
} |
python | def switch_to_frame(self, frame, timeout=settings.SMALL_TIMEOUT):
""" Sets driver control to the specified browser frame. """
if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT:
timeout = self.__get_new_timeout(timeout)
page_actions.switch_to_frame(self.driver, frame, timeout) |
java | private void initializeContentBuffer() throws IOException {
if (this.seekEnabled) {
this.buffer = ByteBuffer.allocate(this.fileSize - HEADER_BYTES);
final int read = this.stream.read(this.buffer);
if (read < 0) {
throw new EOFException();
}
this.buffer.rewind();
this.buffer.limit(read);
this.bufferPosition = HEADER_BYTES;
} else {
this.buffer = ByteBuffer.allocate(BLOCK_SIZE);
final int read = this.stream.read(this.buffer);
if (read < 0) {
throw new EOFException();
}
this.buffer.rewind();
this.buffer.limit(read);
this.bufferPosition = HEADER_BYTES;
}
} |
python | def set_symbol(self, feature_id, symbol, organism=None, sequence=None):
"""
Set a feature's symbol
:type feature_id: str
:param feature_id: Feature UUID
:type symbol: str
:param symbol: Feature symbol
:type organism: str
:param organism: Organism Common Name
:type sequence: str
:param sequence: Sequence Name
:rtype: dict
:return: A standard apollo feature dictionary ({"features": [{...}]})
"""
data = {
'features': [
{
'uniquename': feature_id,
'symbol': symbol,
}
],
}
data = self._update_data(data, organism, sequence)
return self.post('setSymbol', data) |
python | def point_type(name, fields, srid_map):
""" Dynamically create a Point subclass.
"""
def srid(self):
try:
return srid_map[len(self)]
except KeyError:
return None
attributes = {"srid": property(srid)}
for index, subclass_field in enumerate(fields):
def accessor(self, i=index, f=subclass_field):
try:
return self[i]
except IndexError:
raise AttributeError(f)
for field_alias in {subclass_field, "xyz"[index]}:
attributes[field_alias] = property(accessor)
cls = type(name, (Point,), attributes)
with __srid_table_lock:
for dim, srid in srid_map.items():
__srid_table[srid] = (cls, dim)
return cls |
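The default arguments in `accessor` (`i=index`, `f=subclass_field`) freeze the loop variables per iteration, guarding against Python's late-binding closures; without them every generated property would read the last index. A minimal standalone sketch of the same trick (hypothetical `P` class, not the original `Point` hierarchy):

```python
# Default arguments capture the loop variable at definition time; a plain
# closure over `index` would make every property return self[2].
class P(tuple):
    pass

for index, name in enumerate(("x", "y", "z")):
    def accessor(self, i=index):
        return self[i]
    setattr(P, name, property(accessor))

p = P((1.0, 2.0, 3.0))
print(p.x, p.y, p.z)  # 1.0 2.0 3.0
```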
python | def files_walker(directory, filters_in=None, filters_out=None, flags=0):
"""
Defines a generator used to walk files using given filters.
Usage::
>>> for file in files_walker("./foundations/tests/tests_foundations/resources/standard/level_0"):
... print(file)
...
./foundations/tests/tests_foundations/resources/standard/level_0/level_1/level_2/standard.sIBLT
./foundations/tests/tests_foundations/resources/standard/level_0/level_1/lorem_ipsum.txt
./foundations/tests/tests_foundations/resources/standard/level_0/level_1/standard.rc
./foundations/tests/tests_foundations/resources/standard/level_0/standard.ibl
>>> for file in files_walker("./foundations/tests/tests_foundations/resources/standard/level_0", ("\.sIBLT",)):
... print(file)
...
./foundations/tests/tests_foundations/resources/standard/level_0/level_1/level_2/standard.sIBLT
:param directory: Directory to recursively walk.
:type directory: unicode
:param filters_in: Regex filters in list.
:type filters_in: tuple or list
:param filters_out: Regex filters out list.
:type filters_out: tuple or list
:param flags: Regex flags.
:type flags: int
:return: File.
:rtype: unicode
"""
if filters_in:
LOGGER.debug("> Current filters in: '{0}'.".format(filters_in))
if filters_out:
LOGGER.debug("> Current filters out: '{0}'.".format(filters_out))
for parent_directory, directories, files in os.walk(directory, topdown=False, followlinks=True):
for file in files:
LOGGER.debug("> Current file: '{0}' in '{1}'.".format(file, directory))
path = foundations.strings.to_forward_slashes(os.path.join(parent_directory, file))
if os.path.isfile(path):
if not foundations.strings.filter_words((path,), filters_in, filters_out, flags):
continue
LOGGER.debug("> '{0}' file filtered in!".format(path))
yield path |
java | public void scale(float scale) {
if (scale != 1.0f) {
left = (int) (left * scale + 0.5f);
top = (int) (top * scale + 0.5f);
right = (int) (right * scale + 0.5f);
bottom = (int) (bottom * scale + 0.5f);
}
} |
java | public final T addCssClass(String value) {
if (StringUtils.isNotEmpty(value)) {
return setCssClass(StringUtils.isNotEmpty(getCssClass()) ? getCssClass() + " " + value : value);
}
else {
return (T)this;
}
} |
java | private boolean checkNumArguments() {
//Also, since we're iterating over all options and args, use this opportunity to recreate the commandLineString
final StringBuilder commandLineString = new StringBuilder();
try {
for (final OptionDefinition optionDefinition : optionDefinitions) {
final String fullName = prefixDot + optionDefinition.name;
final StringBuilder mutexOptionNames = new StringBuilder();
for (final String mutexOption : optionDefinition.mutuallyExclusive) {
final OptionDefinition mutexOptionDef = optionMap.get(mutexOption);
if (mutexOptionDef != null && mutexOptionDef.hasBeenSet) {
mutexOptionNames.append(' ').append(prefixDot).append(mutexOptionDef.name);
}
}
if (optionDefinition.hasBeenSet && mutexOptionNames.length() > 0) {
messageStream.println("ERROR: Option '" + fullName +
"' cannot be used in conjunction with option(s)" +
mutexOptionNames.toString());
return false;
}
if (optionDefinition.isCollection) {
final Collection<?> c = (Collection<?>) optionDefinition.field.get(optionDefinition.parent);
if (c.size() < optionDefinition.minElements) {
messageStream.println("ERROR: Option '" + fullName + "' must be specified at least " +
optionDefinition.minElements + " times.");
return false;
}
} else if (!optionDefinition.optional && !optionDefinition.hasBeenSet &&
!optionDefinition.hasBeenSetFromParent && mutexOptionNames.length() == 0) {
messageStream.print("ERROR: Option '" + fullName + "' is required");
if (optionDefinition.mutuallyExclusive.isEmpty()) {
messageStream.println(".");
} else {
messageStream.println(" unless any of " + optionDefinition.mutuallyExclusive +
" are specified.");
}
return false;
}
}
if (positionalArguments != null) {
final Collection<?> c = (Collection<?>) positionalArguments.get(callerOptions);
if (c.size() < minPositionalArguments) {
messageStream.println("ERROR: At least " + minPositionalArguments +
" positional arguments must be specified.");
return false;
}
for (final Object posArg : c) {
commandLineString.append(' ').append(posArg.toString());
}
}
//first, append args that were explicitly set
for (final OptionDefinition optionDefinition : optionDefinitions) {
if (optionDefinition.hasBeenSet) {
commandLineString.append(' ').append(prefixDot).append(optionDefinition.name).append('=').append(
optionDefinition.field.get(optionDefinition.parent));
}
}
commandLineString.append(" "); //separator to tell the 2 apart
//next, append args that weren't explicitly set, but have a default value
for (final OptionDefinition optionDefinition : optionDefinitions) {
if (!optionDefinition.hasBeenSet && !optionDefinition.defaultValue.equals("null")) {
commandLineString.append(' ').append(prefixDot).append(optionDefinition.name).append('=').append(
optionDefinition.defaultValue);
}
}
this.commandLine += commandLineString.toString();
return true;
} catch (final IllegalAccessException e) {
// Should never happen because lack of publicness has already been checked.
throw new RuntimeException(e);
}
} |
java | public static RoundedMoney of(String currencyCode, Number number, MonetaryContext monetaryContext,
MonetaryOperator rounding) {
return new RoundedMoney(number, Monetary.getCurrency(currencyCode),
DEFAULT_MONETARY_CONTEXT.toBuilder().importContext(monetaryContext).build(), rounding);
} |
python | def get_titles():
'''returns titles of all open windows'''
if os.name == 'posix':
for proc in get_processes():
cmd = ['xdotool', 'search', '--name', proc]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
window_ids = p.communicate()[0].decode('utf-8')
if window_ids:
for window_id in window_ids.split('\n'):
cmd = ['xdotool', 'getwindowname', window_id]
p = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
title = p.communicate()[0].decode('utf-8')
try:
if title[-1] == '\n':
title = title[:-1]
yield title
except IndexError:
pass
else:
raise NotImplementedError |
java | protected JComponent createButtonBar() {
this.dialogCommandGroup = getCommandManager().createCommandGroup(null, getCommandGroupMembers());
JComponent buttonBar = this.dialogCommandGroup.createButtonBar();
GuiStandardUtils.attachDialogBorder(buttonBar);
return buttonBar;
} |
java | protected <T> T handleResponse(OkHttpClient client, Request.Builder requestBuilder, Class<T> type, Map<String, String> parameters) throws ExecutionException, InterruptedException, KubernetesClientException, IOException {
VersionUsageUtils.log(this.resourceT, this.apiGroupVersion);
Request request = requestBuilder.build();
Response response = client.newCall(request).execute();
try (ResponseBody body = response.body()) {
assertResponseCode(request, response);
if (type != null) {
try (InputStream bodyInputStream = body.byteStream()) {
return Serialization.unmarshal(bodyInputStream, type, parameters);
}
} else {
return null;
}
} catch (Exception e) {
if (e instanceof KubernetesClientException) {
throw e;
}
throw requestException(request, e);
} finally {
if(response != null && response.body() != null) {
response.body().close();
}
}
} |
java | @Override
public T addAsManifestResource(File resource, ArchivePath target) throws IllegalArgumentException {
Validate.notNull(resource, "Resource should be specified");
Validate.notNull(target, "Target should be specified");
if (resource.isFile()) {
return addAsManifestResource(new FileAsset(resource), target);
}
final File[] files = resource.listFiles();
// SHRINKWRAP-275, resource URL coming in from a JAR
if (files == null) {
return this.addNestedJarFileResource(resource, target, this.getManifestPath());
}
if (files.length == 0) {
return addAsManifestResource(new FileAsset(resource), target);
}
for (File file : files) {
ArchivePath child = ArchivePaths.create(file.getName());
addAsManifestResource(file, new BasicPath(target, child));
}
return covarientReturn();
} |
java | public void formatUnits(List<UnitValue> values, StringBuilder destination, UnitFormatOptions options) {
int size = values.size();
for (int i = 0; i < size; i++) {
if (i > 0) {
destination.append(' ');
}
formatUnit(values.get(i), destination, options);
}
} |
python | def get_scenario(scenario_id,**kwargs):
"""
Get the specified scenario
"""
user_id = kwargs.get('user_id')
scen_i = _get_scenario(scenario_id, user_id)
scen_j = JSONObject(scen_i)
rscen_rs = db.DBSession.query(ResourceScenario).filter(ResourceScenario.scenario_id==scenario_id).options(joinedload_all('dataset.metadata')).all()
#lazy load resource attributes and attributes
for rs in rscen_rs:
rs.resourceattr
rs.resourceattr.attr
rgi_rs = db.DBSession.query(ResourceGroupItem).filter(ResourceGroupItem.scenario_id==scenario_id).all()
scen_j.resourcescenarios = []
for rs in rscen_rs:
rs_j = JSONObject(rs, extras={'resourceattr':JSONObject(rs.resourceattr)})
if rs.dataset.check_read_permission(user_id, do_raise=False) is False:
rs_j.dataset['value'] = None
rs_j.dataset.metadata = JSONObject({})
scen_j.resourcescenarios.append(rs_j)
scen_j.resourcegroupitems = [JSONObject(r) for r in rgi_rs]
return scen_j |
java | @Override
public java.util.concurrent.Future<DescribeTableResult> describeTableAsync(String tableName,
com.amazonaws.handlers.AsyncHandler<DescribeTableRequest, DescribeTableResult> asyncHandler) {
return describeTableAsync(new DescribeTableRequest().withTableName(tableName), asyncHandler);
} |
python | def merge_groundings(stmts_in):
"""Gather and merge original grounding information from evidences.
Each Statement's evidences are traversed to find original grounding
information. These groundings are then merged into an overall consensus
grounding dict with as much detail as possible.
The current implementation is only applicable to Statements whose
concept/agent roles are fixed. Complexes, Associations and Conversions
cannot be handled correctly.
Parameters
----------
stmts_in : list[indra.statements.Statement]
A list of INDRA Statements whose groundings should be merged. These
Statements are meant to have been preassembled and potentially have
multiple pieces of evidence.
Returns
-------
stmts_out : list[indra.statements.Statement]
The list of Statements now with groundings merged at the Statement
level.
"""
def surface_grounding(stmt):
# Find the "best" grounding for a given concept and its evidences
# and surface that
for idx, concept in enumerate(stmt.agent_list()):
if concept is None:
continue
aggregate_groundings = {}
for ev in stmt.evidence:
if 'agents' in ev.annotations:
groundings = ev.annotations['agents']['raw_grounding'][idx]
for ns, value in groundings.items():
if ns not in aggregate_groundings:
aggregate_groundings[ns] = []
if isinstance(value, list):
aggregate_groundings[ns] += value
else:
aggregate_groundings[ns].append(value)
best_groundings = get_best_groundings(aggregate_groundings)
concept.db_refs = best_groundings
def get_best_groundings(aggregate_groundings):
best_groundings = {}
for ns, values in aggregate_groundings.items():
# There are 3 possibilities here
# 1. All the entries in the list are scored in which case we
# get unique entries and sort them by score
if all([isinstance(v, (tuple, list)) for v in values]):
best_groundings[ns] = []
for unique_value in {v[0] for v in values}:
scores = [v[1] for v in values if v[0] == unique_value]
best_groundings[ns].append((unique_value, max(scores)))
best_groundings[ns] = \
sorted(best_groundings[ns], key=lambda x: x[1],
reverse=True)
# 2. All the entries in the list are unscored in which case we
# get the highest frequency entry
elif all([not isinstance(v, (tuple, list)) for v in values]):
best_groundings[ns] = max(set(values), key=values.count)
# 3. There is a mixture, which can happen when some entries were
# mapped with scores and others had no scores to begin with.
# In this case, we again pick the highest frequency non-scored
# entry assuming that the unmapped version is more reliable.
else:
unscored_vals = [v for v in values
if not isinstance(v, (tuple, list))]
best_groundings[ns] = max(set(unscored_vals),
key=unscored_vals.count)
return best_groundings
stmts_out = []
for stmt in stmts_in:
if not isinstance(stmt, (Complex, Conversion)):
surface_grounding(stmt)
stmts_out.append(stmt)
return stmts_out |
java | public java.util.List<Queue> getQueues() {
if (queues == null) {
queues = new com.amazonaws.internal.SdkInternalList<Queue>();
}
return queues;
} |
java | private Criteria buildCriteria(QueryModel queryModel) {
Criteria criteria = getCurrentSession().createCriteria(persistentClass);
if (queryModel.getConditions() != null) {
for (Condition condition : queryModel.getConditions()) {
criteria.add((Criterion) condition.getConstraint());
}
}
for (Map.Entry<String, List<Condition>> associationCriteriaEntry : queryModel.getAssociationConditions().entrySet()) {
Criteria associationCriteria = criteria.createCriteria(associationCriteriaEntry.getKey());
criteria.setResultTransformer(Criteria.DISTINCT_ROOT_ENTITY);
for (Condition condition : associationCriteriaEntry.getValue()) {
associationCriteria.add((Criterion) condition.getConstraint());
}
}
if (queryModel.getProjection() != null) {
ProjectionList projectionList = Projections.projectionList();
projectionList.add((org.hibernate.criterion.Projection) queryModel.getProjection().getDetails());
criteria.setProjection(projectionList);
}
return criteria;
} |
java | protected void unassignFromUserObjectInDb(final Type _unassignType,
final JAASSystem _jaasSystem,
final AbstractUserObject _object)
throws EFapsException
{
Connection con = null;
try {
con = Context.getConnection();
Statement stmt = null;
final StringBuilder cmd = new StringBuilder();
try {
cmd.append("delete from ").append(_unassignType.getMainTable().getSqlTable()).append(" ").append(
"where USERJAASSYSTEM=").append(_jaasSystem.getId()).append(" ").append(
"and USERABSTRACTFROM=").append(getId()).append(" ").append("and USERABSTRACTTO=")
.append(_object.getId());
stmt = con.createStatement();
stmt.executeUpdate(cmd.toString());
} catch (final SQLException e) {
AbstractUserObject.LOG.error("could not execute '" + cmd.toString()
+ "' to unassign user object '" + toString()
+ "' from object '" + _object + "' for JAAS system '" + _jaasSystem + "' ", e);
throw new EFapsException(getClass(), "unassignFromUserObjectInDb.SQLException", e, cmd.toString(),
getName());
} finally {
try {
if (stmt != null) {
stmt.close();
}
con.commit();
} catch (final SQLException e) {
AbstractUserObject.LOG.error("Could not close a statement.", e);
}
}
} finally {
try {
if (con != null && !con.isClosed()) {
con.close();
}
} catch (final SQLException e) {
throw new CacheReloadException("Cannot close the connection.", e);
}
}
} |
python | def cosine_similarity(F_a, F_b):
"""
Calculate `cosine similarity
<http://en.wikipedia.org/wiki/Cosine_similarity>`_ for sparse feature
vectors.
Parameters
----------
F_a : :class:`.Feature`
F_b : :class:`.Feature`
Returns
-------
similarity : float
Cosine similarity.
"""
shared = list(F_a.unique & F_b.unique)
norm_a, norm_b = dict(F_a.norm), dict(F_b.norm)
A = [norm_a[i] for i in shared]
B = [norm_b[i] for i in shared]
dot = sum(map(lambda a, b: a*b, A, B))
mag_A = sqrt(sum(map(lambda a: a**2, A)))
mag_B = sqrt(sum(map(lambda a: a**2, B)))
return dot / (mag_A * mag_B) |
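A quick sanity check of the corrected denominator (`|A| * |B|`, not `|A| + |B|`) on plain lists; a minimal sketch, not the `Feature`-based API:

```python
from math import sqrt

A, B = [1.0, 0.0], [1.0, 0.0]
dot = sum(a * b for a, b in zip(A, B))
sim = dot / (sqrt(sum(a * a for a in A)) * sqrt(sum(b * b for b in B)))
print(sim)  # 1.0 for identical vectors; the additive denominator would give 0.5
```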
python | def _generate_scene_func(self, gen, func_name, create_new_scene, *args, **kwargs):
"""Abstract method for running a Scene method on each Scene.
Additionally, modifies current MultiScene or creates a new one if needed.
"""
new_gen = self._call_scene_func(gen, func_name, create_new_scene, *args, **kwargs)
new_gen = new_gen if self.is_generator else list(new_gen)
if create_new_scene:
return self.__class__(new_gen)
self._scene_gen = _SceneGenerator(new_gen)
self._scenes = iter(self._scene_gen) |
python | def enable_plugin(name, runas=None):
'''
Enable a RabbitMQ plugin via the rabbitmq-plugins command.
CLI Example:
.. code-block:: bash
salt '*' rabbitmq.enable_plugin foo
'''
if runas is None and not salt.utils.platform.is_windows():
runas = salt.utils.user.get_user()
cmd = [_get_rabbitmq_plugin(), 'enable', name]
ret = __salt__['cmd.run_all'](cmd, reset_system_locale=False, runas=runas, python_shell=False)
return _format_response(ret, 'Enabled') |
python | def colorspace(im, bw=False, replace_alpha=False, **kwargs):
"""
Convert images to the correct color space.
A passive option (i.e. always processed) of this method is that all images
(unless grayscale) are converted to RGB colorspace.
This processor should be listed before :func:`scale_and_crop` so palette is
changed before the image is resized.
bw
Make the thumbnail grayscale (not really just black & white).
replace_alpha
Replace any transparency layer with a solid color. For example,
``replace_alpha='#fff'`` would replace the transparency layer with
white.
"""
if im.mode == 'I':
# PIL (and Pillow) can't convert 16 bit grayscale images to lower
# modes, so manually convert them to an 8 bit grayscale.
im = im.point(list(_points_table()), 'L')
is_transparent = utils.is_transparent(im)
is_grayscale = im.mode in ('L', 'LA')
new_mode = im.mode
if is_grayscale or bw:
new_mode = 'L'
else:
new_mode = 'RGB'
if is_transparent:
if replace_alpha:
if im.mode != 'RGBA':
im = im.convert('RGBA')
base = Image.new('RGBA', im.size, replace_alpha)
base.paste(im, mask=im)
im = base
else:
new_mode = new_mode + 'A'
if im.mode != new_mode:
im = im.convert(new_mode)
return im |
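A typical call, assuming the surrounding module (with its `utils.is_transparent` and `_points_table` helpers) is importable and Pillow is installed:

```python
from PIL import Image

im = Image.new('RGBA', (4, 4), (255, 0, 0, 128))  # semi-transparent red
out = colorspace(im, bw=True, replace_alpha='#fff')
print(out.mode)  # 'L': alpha flattened onto white, then converted to grayscale
```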
java | public static PoliciesCache fromSourceProvider(
final SourceProvider provider,
final Set<Attribute> forcedContext
)
{
return new PoliciesCache(provider, forcedContext);
} |
python | def _get_answer_spans(answer_list, answer_start_list):
"""Find all answer spans from the context, returning start_index and end_index
:param list[str] answer_list: List of all answers
:param list[int] answer_start_list: List of all answers' start indices
Returns
-------
List[Tuple]
list of tuples (answer_start_index, answer_end_index) per question
"""
return [(answer_start_list[i], answer_start_list[i] + len(answer))
for i, answer in enumerate(answer_list)] |
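A tiny worked example, assuming the function above is in scope; note that the end index is `start + len(answer)`, i.e. exclusive:

```python
spans = _get_answer_spans(['Saint Bernadette'], [515])
print(spans)  # [(515, 531)] -- 515 plus the 16 characters of the answer
```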
java | public void drawOval(float x1, float y1, float width, float height) {
drawOval(x1, y1, width, height, DEFAULT_SEGMENTS);
} |
java | public static <T> List<T> selectRandomSubset (Collection<T> col, int count)
{
int csize = col.size();
if (csize < count) {
String errmsg = "Cannot select " + count + " elements " +
"from a collection of only " + csize + " elements.";
throw new IllegalArgumentException(errmsg);
}
ArrayList<T> subset = new ArrayList<T>(count);
Iterator<T> iter = col.iterator();
int s = 0;
for (int k = 0; iter.hasNext(); k++) {
T elem = iter.next();
// the probability that an element is selected for inclusion in our
// random subset is equal to the number of elements remaining
// to be included divided by the number of elements remaining
// to be checked
float limit = ((float)(count - s)) / ((float)(csize - k));
// include the record if our random value is below the limit
if (Math.random() < limit) {
subset.add(elem);
// stop looking if we've reached our target size
if (++s == count) {
break;
}
}
}
return subset;
} |
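This is selection sampling (Knuth's Algorithm S); a minimal Python sketch of the same proportional-probability scheme may make the invariant clearer:

```python
import random

def select_random_subset(col, count):
    # Keep each element with probability (still needed) / (still available);
    # every size-`count` subset is equally likely, in a single pass.
    subset, csize = [], len(col)
    for k, elem in enumerate(col):
        if random.random() < (count - len(subset)) / (csize - k):
            subset.append(elem)
            if len(subset) == count:
                break
    return subset

print(select_random_subset(list(range(10)), 3))  # e.g. [0, 4, 7]
```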
java | public int setDateTime(java.util.Date date, boolean bDisplayOption, int iMoveMode)
{
if (date == null)
return this.setData(date, bDisplayOption, iMoveMode);
m_calendar.setTime(date);
m_calendar.set(Calendar.HOUR_OF_DAY, DBConstants.HOUR_DATE_ONLY);
m_calendar.set(Calendar.MINUTE, 0);
m_calendar.set(Calendar.SECOND, 0);
m_calendar.set(Calendar.MILLISECOND, 0);
date = m_calendar.getTime();
return this.setValue(date.getTime(), bDisplayOption, iMoveMode);
} |
python | def crossover(dna1, dna2):
"""crossover dna1 and dna2 at a random index"""
pos = int(random.random()*len(dna1))
if random.random() < 0.5:
return (dna1[:pos]+dna2[pos:], dna2[:pos]+dna1[pos:])
else:
return (dna2[:pos]+dna1[pos:], dna1[:pos]+dna2[pos:]) |
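A short usage example (deterministic under a fixed seed, since `random` is a Mersenne Twister):

```python
import random

random.seed(42)
child1, child2 = crossover('AAAAAA', 'BBBBBB')
print(child1, child2)  # AAABBB BBBAAA -- split at index 3 for this seed
```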
python | def render_field(self, obj, field_name, **options):
"""Render field"""
try:
field = obj._meta.get_field(field_name)
except FieldDoesNotExist:
return getattr(obj, field_name, '')
if hasattr(field, 'choices') and getattr(field, 'choices'):
return getattr(obj, 'get_{}_display'.format(field_name))()
value = getattr(obj, field_name, '')
renderer = self.renderers.get(type(field))
if renderer:
return renderer(value, **options)
if isinstance(value, models.BaseModel):
value = str(value)
return self.render_value(value, **options) |
java | public void mark(Label label) {
adopt(label);
if (label.marked) {
throw new IllegalStateException("already marked");
}
label.marked = true;
if (currentLabel != null) {
jump(label); // blocks must end with a branch, return or throw
}
currentLabel = label;
} |
python | def fielddefsql_from_fieldspeclist(
self, fieldspeclist: FIELDSPECLIST_TYPE) -> str:
"""Returns list of field-defining SQL fragments."""
return ",".join([
self.fielddefsql_from_fieldspec(x)
for x in fieldspeclist
]) |
java | private ManagedBeanInvocation getServiceInvocation(Message message, JmxEndpointConfiguration endpointConfiguration) {
Object payload = message.getPayload();
ManagedBeanInvocation serviceInvocation = null;
if (payload != null) {
if (payload instanceof ManagedBeanInvocation) {
serviceInvocation = (ManagedBeanInvocation) payload;
} else if (StringUtils.hasText(message.getPayload(String.class))) {
serviceInvocation = (ManagedBeanInvocation) endpointConfiguration.getMarshaller()
.unmarshal(message.getPayload(Source.class));
} else {
serviceInvocation = new ManagedBeanInvocation();
}
}
return serviceInvocation;
} |
python | def is_de_listed(self):
"""
Check whether the contract has been de-listed (expired).
"""
instrument = Environment.get_instance().get_instrument(self._order_book_id)
current_date = Environment.get_instance().trading_dt
if instrument.de_listed_date is not None and current_date >= instrument.de_listed_date:
return True
return False |
java | protected List<VCProject> getParsedProjects( BuildPlatform platform, BuildConfiguration configuration )
throws MojoExecutionException
{
Map<String, String> envVariables = new HashMap<String, String>();
if ( isCxxTestEnabled( null, true ) )
{
envVariables.put( CxxTestConfiguration.HOME_ENVVAR, cxxTest.getCxxTestHome().getPath() );
}
VCProjectHolder vcProjectHolder = VCProjectHolder.getVCProjectHolder( projectFile,
MSBuildPackaging.isSolution( mavenProject.getPackaging() ), envVariables );
try
{
return vcProjectHolder.getParsedProjects( platform.getName(), configuration.getName() );
}
catch ( FileNotFoundException fnfe )
{
throw new MojoExecutionException( "Could not find file " + projectFile, fnfe );
}
catch ( IOException ioe )
{
throw new MojoExecutionException( "I/O error while parsing file " + projectFile, ioe );
}
catch ( SAXException se )
{
throw new MojoExecutionException( "Syntax error while parsing file " + projectFile, se );
}
catch ( ParserConfigurationException pce )
{
throw new MojoExecutionException( "XML parser configuration exception ", pce );
}
catch ( ParseException pe )
{
throw new MojoExecutionException( "Syntax error while parsing solution file " + projectFile, pe );
}
} |
python | def compute_wcs(key, challenge):
"""
Compute an WAMP-CRA authentication signature from an authentication
challenge and a (derived) key.
:param key: The key derived (via PBKDF2) from the secret.
:type key: str/bytes
:param challenge: The authentication challenge to sign.
:type challenge: str/bytes
:return: The authentication signature.
:rtype: bytes
"""
key = key.encode('utf8')
challenge = challenge.encode('utf8')
sig = hmac.new(key, challenge, hashlib.sha256).digest()
return binascii.b2a_base64(sig).strip() |
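Both arguments are UTF-8 encoded before signing, so plain strings work directly; verification is just recomputing the signature and comparing (assuming `compute_wcs` above is in scope):

```python
import binascii
import hashlib
import hmac

key, challenge = 'derived-key', '{"authid": "joe", "nonce": "abc"}'
sig = hmac.new(key.encode('utf8'), challenge.encode('utf8'), hashlib.sha256).digest()
print(binascii.b2a_base64(sig).strip() == compute_wcs(key, challenge))  # True
```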
java | public static BufferedImage rotate(BufferedImage image, double angle, int cx, int cy) {
int width = image.getWidth(null);
int height = image.getHeight(null);
int minX, minY, maxX, maxY;
minX = minY = maxX = maxY = 0;
int[] corners = {0, 0, width, 0, width, height, 0, height};
double theta = Math.toRadians(angle);
for (int i = 0; i < corners.length; i += 2) {
int x = (int) (Math.cos(theta) * (corners[i] - cx)
- Math.sin(theta) * (corners[i + 1] - cy) + cx);
int y = (int) (Math.sin(theta) * (corners[i] - cx)
+ Math.cos(theta) * (corners[i + 1] - cy) + cy);
if (x > maxX) {
maxX = x;
}
if (x < minX) {
minX = x;
}
if (y > maxY) {
maxY = y;
}
if (y < minY) {
minY = y;
}
}
cx = (cx - minX);
cy = (cy - minY);
BufferedImage bi = new BufferedImage((maxX - minX), (maxY - minY),
image.getType());
Graphics2D g2 = bi.createGraphics();
g2.setRenderingHint(RenderingHints.KEY_INTERPOLATION,
RenderingHints.VALUE_INTERPOLATION_BICUBIC);
g2.setBackground(Color.white);
g2.fillRect(0, 0, bi.getWidth(), bi.getHeight());
AffineTransform at = new AffineTransform();
at.rotate(theta, cx, cy);
g2.setTransform(at);
g2.drawImage(image, -minX, -minY, null);
g2.dispose();
return bi;
} |
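The corner loop computes the axis-aligned bounding box of the rotated image; a minimal Python sketch of the same geometry (pure min/max over the corners, whereas the Java version also clamps the box to include the origin):

```python
import math

def rotated_bounds(width, height, angle_deg, cx, cy):
    # Rotate each corner about (cx, cy) and take the bounding box.
    theta = math.radians(angle_deg)
    cos_t, sin_t = math.cos(theta), math.sin(theta)
    xs, ys = [], []
    for x, y in [(0, 0), (width, 0), (width, height), (0, height)]:
        xs.append(cos_t * (x - cx) - sin_t * (y - cy) + cx)
        ys.append(sin_t * (x - cx) + cos_t * (y - cy) + cy)
    return min(xs), min(ys), max(xs), max(ys)

print(rotated_bounds(200, 100, 45, 100, 50))
```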
java | public Vector<TaskInProgress> reportCleanupTIPs(boolean shouldBeComplete) {
Vector<TaskInProgress> results = new Vector<TaskInProgress>();
for (int i = 0; i < cleanup.length; i++) {
if (cleanup[i].isComplete() == shouldBeComplete) {
results.add(cleanup[i]);
}
}
return results;
} |
java | protected Widget getViewFromAdapter(final int index, ListItemHostWidget host) {
return mAdapter == null ? null : mAdapter.getView(index, host.getGuest(), host);
} |
java | @SuppressWarnings("unchecked")
@Override
public EList<IfcRelConnectsStructuralActivity> getAssignedStructuralActivity() {
return (EList<IfcRelConnectsStructuralActivity>) eGet(
Ifc4Package.Literals.IFC_STRUCTURAL_ITEM__ASSIGNED_STRUCTURAL_ACTIVITY, true);
} |
java | public JMFMessage copy() {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) JmfTr.entry(this, tc, "copy");
JMFMessage copy;
synchronized (getMessageLockArtefact()) {
if (map == null) {
// If there is no map, then this has already become just a delegator, so
// the copy does not need to include the compatibility wrapper.
copy = ((JMFMessage)encoding).copy();
}
else {
// In the usual case, we maintain a compatibility layer
copy = new JSCompatibleMessageImpl(this);
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) JmfTr.exit(this, tc, "copy", copy);
return copy;
} |
java | public void addNotIn(String attribute, Collection values)
{
List list = splitInCriteria(attribute, values, true, IN_LIMIT);
InCriteria inCrit;
for (int index = 0; index < list.size(); index++)
{
inCrit = (InCriteria) list.get(index);
addSelectionCriteria(inCrit);
}
} |
python | def delete_raw(self):
"""Delete the current entity.
Make an HTTP DELETE call to ``self.path('self')``. Return the response.
:return: A ``requests.response`` object.
"""
return client.delete(
self.path(which='self'),
**self._server_config.get_client_kwargs()
) |
python | def get_aad_content_string(content_type, is_final_frame):
"""Prepares the appropriate Body AAD Value for a message body.
:param content_type: Defines the type of content for which to prepare AAD String
:type content_type: aws_encryption_sdk.identifiers.ContentType
:param bool is_final_frame: Boolean stating whether this is the final frame in a body
:returns: Appropriate AAD Content String
:rtype: bytes
:raises UnknownIdentityError: if unknown content type
"""
if content_type == ContentType.NO_FRAMING:
aad_content_string = ContentAADString.NON_FRAMED_STRING_ID
elif content_type == ContentType.FRAMED_DATA:
if is_final_frame:
aad_content_string = ContentAADString.FINAL_FRAME_STRING_ID
else:
aad_content_string = ContentAADString.FRAME_STRING_ID
else:
raise UnknownIdentityError("Unhandled content type")
return aad_content_string |
python | def get_angles(self, angle_id):
"""Get sun-satellite viewing angles"""
tic = datetime.now()
sunz40km = self._data["ang"][:, :, 0] * 1e-2
satz40km = self._data["ang"][:, :, 1] * 1e-2
azidiff40km = self._data["ang"][:, :, 2] * 1e-2
try:
from geotiepoints.interpolator import Interpolator
except ImportError:
logger.warning("Could not interpolate sun-sat angles, "
"python-geotiepoints missing.")
self.sunz, self.satz, self.azidiff = sunz40km, satz40km, azidiff40km
else:
cols40km = np.arange(24, 2048, 40)
cols1km = np.arange(2048)
lines = sunz40km.shape[0]
rows40km = np.arange(lines)
rows1km = np.arange(lines)
along_track_order = 1
cross_track_order = 3
satint = Interpolator(
[sunz40km, satz40km, azidiff40km], (rows40km, cols40km),
(rows1km, cols1km), along_track_order, cross_track_order)
self.sunz, self.satz, self.azidiff = satint.interpolate()
logger.debug("Interpolate sun-sat angles: time %s",
str(datetime.now() - tic))
return create_xarray(getattr(self, ANGLES[angle_id])) |
java | @Override
public V get(Object key) {
// Keys can not be null
if (key == null) {
throw new NullPointerException("Key can not be null");
}
if (!(key instanceof String)) {
throw new ClassCastException("Only String keys are supported -- got " + key.getClass());
}
// Empty keys are stored in the root
if (key.equals("")) {
if (root.getRight() == null) {
return null;
} else {
return root.getRight().getValue();
}
}
// Find nearest node
PatriciaNode<V> nearest = findNearestNode((String) key);
// If the nearest node matches key, we have a match
if (key.equals(nearest.getKey())) {
return nearest.getValue();
} else {
return null;
}
} |
java | protected void addDocIterators(Collection<Iterator<Document>> docIters,
String[] fileNames) throws IOException {
// All the documents are listed in one file, with one document per line
for (String s : fileNames)
docIters.add(new DependencyFileDocumentIterator(s));
} |
java | @Override public Date parse(String str, ParsePosition pos)
{
Date result;
if (str == null || str.trim().length() == 0)
{
result = null;
pos.setIndex(-1);
}
else
{
result = parseNonNullDate(str, pos);
}
return result;
} |
java | public String readResource(URI endpoint, CredentialsEndpointRetryPolicy retryPolicy, Map<String, String> headers) throws IOException {
int retriesAttempted = 0;
InputStream inputStream = null;
headers = addDefaultHeaders(headers);
while (true) {
try {
HttpURLConnection connection = connectionUtils.connectToEndpoint(endpoint, headers);
int statusCode = connection.getResponseCode();
if (statusCode == HttpURLConnection.HTTP_OK) {
inputStream = connection.getInputStream();
return IOUtils.toString(inputStream);
} else if (statusCode == HttpURLConnection.HTTP_NOT_FOUND) {
// This is to preserve existing behavior of EC2 Instance metadata service.
throw new SdkClientException("The requested metadata is not found at " + connection.getURL());
} else {
if (!retryPolicy.shouldRetry(retriesAttempted++, CredentialsEndpointRetryParameters.builder().withStatusCode(statusCode).build())) {
inputStream = connection.getErrorStream();
handleErrorResponse(inputStream, statusCode, connection.getResponseMessage());
}
}
} catch (IOException ioException) {
if (!retryPolicy.shouldRetry(retriesAttempted++, CredentialsEndpointRetryParameters.builder().withException(ioException).build())) {
throw ioException;
}
LOG.debug("An IOException occured when connecting to service endpoint: " + endpoint + "\n Retrying to connect again.");
} finally {
IOUtils.closeQuietly(inputStream, LOG);
}
}
} |
java | private String persistWorkUnit(final Path workUnitFileDir, final WorkUnit workUnit, ParallelRunner stateSerDeRunner)
throws IOException {
final StateStore stateStore;
String workUnitFileName = workUnit.getId();
if (workUnit instanceof MultiWorkUnit) {
workUnitFileName += MULTI_WORK_UNIT_FILE_EXTENSION;
stateStore = stateStores.getMwuStateStore();
} else {
workUnitFileName += WORK_UNIT_FILE_EXTENSION;
stateStore = stateStores.getWuStateStore();
}
Path workUnitFile = new Path(workUnitFileDir, workUnitFileName);
final String fileName = workUnitFile.getName();
final String storeName = workUnitFile.getParent().getName();
stateSerDeRunner.submitCallable(new Callable<Void>() {
@Override
public Void call() throws Exception {
stateStore.put(storeName, fileName, workUnit);
return null;
}
}, "Serialize state to store " + storeName + " file " + fileName);
return workUnitFile.toString();
} |
java | public void setupReadListener(ReadListener readListenerl, SRTUpgradeInputStream31 srtUpgradeStream){
if(readListenerl == null){
if (TraceComponent.isAnyTracingEnabled() && tc.isErrorEnabled())
Tr.error(tc, "readlistener.is.null");
throw new NullPointerException(Tr.formatMessage(tc, "readlistener.is.null"));
}
if(_rl != null){
if (TraceComponent.isAnyTracingEnabled() && tc.isErrorEnabled())
Tr.error(tc, "readlistener.already.started");
throw new IllegalStateException(Tr.formatMessage(tc, "readlistener.already.started"));
}
//Save off the current Thread data by creating the ThreadContextManager. Then pass it into the callback
ThreadContextManager tcm = new ThreadContextManager();
_tcpChannelCallback = new UpgradeReadCallback(readListener, this, tcm, srtUpgradeStream);
_rl = readListener;
_isReady = false;
_upConn.getVirtualConnection().getStateMap().put(TransportConstants.UPGRADED_LISTENER, "true");
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()){
Tr.debug(tc, "setupReadListener, Starting the initial read");
}
initialRead();
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()){
Tr.debug(tc, "setupReadListener, ReadListener set : " + _rl);
}
} |
python | def proQuestRecordParser(enRecordFile, recNum):
"""The parser [ProQuestRecords](../classes/ProQuestRecord.html#metaknowledge.proquest.ProQuestRecord) use. This takes an entry from [proQuestParser()](#metaknowledge.proquest.proQuestHandlers.proQuestParser) and parses it a part of the creation of a `ProQuestRecord`.
# Parameters
_enRecordFile_ : `enumerate object`
> a file wrapped by `enumerate()`
_recNum_ : `int`
> The number given to the entry in the first section of the ProQuest file
# Returns
`collections.OrderedDict`
> An ordered dictionary of the key-vaue pairs in the entry
"""
tagDict = collections.OrderedDict()
currentEntry = 'Name'
while True:
lineNum, line = next(enRecordFile)
if line == '_' * 60 + '\n':
break
elif line == '\n':
pass
        elif currentEntry == 'Name' or currentEntry == 'url':
tagDict[currentEntry] = [line.rstrip()]
currentEntry = None
elif ':' in line and not line.startswith('http://'):
splitLine = line.split(': ')
currentEntry = splitLine[0]
tagDict[currentEntry] = [': '.join(splitLine[1:]).rstrip()]
if currentEntry == 'Author':
currentEntry = 'url'
else:
tagDict[currentEntry].append(line.rstrip())
return tagDict |
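A minimal driving sketch, assuming proQuestRecordParser is importable and using a hypothetical record body (real ProQuest exports differ); the 60-underscore line is the entry terminator the parser looks for:

import io

sample = (
    "Some Article Title\n"
    "Author: Doe, Jane\n"
    "http://example.com/doc/1\n"
    "Abstract: An example abstract\n"
    " spanning a second line\n"
    + "_" * 60 + "\n"
)
record = proQuestRecordParser(enumerate(io.StringIO(sample)), 1)
# OrderedDict with 'Name', 'Author', 'url' and 'Abstract' keys;
# the continuation line is appended to the 'Abstract' value list.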
python | def tick(self):
"""Updates meters"""
for m in self._meters:
m.tick()
self['m1'] = self._m1.rate
self['m5'] = self._m5.rate
self['m15'] = self._m15.rate |
python | def kronecker_lmm(snps,phenos,covs=None,Acovs=None,Asnps=None,K1r=None,K1c=None,K2r=None,K2c=None,covar_type='lowrank_diag',rank=1,NumIntervalsDelta0=100,NumIntervalsDeltaAlt=0,searchDelta=False):
"""
simple wrapper for kroneckerLMM code
Args:
snps: [N x S] SP.array of S SNPs for N individuals (test SNPs)
phenos: [N x P] SP.array of P phenotypes for N individuals
covs: list of SP.arrays holding covariates. Each covs[i] has one corresponding Acovs[i]
Acovs: list of SP.arrays holding the phenotype design matrices for covariates.
Each covs[i] has one corresponding Acovs[i].
Asnps: single SP.array of I0 interaction variables to be included in the
background model when testing for interaction with Inters
If not provided, the alternative model will be the independent model
        K1r: [N x N] SP.array of LMM-covariance/kinship coefficients (optional)
            If not provided, then linear regression analysis is performed
        K1c: [P x P] SP.array of LMM-covariance/kinship coefficients (optional)
            If not provided, then linear regression analysis is performed
        K2r: [N x N] SP.array of LMM-covariance/kinship coefficients (optional)
            If not provided, then linear regression analysis is performed
        K2c: [P x P] SP.array of LMM-covariance/kinship coefficients (optional)
            If not provided, then linear regression analysis is performed
        covar_type: type of covariance to use. Default 'lowrank_diag'. Possible values are
'freeform': free form optimization,
'fixed': use a fixed matrix specified in covar_K0,
'diag': optimize a diagonal matrix,
'lowrank': optimize a low rank matrix. The rank of the lowrank part is specified in the variable rank,
'lowrank_id': optimize a low rank matrix plus the weight of a constant diagonal matrix. The rank of the lowrank part is specified in the variable rank,
'lowrank_diag': optimize a low rank matrix plus a free diagonal matrix. The rank of the lowrank part is specified in the variable rank,
'block': optimize the weight of a constant P x P block matrix of ones,
'block_id': optimize the weight of a constant P x P block matrix of ones plus the weight of a constant diagonal matrix,
'block_diag': optimize the weight of a constant P x P block matrix of ones plus a free diagonal matrix,
rank: rank of a possible lowrank component (default 1)
NumIntervalsDelta0: number of steps for delta optimization on the null model (100)
NumIntervalsDeltaAlt:number of steps for delta optimization on the alt. model (0 - no optimization)
searchDelta: Boolean indicator if delta is optimized during SNP testing (default False)
Returns:
CKroneckerLMM object
        P-values for all SNPs from likelihood ratio test
"""
#0. checks
N = phenos.shape[0]
P = phenos.shape[1]
    if K1r is None:
        K1r = SP.dot(snps,snps.T)
    else:
        assert K1r.shape[0]==N, 'K1r: dimensions mismatch'
        assert K1r.shape[1]==N, 'K1r: dimensions mismatch'
    if K2r is None:
        K2r = SP.eye(N)
    else:
        assert K2r.shape[0]==N, 'K2r: dimensions mismatch'
        assert K2r.shape[1]==N, 'K2r: dimensions mismatch'
covs,Acovs = updateKronCovs(covs,Acovs,N,P)
#Asnps can be several designs
if Asnps is None:
Asnps = [SP.ones([1,P])]
if (type(Asnps)!=list):
Asnps = [Asnps]
assert len(Asnps)>0, "need at least one Snp design matrix"
#one row per column design matrix
pv = SP.zeros((len(Asnps),snps.shape[1]))
#1. run GP model to infer suitable covariance structure
    if K1c is None or K2c is None:
vc = estimateKronCovariances(phenos=phenos, K1r=K1r, K2r=K2r, K1c=K1c, K2c=K2c, covs=covs, Acovs=Acovs, covar_type=covar_type, rank=rank)
K1c = vc.getEstTraitCovar(0)
K2c = vc.getEstTraitCovar(1)
else:
        assert K1c.shape[0]==P, 'K1c: dimensions mismatch'
        assert K1c.shape[1]==P, 'K1c: dimensions mismatch'
        assert K2c.shape[0]==P, 'K2c: dimensions mismatch'
        assert K2c.shape[1]==P, 'K2c: dimensions mismatch'
#2. run kroneckerLMM
lmm = limix.CKroneckerLMM()
lmm.setK1r(K1r)
lmm.setK1c(K1c)
lmm.setK2r(K2r)
lmm.setK2c(K2c)
lmm.setSNPs(snps)
#add covariates
for ic in range(len(Acovs)):
lmm.addCovariates(covs[ic],Acovs[ic])
lmm.setPheno(phenos)
    #delta search on alt. model?
if searchDelta:
lmm.setNumIntervalsAlt(NumIntervalsDeltaAlt)
else:
lmm.setNumIntervalsAlt(0)
lmm.setNumIntervals0(NumIntervalsDelta0)
for iA in range(len(Asnps)):
#add SNP design
lmm.setSNPcoldesign(Asnps[iA])
lmm.process()
pv[iA,:] = lmm.getPv()[0]
return lmm,pv |
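A shape-only usage sketch, assuming the limix stack and the helper functions referenced above are importable; the random inputs are placeholders, not meaningful genotypes:

import numpy as np

N, P, S = 100, 3, 500
snps = np.random.rand(N, S)      # N individuals x S test SNPs
phenos = np.random.rand(N, P)    # N individuals x P phenotypes
lmm, pv = kronecker_lmm(snps, phenos)
# pv.shape == (1, S): one row per SNP design matrix in Asnps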
python | def generate_mapping(order):
"""
This function will take an order string and return a mapping between
components in the metric and the various Lambda components. This must be
used (and consistently used) when generating the metric *and* when
transforming to/from the xi_i coordinates to the lambda_i coordinates.
NOTE: This is not a great way of doing this. It would be nice to clean
this up. Hence pulling this function out. The valid PN orders are
{}
Parameters
----------
order : string
A string containing a PN order. Valid values are given above.
Returns
--------
mapping : dictionary
A mapping between the active Lambda terms and index in the metric
"""
mapping = {}
mapping['Lambda0'] = 0
if order == 'zeroPN':
return mapping
mapping['Lambda2'] = 1
if order == 'onePN':
return mapping
mapping['Lambda3'] = 2
if order == 'onePointFivePN':
return mapping
mapping['Lambda4'] = 3
if order == 'twoPN':
return mapping
mapping['LogLambda5'] = 4
if order == 'twoPointFivePN':
return mapping
mapping['Lambda6'] = 5
mapping['LogLambda6'] = 6
if order == 'threePN':
return mapping
mapping['Lambda7'] = 7
if order == 'threePointFivePN':
return mapping
# For some as-of-yet unknown reason, the tidal terms are not giving correct
# match estimates when enabled. So, for now, this order is commented out.
#if order == 'tidalTesting':
# mapping['Lambda10'] = 8
# mapping['Lambda12'] = 9
# return mapping
raise ValueError("Order %s is not understood." %(order)) |
python | def makefile(self, sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE):
"""Return socket file object."""
cls = StreamReader if 'r' in mode else StreamWriter
return cls(sock, mode, bufsize) |
python | def update(self, i):
"""D.update(E) -> None. Update D from iterable E with pre-existing
items being overwritten.
Elements in E are assumed to be dicts containing the primary key to
    allow the equivalent of:
for k in E: D[k.primary_key] = k
"""
key_list = self.key_list
keynone = {key:None for key in key_list}
# Generator which fills in missing data from the original iterator
def datagen(i):
for datum in i:
tmp = keynone.copy()
tmp.update(datum)
yield tmp
with self._connection as con:
con.executemany(
"""INSERT OR REPLACE INTO {table} ({keylist})
VALUES (:{vallist});
""".format(table=self.table,
keylist=", ".join(self.key_list),
vallist=", :".join(self.key_list)
), datagen(i)) |
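A minimal usage sketch with a hypothetical store whose key_list is ['id', 'name', 'email'] and whose primary key is 'id'; the generator fills the missing 'email' with None so the named-parameter INSERT always binds every column:

rows = [
    {'id': 1, 'name': 'Ada'},                        # email -> NULL
    {'id': 2, 'name': 'Grace', 'email': 'g@x.io'},
]
store.update(rows)
# executes: INSERT OR REPLACE INTO <table> (id, name, email)
#           VALUES (:id, :name, :email)  -- once per row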
python | def future(self,in_days=None,in_hours=None,in_minutes=None,in_seconds=None):
"""
Function to return a future timestep
"""
future = None
# Initialize variables to 0
dd, hh, mm, ss = [0 for i in range(4)]
    if in_days is not None:
        dd = dd + in_days
    if in_hours is not None:
        hh = hh + in_hours
    if in_minutes is not None:
        mm = mm + in_minutes
    if in_seconds is not None:
        ss = ss + in_seconds
# Set the hours, minutes and seconds from now (minus the days)
dnow = datetime.datetime.utcnow() # Now
d = dnow + \
datetime.timedelta(hours=hh, minutes=mm, seconds = ss)
# Time from midnight
for_total_seconds = d - \
d.replace(hour=0, minute=0, second=0, microsecond=0)
# Convert into minutes since midnight
    try:
        msm = for_total_seconds.total_seconds()/60.
    except AttributeError:
        # total_seconds() is unavailable before Python 2.7
        msm = self.timedelta_total_seconds(for_total_seconds)/60.
if (dd<len(self.days)):
for timestep in self.days[dd].timesteps:
if timestep.name >= msm:
future = timestep
return future
else:
        print('ERROR: requested date is outside the forecast range of ' + str(len(self.days)) + ' day(s)')
return False |
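Hypothetical usage, assuming forecast is an instance of the surrounding class with a populated days list; the method returns the first timestep at or after the requested offset, or False when the day index falls outside the forecast:

step = forecast.future(in_hours=6, in_minutes=30)
if step:
    print(step.name)  # minutes since midnight of the matched timestep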
java | public static int mix32(int k) {
k = (k ^ (k >>> 16)) * 0x85ebca6b;
k = (k ^ (k >>> 13)) * 0xc2b2ae35;
return k ^ (k >>> 16);
} |
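This is MurmurHash3's 32-bit finalizer (fmix32), which avalanches the input bits so that similar keys hash far apart. A Python port as a sketch; unlike the Java version it masks to 32 bits at each step, since Python integers are unbounded, and it returns the unsigned value:

def mix32(k):
    k &= 0xFFFFFFFF
    k = ((k ^ (k >> 16)) * 0x85EBCA6B) & 0xFFFFFFFF
    k = ((k ^ (k >> 13)) * 0xC2B2AE35) & 0xFFFFFFFF
    return k ^ (k >> 16)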
java | public Optional<Dashboard> update(Dashboard dashboard)
{
return HTTP.PUT(String.format("/v2/dashboards/%d.json", dashboard.getId()), dashboard, DASHBOARD);
} |
python | def view_get(method_name):
"""
Creates a getter that will drop the current value,
and call the view's method with specified name
using the context's key as first argument.
@param method_name: the name of a method belonging to the view.
@type method_name: str
"""
def view_get(_value, context, **_params):
method = getattr(context["view"], method_name)
return _get(method, context["key"], (), {})
return view_get |
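Hypothetical usage, following the docstring's contract (the getter ignores the incoming value and calls the view's method with the context's key); some_view is a placeholder object:

getter = view_get('get_title')
title = getter(None, {'view': some_view, 'key': 'article-42'})
# equivalent to: some_view.get_title('article-42')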
python | def document_for_search(self, search_state):
"""
Return a :class:`~prompt_toolkit.document.Document` instance that has
the text/cursor position for this search, if we would apply it. This
will be used in the
:class:`~prompt_toolkit.layout.controls.BufferControl` to display
feedback while searching.
"""
search_result = self._search(search_state, include_current_position=True)
if search_result is None:
return self.document
else:
working_index, cursor_position = search_result
# Keep selection, when `working_index` was not changed.
if working_index == self.working_index:
selection = self.selection_state
else:
selection = None
return Document(self._working_lines[working_index],
cursor_position, selection=selection) |
java | public void add(VerifierComponent descr) {
if ( subSolver != null ) {
subSolver.add( descr );
} else {
if ( type == OperatorDescrType.AND ) {
if ( possibilityLists.isEmpty() ) {
possibilityLists.add( new HashSet<VerifierComponent>() );
}
for ( Set<VerifierComponent> set : possibilityLists ) {
set.add( descr );
}
} else if ( type == OperatorDescrType.OR ) {
Set<VerifierComponent> set = new HashSet<VerifierComponent>();
set.add( descr );
possibilityLists.add( set );
}
}
} |
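The AND/OR bookkeeping builds a disjunction of possibility sets: AND extends every existing set, OR opens a new alternative. A minimal Python sketch of the same idea (names hypothetical):

def add(possibility_lists, op, item):
    if op == 'AND':
        if not possibility_lists:
            possibility_lists.append(set())
        for s in possibility_lists:   # AND: item joins every alternative
            s.add(item)
    elif op == 'OR':
        possibility_lists.append({item})  # OR: item opens a new alternative

lists = []
add(lists, 'AND', 'a'); add(lists, 'OR', 'b'); add(lists, 'AND', 'c')
# lists == [{'a', 'c'}, {'b', 'c'}]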
java | public void debug(Marker marker, String format, Object... argArray) {
if (!logger.isDebugEnabled(marker))
return;
if (instanceofLAL) {
FormattingTuple ft = MessageFormatter.arrayFormat(format, argArray);
((LocationAwareLogger) logger).log(marker, fqcn, LocationAwareLogger.DEBUG_INT, ft.getMessage(), argArray, ft.getThrowable());
} else {
logger.debug(marker, format, argArray);
}
} |
java | private final T expeditedExtract() {
T old = expeditedBuffer[expeditedTakeIndex];
expeditedBuffer[expeditedTakeIndex] = null;
if (++expeditedTakeIndex >= expeditedBuffer.length)
expeditedTakeIndex = 0;
return old;
} |
python | def add_sam2rnf_parser(subparsers, subcommand, help, description, simulator_name=None):
"""Add another parser for a SAM2RNF-like command.
    Args:
        subparsers (subparsers): Subparsers object to which the new sub-command parser is added.
        subcommand (str): Name of the sub-command.
        help (str): Help message for the sub-command.
        description (str): Description of the sub-command.
        simulator_name (str): Name of the simulator used in comments.
"""
parser_sam2rnf = subparsers.add_parser(subcommand, help=help, description=description)
parser_sam2rnf.set_defaults(func=sam2rnf)
parser_sam2rnf.add_argument(
'-s', '--sam', type=str, metavar='file', dest='sam_fn', required=True,
help='Input SAM/BAM with true (expected) alignments of the reads (- for standard input).'
)
_add_shared_params(parser_sam2rnf, unmapped_switcher=True)
parser_sam2rnf.add_argument(
'-n',
'--simulator-name',
type=str,
metavar='str',
dest='simulator_name',
default=simulator_name,
help='Name of the simulator (for RNF).' if simulator_name is not None else argparse.SUPPRESS,
) |
python | def process_save(X, y, tokenizer, proc_data_path, max_len=400, train=False, ngrams=None, limit_top_tokens=None):
"""Process text and save as Dataset
"""
if train and limit_top_tokens is not None:
tokenizer.apply_encoding_options(limit_top_tokens=limit_top_tokens)
X_encoded = tokenizer.encode_texts(X)
if ngrams is not None:
X_encoded = tokenizer.add_ngrams(X_encoded, n=ngrams, train=train)
X_padded = tokenizer.pad_sequences(
X_encoded, fixed_token_seq_length=max_len)
if train:
ds = Dataset(X_padded,
y, tokenizer=tokenizer)
else:
ds = Dataset(X_padded, y)
ds.save(proc_data_path) |
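A hypothetical call, assuming a tokenizer object from the same library that has already been fit on the training texts; every name here is a placeholder:

process_save(train_texts, train_labels, tokenizer, 'data/train.bin',
             max_len=400, train=True, ngrams=2, limit_top_tokens=20000)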
java | public static String baseUrl() {
String baseURL = System.getProperty(BASE_URL_PROPERTY, "/_ah/pipeline/");
if (!baseURL.endsWith("/")) {
baseURL += "/";
}
return baseURL;
} |
java | public void put(String localFile, String remoteFile, boolean recursive)
throws SshException, ChannelOpenException, SftpStatusException {
put(localFile, remoteFile, recursive, null);
} |
java | private void accum_all(Chunk chks[], Chunk wrks, int nnids[]) {
final DHistogram hcs[][] = _hcs;
// Sort the rows by NID, so we visit all the same NIDs in a row
// Find the count of unique NIDs in this chunk
int nh[] = new int[hcs.length+1];
for( int i : nnids ) if( i >= 0 ) nh[i+1]++;
// Rollup the histogram of rows-per-NID in this chunk
for( int i=0; i<hcs.length; i++ ) nh[i+1] += nh[i];
// Splat the rows into NID-groups
int rows[] = new int[nnids.length];
for( int row=0; row<nnids.length; row++ )
if( nnids[row] >= 0 )
rows[nh[nnids[row]]++] = row;
// rows[] has Chunk-local ROW-numbers now, in-order, grouped by NID.
// nh[] lists the start of each new NID, and is indexed by NID+1.
accum_all2(chks,wrks,nh,rows);
} |
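The grouping above is a counting sort keyed by NID: histogram the rows per group, prefix-sum to get group starts, then scatter row indices. A Python sketch of the same steps (assumes at least one non-negative nid):

def group_rows_by_nid(nnids):
    n_groups = max(nnids) + 1
    nh = [0] * (n_groups + 1)
    for nid in nnids:                 # histogram of rows per NID
        if nid >= 0:
            nh[nid + 1] += 1
    for i in range(n_groups):         # prefix sum: nh[i] = start of group i
        nh[i + 1] += nh[i]
    rows = [0] * len(nnids)           # may over-allocate if some nids are -1
    for row, nid in enumerate(nnids):
        if nid >= 0:
            rows[nh[nid]] = row       # scatter; nh[nid] ends at the group end
            nh[nid] += 1
    return nh, rows

group_rows_by_nid([1, 0, 1, -1, 0])   # -> ([2, 4, 4], [1, 4, 0, 2, 0])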
python | def apply_branchset(self, branchset_node, branchset):
"""
See superclass' method for description and signature specification.
    Parses the branchset node's attribute ``@applyToBranches`` to apply
    subsequent branchsets to preceding branches selectively. A branching
    level can have more than one branchset for exactly this reason: different
    branchsets can apply to different open ends.
    Checks that the branchset is applied only to branches on the previous
    branching level that do not yet have a child branchset.
"""
apply_to_branches = branchset_node.attrib.get('applyToBranches')
if apply_to_branches:
apply_to_branches = apply_to_branches.split()
for branch_id in apply_to_branches:
if branch_id not in self.branches:
raise LogicTreeError(
branchset_node, self.filename,
"branch '%s' is not yet defined" % branch_id)
branch = self.branches[branch_id]
if branch.child_branchset is not None:
raise LogicTreeError(
branchset_node, self.filename,
"branch '%s' already has child branchset" % branch_id)
if branch not in self.open_ends:
raise LogicTreeError(
branchset_node, self.filename,
'applyToBranches must reference only branches '
'from previous branching level')
branch.child_branchset = branchset
else:
for branch in self.open_ends:
branch.child_branchset = branchset |
python | def dump_statements(stmts, fname, protocol=4):
"""Dump a list of statements into a pickle file.
Parameters
----------
    stmts : list
        The list of statements to dump.
    fname : str
        The name of the pickle file to dump statements into.
protocol : Optional[int]
The pickle protocol to use (use 2 for Python 2 compatibility).
Default: 4
"""
logger.info('Dumping %d statements into %s...' % (len(stmts), fname))
with open(fname, 'wb') as fh:
pickle.dump(stmts, fh, protocol=protocol) |
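A natural read-side counterpart; a minimal sketch, not a function taken from the source library:

import pickle

def load_statements(fname):
    """Load a list of statements dumped by dump_statements."""
    with open(fname, 'rb') as fh:
        return pickle.load(fh)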
python | def nucmer(args):
"""
%prog nucmer mappings.bed MTR.fasta assembly.fasta chr1 3
Select specific chromosome region based on MTR mapping. The above command
will extract chr1:2,000,001-3,000,000.
"""
p = OptionParser(nucmer.__doc__)
opts, args = p.parse_args(args)
if len(args) != 5:
sys.exit(not p.print_help())
mapbed, mtrfasta, asmfasta, chr, idx = args
idx = int(idx)
m1 = 1000000
bedfile = "sample.bed"
bed = Bed()
bed.add("\t".join(str(x) for x in (chr, (idx - 1) * m1, idx * m1)))
bed.print_to_file(bedfile)
cmd = "intersectBed -a {0} -b {1} -nonamecheck -sorted | cut -f4".\
format(mapbed, bedfile)
idsfile = "query.ids"
sh(cmd, outfile=idsfile)
sfasta = fastaFromBed(bedfile, mtrfasta)
qfasta = "query.fasta"
cmd = "faSomeRecords {0} {1} {2}".format(asmfasta, idsfile, qfasta)
sh(cmd)
cmd = "nucmer {0} {1}".format(sfasta, qfasta)
sh(cmd)
mummerplot_main(["out.delta", "--refcov=0"])
sh("mv out.pdf {0}.{1}.pdf".format(chr, idx)) |
java | @Override
public CommerceNotificationQueueEntry findByGroupId_First(long groupId,
OrderByComparator<CommerceNotificationQueueEntry> orderByComparator)
throws NoSuchNotificationQueueEntryException {
CommerceNotificationQueueEntry commerceNotificationQueueEntry = fetchByGroupId_First(groupId,
orderByComparator);
if (commerceNotificationQueueEntry != null) {
return commerceNotificationQueueEntry;
}
StringBundler msg = new StringBundler(4);
msg.append(_NO_SUCH_ENTITY_WITH_KEY);
msg.append("groupId=");
msg.append(groupId);
msg.append("}");
throw new NoSuchNotificationQueueEntryException(msg.toString());
} |
java | @Override
public EClass getIfcConstructionProductResourceType() {
if (ifcConstructionProductResourceTypeEClass == null) {
ifcConstructionProductResourceTypeEClass = (EClass) EPackage.Registry.INSTANCE
.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(133);
}
return ifcConstructionProductResourceTypeEClass;
} |
python | def _prune_maps_to_sequences(self):
''' When we merge the SIFTS maps, we can extend the sequence maps such that they have elements in their domain that we removed
from the sequence e.g. 1A2P, residue 'B 3 ' is removed because Rosetta barfs on it. Here, we prune the maps so that their
domains do not have elements that were removed from sequences.'''
for c, seq in self.atom_sequences.iteritems():
res_ids = [r[0] for r in seq]
for_removal = []
for k, _, _ in self.atom_to_seqres_sequence_maps[c]:
if k not in res_ids:
for_removal.append(k)
for res_id in for_removal:
self.atom_to_seqres_sequence_maps[c].remove(res_id) |
python | def list_active_vms(cwd=None):
'''
Return a list of machine names for active virtual machine on the host,
which are defined in the Vagrantfile at the indicated path.
CLI Example:
.. code-block:: bash
salt '*' vagrant.list_active_vms cwd=/projects/project_1
'''
vms = []
cmd = 'vagrant status'
reply = __salt__['cmd.shell'](cmd, cwd=cwd)
log.info('--->\n%s', reply)
for line in reply.split('\n'): # build a list of the text reply
tokens = line.strip().split()
if len(tokens) > 1:
if tokens[1] == 'running':
vms.append(tokens[0])
return vms |
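Illustrative only: a typical `vagrant status` reply and what the tokenizing above keeps (machine names whose second column is 'running'):

reply = '''Current machine states:

web                       running (virtualbox)
db                        poweroff (virtualbox)
'''
vms = [t[0] for t in (line.strip().split() for line in reply.split('\n'))
       if len(t) > 1 and t[1] == 'running']
# vms == ['web']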
java | public final EObject ruleXExpressionInClosure() throws RecognitionException {
EObject current = null;
Token otherlv_2=null;
EObject lv_expressions_1_0 = null;
enterRule();
try {
// InternalXbase.g:2641:2: ( ( () ( ( (lv_expressions_1_0= ruleXExpressionOrVarDeclaration ) ) (otherlv_2= ';' )? )* ) )
// InternalXbase.g:2642:2: ( () ( ( (lv_expressions_1_0= ruleXExpressionOrVarDeclaration ) ) (otherlv_2= ';' )? )* )
{
// InternalXbase.g:2642:2: ( () ( ( (lv_expressions_1_0= ruleXExpressionOrVarDeclaration ) ) (otherlv_2= ';' )? )* )
// InternalXbase.g:2643:3: () ( ( (lv_expressions_1_0= ruleXExpressionOrVarDeclaration ) ) (otherlv_2= ';' )? )*
{
// InternalXbase.g:2643:3: ()
// InternalXbase.g:2644:4:
{
if ( state.backtracking==0 ) {
current = forceCreateModelElement(
grammarAccess.getXExpressionInClosureAccess().getXBlockExpressionAction_0(),
current);
}
}
// InternalXbase.g:2650:3: ( ( (lv_expressions_1_0= ruleXExpressionOrVarDeclaration ) ) (otherlv_2= ';' )? )*
loop44:
do {
int alt44=2;
int LA44_0 = input.LA(1);
if ( ((LA44_0>=RULE_STRING && LA44_0<=RULE_ID)||LA44_0==19||(LA44_0>=35 && LA44_0<=36)||LA44_0==41||LA44_0==49||(LA44_0>=51 && LA44_0<=52)||LA44_0==54||LA44_0==58||LA44_0==60||(LA44_0>=64 && LA44_0<=81)||LA44_0==83) ) {
alt44=1;
}
switch (alt44) {
case 1 :
// InternalXbase.g:2651:4: ( (lv_expressions_1_0= ruleXExpressionOrVarDeclaration ) ) (otherlv_2= ';' )?
{
// InternalXbase.g:2651:4: ( (lv_expressions_1_0= ruleXExpressionOrVarDeclaration ) )
// InternalXbase.g:2652:5: (lv_expressions_1_0= ruleXExpressionOrVarDeclaration )
{
// InternalXbase.g:2652:5: (lv_expressions_1_0= ruleXExpressionOrVarDeclaration )
// InternalXbase.g:2653:6: lv_expressions_1_0= ruleXExpressionOrVarDeclaration
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXExpressionInClosureAccess().getExpressionsXExpressionOrVarDeclarationParserRuleCall_1_0_0());
}
pushFollow(FOLLOW_42);
lv_expressions_1_0=ruleXExpressionOrVarDeclaration();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
if (current==null) {
current = createModelElementForParent(grammarAccess.getXExpressionInClosureRule());
}
add(
current,
"expressions",
lv_expressions_1_0,
"org.eclipse.xtext.xbase.Xbase.XExpressionOrVarDeclaration");
afterParserOrEnumRuleCall();
}
}
}
// InternalXbase.g:2670:4: (otherlv_2= ';' )?
int alt43=2;
int LA43_0 = input.LA(1);
if ( (LA43_0==57) ) {
alt43=1;
}
switch (alt43) {
case 1 :
// InternalXbase.g:2671:5: otherlv_2= ';'
{
otherlv_2=(Token)match(input,57,FOLLOW_43); if (state.failed) return current;
if ( state.backtracking==0 ) {
newLeafNode(otherlv_2, grammarAccess.getXExpressionInClosureAccess().getSemicolonKeyword_1_1());
}
}
break;
}
}
break;
default :
break loop44;
}
} while (true);
}
}
if ( state.backtracking==0 ) {
leaveRule();
}
}
catch (RecognitionException re) {
recover(input,re);
appendSkippedTokens();
}
finally {
}
return current;
} |
java | public String getTextForExpression(DJGroup group, DJGroup childGroup, String type) {
return "new Double( $V{" + getReportName() + "_" + getGroupVariableName(childGroup) + "}.doubleValue() / $V{" + getReportName() + "_" + getGroupVariableName(type,group.getColumnToGroupBy().getColumnProperty().getProperty()) + "}.doubleValue())";
} |
java | public final void mISO() throws RecognitionException {
try {
int _type = ISO;
int _channel = DEFAULT_TOKEN_CHANNEL;
// druidG.g:602:6: ( ( 'ISO' ) )
// druidG.g:602:7: ( 'ISO' )
{
// druidG.g:602:7: ( 'ISO' )
// druidG.g:602:8: 'ISO'
{
match("ISO");
}
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
} |
java | @Override
public com.liferay.commerce.model.CommerceCountry fetchCommerceCountryByUuidAndGroupId(
String uuid, long groupId) {
return _commerceCountryLocalService.fetchCommerceCountryByUuidAndGroupId(uuid,
groupId);
} |
python | def maps_get_rules_output_rules_policyname(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
maps_get_rules = ET.Element("maps_get_rules")
config = maps_get_rules
output = ET.SubElement(maps_get_rules, "output")
rules = ET.SubElement(output, "rules")
policyname = ET.SubElement(rules, "policyname")
policyname.text = kwargs.pop('policyname')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
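Illustrative: the element tree built above serializes to a simple nested document (note the initial "config" element is discarded by the reassignment). A direct re-construction, bypassing the callback plumbing:

import xml.etree.ElementTree as ET

cfg = ET.Element('maps_get_rules')
rules = ET.SubElement(ET.SubElement(cfg, 'output'), 'rules')
ET.SubElement(rules, 'policyname').text = 'POLICY1'
print(ET.tostring(cfg).decode())
# <maps_get_rules><output><rules><policyname>POLICY1</policyname></rules></output></maps_get_rules>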
java | void set(final long longVal) {
this.type = ClassWriter.LONG;
this.longVal = longVal;
this.hashCode = 0x7FFFFFFF & (type + (int) longVal);
} |
python | def get_vexrc(options, environ):
"""Get a representation of the contents of the config file.
:returns:
a Vexrc instance.
"""
# Complain if user specified nonexistent file with --config.
# But we don't want to complain just because ~/.vexrc doesn't exist.
if options.config and not os.path.exists(options.config):
raise exceptions.InvalidVexrc("nonexistent config: {0!r}".format(options.config))
filename = options.config or os.path.expanduser('~/.vexrc')
vexrc = config.Vexrc.from_file(filename, environ)
return vexrc |