language (stringclasses; 2 values) | func_code_string (stringlengths; 63–466k) |
---|---|
python
|
def endpoint_from_job(self, job_name, initial_instance_count, instance_type,
deployment_image=None, name=None, role=None, wait=True,
model_environment_vars=None, vpc_config_override=vpc_utils.VPC_CONFIG_DEFAULT,
accelerator_type=None):
"""Create an ``Endpoint`` using the results of a successful training job.
Specify the job name, Docker image containing the inference code, and hardware configuration to deploy
the model. Internally, the API creates an Amazon SageMaker model (that describes the model artifacts and
the Docker image containing inference code), endpoint configuration (describing the hardware to deploy
for hosting the model), and creates an ``Endpoint`` (launches the EC2 instances and deploys the model on them).
In response, the API returns the endpoint name to which you can send requests for inferences.
Args:
job_name (str): Name of the training job to deploy the results of.
initial_instance_count (int): Minimum number of EC2 instances to launch. The actual number of
active instances for an endpoint at any given time varies due to autoscaling.
instance_type (str): Type of EC2 instance to deploy to an endpoint for prediction,
for example, 'ml.c4.xlarge'.
deployment_image (str): The Docker image which defines the inference code to be used as the entry point for
accepting prediction requests. If not specified, uses the image used for the training job.
name (str): Name of the ``Endpoint`` to create. If not specified, uses the training job name.
role (str): An AWS IAM role (either name or full ARN). The Amazon SageMaker training jobs and APIs
that create Amazon SageMaker endpoints use this role to access training data and model artifacts.
You must grant sufficient permissions to this role.
wait (bool): Whether to wait for the endpoint deployment to complete before returning (default: True).
model_environment_vars (dict[str, str]): Environment variables to set on the model container
(default: None).
vpc_config_override (dict[str, list[str]]): Overrides VpcConfig set on the model.
Default: use VpcConfig from training job.
* 'Subnets' (list[str]): List of subnet ids.
* 'SecurityGroupIds' (list[str]): List of security group ids.
accelerator_type (str): Type of Elastic Inference accelerator to attach to the instance. For example,
'ml.eia1.medium'. For more information: https://docs.aws.amazon.com/sagemaker/latest/dg/ei.html
Returns:
str: Name of the ``Endpoint`` that is created.
"""
job_desc = self.sagemaker_client.describe_training_job(TrainingJobName=job_name)
output_url = job_desc['ModelArtifacts']['S3ModelArtifacts']
deployment_image = deployment_image or job_desc['AlgorithmSpecification']['TrainingImage']
role = role or job_desc['RoleArn']
name = name or job_name
vpc_config_override = _vpc_config_from_training_job(job_desc, vpc_config_override)
return self.endpoint_from_model_data(model_s3_location=output_url, deployment_image=deployment_image,
initial_instance_count=initial_instance_count, instance_type=instance_type,
name=name, role=role, wait=wait,
model_environment_vars=model_environment_vars,
model_vpc_config=vpc_config_override, accelerator_type=accelerator_type)
|
python
|
def double_typos(self):
"""letter combinations two typos away from word"""
return {e2 for e1 in self.typos()
for e2 in Word(e1).typos()}
|
python
|
def cross_track_distance(self, start_point, end_point):
'''
Return the cross track distance from this point to the line between two
points::
              * end_point
             /
            /
           /  * this point
          /
         /
        *
   start_point
:param start_point: First point on the line
:type start_point: Point
:param end_point: Second point on the line
:type end_point: Point
:returns: The perpendicular distance to the line between ``start_point``
and ``end_point``, where distance on the right of ``start_point``
is positive and distance on the left is negative
:rtype: float
'''
dist = start_point.distance_to(self)
bearing_to_end = start_point.bearing_to(end_point).radians
bearing_to_point = start_point.bearing_to(self).radians
return math.asin(math.sin(dist / EARTH_RADIUS) * \
math.sin(bearing_to_point - bearing_to_end)) * \
EARTH_RADIUS
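# A minimal usage sketch (hedged): assumes a Point class exposing the
# distance_to() and bearing_to() methods used above, with EARTH_RADIUS in
# the same units as the returned distance.
#   start = Point(50.0, -5.0)
#   end = Point(58.0, -3.0)
#   here = Point(52.0, -4.0)
#   xtd = here.cross_track_distance(start, end)  # > 0: right of track, < 0: left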
|
python
|
def _GetTaskStorageFilePath(self, task):
"""Retrieves the path of a task storage file in the temporary directory.
Args:
task (Task): task.
Returns:
str: path of a task storage file in the temporary directory.
"""
filename = '{0:s}.plaso'.format(task.identifier)
return os.path.join(self._task_storage_path, filename)
|
python
|
def get_nodes_by_source(self, graph, source_full_name):
"""yields nodes from graph are the specified source."""
parts = source_full_name.split('.')
if len(parts) == 1:
target_source, target_table = parts[0], None
elif len(parts) == 2:
target_source, target_table = parts
else: # len(parts) > 2 or len(parts) == 0
msg = (
'Invalid source selector value "{}". Sources must be of the '
'form `${{source_name}}` or '
'`${{source_name}}.${{target_name}}`'
).format(source_full_name)
raise dbt.exceptions.RuntimeException(msg)
for node, real_node in self.source_nodes(graph):
if target_source not in (real_node.source_name, SELECTOR_GLOB):
continue
if target_table in (None, real_node.name, SELECTOR_GLOB):
yield node
|
python
|
def has_listener(self, evt_name, fn):
"""ๆๅฎlistenerๆฏๅฆๅญๅจ
:params evt_name: ไบไปถๅ็งฐ
:params fn: ่ฆๆณจๅ็่งฆๅๅฝๆฐๅฝๆฐ
"""
listeners = self.__get_listeners(evt_name)
return fn in listeners
|
java
|
public static long[] getContentRange(Map<String, String> headers) {
String value = getHeaderValue(headers, HTTP_CONTENT_RANGE_HEADER_UP);
if (value == null) return null;
return parseContentRange(value.trim());
}
|
python
|
def std_salt(length=16, lowercase=True):
"""Generates a cryptographically sane salt of 'length' (default: 16) alphanumeric
characters
"""
alphabet = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
if lowercase is True:
alphabet += "abcdefghijklmnopqrstuvwxyz"
chars = []
for i in range(length):
chars.append(choice(alphabet))
return "".join(chars)
|
python
|
def EnsureTempDirIsSane(directory):
"""Checks that the directory exists and has the correct permissions set."""
if not os.path.isabs(directory):
raise ErrorBadPath("Directory %s is not absolute" % directory)
if os.path.isdir(directory):
# The temp dir already exists, we probably created it already but
# let's check to make sure.
if not client_utils.VerifyFileOwner(directory):
# Just delete it, it's only temp dirs and we don't own it. If
# this goes wrong we just raise.
shutil.rmtree(directory)
if not os.path.isdir(directory):
os.makedirs(directory)
# Make directory 700 before we write the file
if sys.platform == "win32":
from grr_response_client import client_utils_windows # pylint: disable=g-import-not-at-top
client_utils_windows.WinChmod(directory,
["FILE_GENERIC_READ", "FILE_GENERIC_WRITE"])
else:
os.chmod(directory, stat.S_IXUSR | stat.S_IRUSR | stat.S_IWUSR)
|
java
|
public final void synpred28_DRL5Expressions_fragment() throws RecognitionException {
// src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:549:15: ( DOT ID )
// src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:549:16: DOT ID
{
match(input,DOT,FOLLOW_DOT_in_synpred28_DRL5Expressions2916); if (state.failed) return;
match(input,ID,FOLLOW_ID_in_synpred28_DRL5Expressions2918); if (state.failed) return;
}
}
|
java
|
@Nullable
public static String[] optStringArray(@Nullable Bundle bundle, @Nullable String key) {
return optStringArray(bundle, key, new String[0]);
}
|
python
|
def add_request_participants(self, issue_id_or_key, users_list):
"""
Add users as participants to an existing customer request
The calling user must have permission to manage participants for this customer request
:param issue_id_or_key: str
:param users_list: list
:return:
"""
url = 'rest/servicedeskapi/request/{}/participant'.format(issue_id_or_key)
data = {'usernames': users_list}
return self.post(url, data=data)
|
java
|
public static String patternForMoment(
DisplayMode dateMode,
DisplayMode timeMode,
Locale locale
) {
return FORMAT_PATTERN_PROVIDER.getDateTimePattern(dateMode, timeMode, locale);
}
|
python
|
def _model(self, beta):
""" Creates the structure of the model
Parameters
----------
beta : np.array
Contains untransformed starting values for latent variables
Returns
----------
lambda : np.array
Contains the values for the conditional volatility series
Y : np.array
Contains the length-adjusted time series (accounting for lags)
scores : np.array
Contains the score terms for the time series
"""
Y = np.array(self.data[self.max_lag:self.data.shape[0]])
X = np.ones(Y.shape[0])
scores = np.zeros(Y.shape[0])
# Transform latent variables
parm = np.array([self.latent_variables.z_list[k].prior.transform(beta[k]) for k in range(beta.shape[0])])
lmda = np.zeros(Y.shape[0])
theta = np.zeros(Y.shape[0])
# Loop over time series
for t in range(0,Y.shape[0]):
if t < self.max_lag:
lmda[t] = parm[-len(self.X_names)*2]/(1-np.sum(parm[:self.p]))
theta[t] = np.dot(self.X[t],parm[-len(self.X_names):])
else:
# Loop over GARCH terms
for p_term in range(0,self.p):
lmda[t] += parm[p_term]*lmda[t-p_term-1]
# Loop over Score terms
for q_term in range(0,self.q):
lmda[t] += parm[self.p+q_term]*scores[t-q_term-1]
if self.leverage is True:
lmda[t] += parm[-(len(self.X_names)*2)-3]*np.sign(-(Y[t-1]-theta[t-1]))*(scores[t-1]+1)
lmda[t] += np.dot(self.X[t],parm[-len(self.X_names)*2:-len(self.X_names)])
theta[t] = np.dot(self.X[t],parm[-len(self.X_names):]) + parm[-(len(self.X_names)*2)-1]*np.exp(lmda[t]/2.0)
scores[t] = (((parm[self.p+self.q]+1.0)*np.power(Y[t]-theta[t],2))/float(parm[self.p+self.q]*np.exp(lmda[t]) + np.power(Y[t]-theta[t],2))) - 1.0
return lmda, Y, scores, theta
|
python
|
def on(self, state):
""" Turn on or off.
:param state: True (on) or False (off).
"""
self._on = state
cmd = self.command_set.off()
if state:
cmd = self.command_set.on()
self.send(cmd)
|
java
|
public static int cusolverRfExtractSplitFactorsHost(
cusolverRfHandle handle,
/** Output (in the host memory) */
Pointer h_nnzL,
Pointer h_csrRowPtrL,
Pointer h_csrColIndL,
Pointer h_csrValL,
Pointer h_nnzU,
Pointer h_csrRowPtrU,
Pointer h_csrColIndU,
Pointer h_csrValU)
{
return checkResult(cusolverRfExtractSplitFactorsHostNative(handle, h_nnzL, h_csrRowPtrL, h_csrColIndL, h_csrValL, h_nnzU, h_csrRowPtrU, h_csrColIndU, h_csrValU));
}
|
java
|
public SVGPath smoothQuadTo(double x, double y) {
return append(PATH_SMOOTH_QUAD_TO).append(x).append(y);
}
|
java
|
@Override
public void sessionClosed(IoSession session) throws Exception {
final InputStream in = (InputStream) session.getAttribute(KEY_IN);
final OutputStream out = (OutputStream) session.getAttribute(KEY_OUT);
try {
in.close();
} finally {
out.close();
}
}
|
java
|
public KamEdge resolve(final Kam kam, final KAMStore kAMStore,
final String subject, final RelationshipType r,
final String object, Map<String, String> nsmap, Equivalencer eq)
throws ResolverException {
if (nulls(kam, kAMStore, subject, r, object, eq)) {
throw new InvalidArgument(
"null parameter(s) provided to resolve API.");
}
// resolve subject bel term to kam node.
final KamNode subjectKamNode = resolve(kam, kAMStore, subject, nsmap, eq);
if (subjectKamNode == null) return null;
// resolve object bel term to kam node.
final KamNode objectKamNode = resolve(kam, kAMStore, object, nsmap, eq);
if (objectKamNode == null) return null;
// only resolve edge if kam nodes resolved
return resolveEdge(kam, subjectKamNode, r, objectKamNode);
}
|
java
|
public static void initialise(AcceptListenerFactory _acceptListenerFactory) {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(tc, "initalise");
acceptListenerFactory = _acceptListenerFactory;
// Create the maintainer of the configuration.
Framework framework = Framework.getInstance();
if (framework == null) {
state = State.INITIALISATION_FAILED;
} else {
state = State.INITIALISED;
// Extract the chain reference.
connectionTracker = new OutboundConnectionTracker(framework);
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "initalise");
}
|
python
|
def convert_to_pmag_data_list(self, lst_or_dict="lst", df=None):
"""
Take MagicDataFrame and turn it into a list of dictionaries.
This will have the same format as reading in a 2.5 file
with pmag.magic_read(), i.e.:
if "lst":
[{"sample": "samp_name", "azimuth": 12, ...}, {...}]
if "dict":
{"samp_name": {"azimuth": 12, ...}, "samp_name2": {...}, ...}
NOTE: "dict" not recommended with 3.0, as one sample can have
many rows, which means that dictionary items can be overwritten
"""
if df is None:
df = self.df
# replace np.nan / None with ""
df = df.where(df.notnull(), "")
# string-i-fy everything
df = df.astype(str)
if lst_or_dict == "lst":
return list(df.T.apply(dict))
else:
return {str(i[df.index.name.split(' ')[0]]): dict(i) for i in list(df.T.apply(dict))}
|
python
|
def _create_field_mapping_action(self):
"""Create action for showing field mapping dialog."""
icon = resources_path('img', 'icons', 'show-mapping-tool.svg')
self.action_field_mapping = QAction(
QIcon(icon),
self.tr('InaSAFE Field Mapping Tool'),
self.iface.mainWindow())
self.action_field_mapping.setStatusTip(self.tr(
'Assign field mapping to layer.'))
self.action_field_mapping.setWhatsThis(self.tr(
'Use this tool to assign field mapping in layer.'))
self.action_field_mapping.setEnabled(False)
self.action_field_mapping.triggered.connect(self.show_field_mapping)
self.add_action(
self.action_field_mapping, add_to_toolbar=self.full_toolbar)
|
python
|
def parse_import_names(sourcecode, top_level=True, fpath=None, branch=False):
"""
Finds all function names in a file without importing it
Args:
sourcecode (str):
Returns:
list: func_names
CommandLine:
python -m utool.util_inspect parse_import_names
References:
https://stackoverflow.com/questions/20445733/how-to-tell-which-modules-have-been-imported-in-some-source-code
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_inspect import * # NOQA
>>> import utool as ut
>>> fpath = ut.util_inspect.__file__.replace('.pyc', '.py')
>>> #fpath = ut.truepath('~/code/bintrees/bintrees/avltree.py')
>>> sourcecode = ut.readfrom(fpath)
>>> func_names = parse_import_names(sourcecode)
>>> result = ('func_names = %s' % (ut.repr2(func_names),))
>>> print(result)
"""
import ast
import_names = []
if six.PY2:
import utool as ut
sourcecode = ut.ensure_unicode(sourcecode)
encoded = sourcecode.encode('utf8')
pt = ast.parse(encoded)
else:
pt = ast.parse(sourcecode)
modules = []
class ImportVisitor(ast.NodeVisitor):
def _parse_alias_list(self, aliases):
for alias in aliases:
if alias.asname is not None:
import_names.append(alias.asname)
else:
if '.' not in alias.name:
import_names.append(alias.name)
def visit_Import(self, node):
self._parse_alias_list(node.names)
self.generic_visit(node)
for alias in node.names:
modules.append(alias.name)
def visit_ImportFrom(self, node):
self._parse_alias_list(node.names)
self.generic_visit(node)
for alias in node.names:
prefix = ''
if node.level:
if fpath is not None:
from xdoctest import static_analysis as static
modparts = static.split_modpath(os.path.abspath(fpath))[1].replace('\\', '/').split('/')
parts = modparts[:-node.level]
# parts = os.path.split(static.split_modpath(os.path.abspath(fpath))[1])[:-node.level]
prefix = '.'.join(parts) + '.'
# prefix = '.'.join(os.path.split(fpath)[-node.level:]) + '.'
else:
prefix = '.' * node.level
# modules.append(node.level * '.' + node.module + '.' + alias.name)
# modules.append(prefix + node.module + '.' + alias.name)
modules.append(prefix + node.module)
def visit_FunctionDef(self, node):
# Ignore modules imported in functions
if not top_level:
self.generic_visit(node)
# ast.NodeVisitor.generic_visit(self, node)
def visit_ClassDef(self, node):
if not top_level:
self.generic_visit(node)
# ast.NodeVisitor.generic_visit(self, node)
def visit_If(self, node):
if not branch:
# TODO: determine how to figure out if a name is in all branches
if not _node_is_main_if(node):
# Ignore the main statement
self.generic_visit(node)
try:
ImportVisitor().visit(pt)
except Exception:
pass
return import_names, modules
|
java
|
public void setPackageId(String packageId) throws InvalidArgumentException {
if (sourceUnavailable) {
throw new InvalidArgumentException("The source none has be set to true already. Can not have packageId set when source none set to true.");
}
if (Utils.isNullOrEmpty(packageId)) {
throw new InvalidArgumentException("The packageId parameter can not be null or empty.");
}
this.packageId = packageId;
}
|
python
|
def evaluate_script(self, script, **kwargs):
"""
Evaluates the supplied script (python-executable string).
Useful for testing your scripts!
globals already include all of numpy objects plus
self = self
f = self.f
bg = self.bg
and all the current guess parameters and constants
kwargs are added to globals for script evaluation.
"""
self._set_data_globals.update(kwargs)
return eval(script, self._set_data_globals)
|
java
|
@Override
public long dynamicQueryCount(DynamicQuery dynamicQuery,
Projection projection) {
return commerceAvailabilityEstimatePersistence.countWithDynamicQuery(dynamicQuery,
projection);
}
|
java
|
public static Trades adaptToTrades(GlobitexTrades globitexTrades, CurrencyPair currencyPair) {
List<Trade> trades = new ArrayList<>();
globitexTrades
.getRecentTrades()
.forEach(
globitexTrade -> {
trades.add(adaptToTrade(globitexTrade, currencyPair));
});
return new Trades(trades, Trades.TradeSortType.SortByTimestamp);
}
|
java
|
public static void setChar (@Nonnull final CharSequence aSeq, final int nIndex, @Nonnull final Codepoint aCodepoint)
{
setChar (aSeq, nIndex, aCodepoint.getValue ());
}
|
java
|
protected boolean executeExportOperation(IJarExportRunnable op, IStatus wizardPageStatus) {
try {
getContainer().run(true, true, op);
} catch (InterruptedException e) {
return false;
} catch (InvocationTargetException ex) {
if (ex.getTargetException() != null) {
ExceptionHandler.handle(ex, getShell(), FatJarPackagerMessages.JarPackageWizard_jarExportError_title, FatJarPackagerMessages.JarPackageWizard_jarExportError_message);
return false;
}
}
IStatus status= op.getStatus();
if (!status.isOK()) {
if (!wizardPageStatus.isOK()) {
if (!(status instanceof MultiStatus))
status= new MultiStatus(status.getPlugin(), status.getCode(), status.getMessage(), status.getException());
((MultiStatus) status).add(wizardPageStatus);
}
ErrorDialog.openError(getShell(), FatJarPackagerMessages.JarPackageWizard_jarExport_title, null, status);
return !(status.matches(IStatus.ERROR));
} else if (!wizardPageStatus.isOK()) {
ErrorDialog.openError(getShell(), FatJarPackagerMessages.JarPackageWizard_jarExport_title, null, wizardPageStatus);
}
return true;
}
|
python
|
def tracing(pattern=None, out=None):
"""Print executed lines to stdout."""
_trace = partial(trace_line, pattern)
if out is None:
out = sys.stdout
with redirect_stdout(out):
sys.settrace(_trace)
try:
yield
finally:
sys.settrace(None)
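# Usage sketch (assumes this generator is wrapped with
# contextlib.contextmanager, and that trace_line and redirect_stdout are in scope):
#   with tracing(pattern='my_module'):
#       run_workload()   # executed lines matching the pattern are printed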
|
java
|
@Override
public void removeByGroupId(long groupId) {
for (CPMeasurementUnit cpMeasurementUnit : findByGroupId(groupId,
QueryUtil.ALL_POS, QueryUtil.ALL_POS, null)) {
remove(cpMeasurementUnit);
}
}
|
python
|
def _get(self):
"""get and parse data stored in self.path."""
data, stat = self.zk.get(self.path)
if not len(data):
return {}, stat.version
if self.OLD_SEPARATOR in data:
return self._get_old()
return json.loads(data), stat.version
|
java
|
@Override
public DescribeTapeArchivesResult describeTapeArchives(DescribeTapeArchivesRequest request) {
request = beforeClientExecution(request);
return executeDescribeTapeArchives(request);
}
|
java
|
private ReturnCodes parseQValue(byte[] data, int start) {
// 414433 - redo the qvalue parsing to handle more error conditions
ReturnCodes rc = new ReturnCodes(false);
int len = data.length;
int index = skipWhiteSpace(data, start);
// we should be pointing at "q=X"
// technically it's supposed to be just 'q', but check uppercase too
if (index >= len || ('q' != data[index] && 'Q' != data[index])) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Non qvalue found");
}
rc.setIntValue(index);
return rc;
}
index++;
index = skipWhiteSpace(data, index);
if (index >= len || HttpBaseMessage.EQUALS != data[index]) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Qvalue missing equals");
}
rc.setIntValue(index);
return rc;
}
index++;
index = skipWhiteSpace(data, index);
// now we should be pointing at a float value
if (index < len && ('1' == data[index] || '0' == data[index])) {
// default the "approval" flag based on leading digit
boolean leadingOne = ('1' == data[index]);
rc.setBooleanValue(leadingOne);
if (++index >= len || ',' == data[index]) {
// reached end of data, single digit
} else if (' ' == data[index] || '\t' == data[index]) {
// whitespace, scan for the comma
index = skipWhiteSpace(data, index);
if (index < len && ',' != data[index]) {
// nonwhitespace found [q=1 q] invalid
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Invalid char after trailing whitespace (1) ["
+ data[index] + "]");
}
rc.setBooleanValue(false);
}
} else if ('.' != data[index]) {
// required to be a period for the float
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Non-period found after leading digit");
}
rc.setBooleanValue(false);
} else {
// now we must only have up to 3 digits, or 1.000
// stop at eol, whitespace, or comma
int numDigits = 0;
while (++index < len && ',' != data[index]) {
if ('0' <= data[index] && '9' >= data[index]) {
numDigits++;
if ('0' != data[index]) {
if (leadingOne) {
// 1.000 is the only valid 1* value
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Non-zero after a leading one");
}
rc.setBooleanValue(false);
break; // out of while
}
rc.setBooleanValue(true);
}
} else {
// non-digit found
break; // out of while
}
} // while not end of data
if (3 < numDigits) {
// too many digits
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Too many digits in float (" + numDigits + ")");
}
rc.setBooleanValue(false);
} else if (index >= len || ',' == data[index]) {
// end of qvalue found
} else if (' ' == data[index] || '\t' == data[index]) {
// whitespace, scan for the comma
index = skipWhiteSpace(data, ++index);
if (index < len && ',' != data[index]) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Invalid char after trailing whitespace (2) ["
+ data[index] + "]");
}
rc.setBooleanValue(false);
}
} else {
// invalid character found
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Invalid char after number [" + data[index] + "]");
}
rc.setBooleanValue(false);
}
}
} // starts with a digit
// index is pointing to the last char looked at
rc.setIntValue(index);
return rc;
}
|
java
|
public MigrateArgs<K> keys(Iterable<K> keys) {
LettuceAssert.notNull(keys, "Keys must not be null");
for (K key : keys) {
this.keys.add(key);
}
return this;
}
|
java
|
@Override
public Request<ReplaceTransitGatewayRouteRequest> getDryRunRequest() {
Request<ReplaceTransitGatewayRouteRequest> request = new ReplaceTransitGatewayRouteRequestMarshaller().marshall(this);
request.addParameter("DryRun", Boolean.toString(true));
return request;
}
|
java
|
private static synchronized MarshallerBuilder getMarshallerBuilder() {
ServiceLoader<MarshallerBuilder> builders = ServiceLoader.load(MarshallerBuilder.class);
Iterator<MarshallerBuilder> it = builders.iterator();
if (it.hasNext()) {
MarshallerBuilder marshallerBuilder = it.next();
logger.debug("Found custom marshaller builder {} that is going to be used instead of the default", marshallerBuilder);
return marshallerBuilder;
}
return new BaseMarshallerBuilder();
}
|
java
|
public static String bytesToString(
final byte[] arr, final boolean signed, final boolean littleEndian, final String sep) {
final StringBuilder sb = new StringBuilder();
final int mask = (signed) ? 0XFFFFFFFF : 0XFF;
final int arrLen = arr.length;
if (littleEndian) {
for (int i = 0; i < (arrLen - 1); i++) {
sb.append(arr[i] & mask).append(sep);
}
sb.append(arr[arrLen - 1] & mask);
} else {
for (int i = arrLen; i-- > 1; ) {
sb.append(arr[i] & mask).append(sep);
}
sb.append(arr[0] & mask);
}
return sb.toString();
}
|
java
|
public BatchDeleteBuildsResult withBuildsNotDeleted(BuildNotDeleted... buildsNotDeleted) {
if (this.buildsNotDeleted == null) {
setBuildsNotDeleted(new java.util.ArrayList<BuildNotDeleted>(buildsNotDeleted.length));
}
for (BuildNotDeleted ele : buildsNotDeleted) {
this.buildsNotDeleted.add(ele);
}
return this;
}
|
java
|
public String getString(final String key, final Object... args) {
String value = getString(key);
if (value == null) {
value = key;
}
MessageFormat mf = new MessageFormat(value);
mf.setLocale(locale);
return mf.format(args, new StringBuffer(), null).toString();
}
|
java
|
public void init(Record record, BaseField field, boolean bNewOnChange)
{
super.init(record);
m_field = field;
m_bNewOnChange = bNewOnChange;
}
|
python
|
def map_column(self, only_use_one=None, source_column=None, species=None, target_selection= None, verbose=False):
"""
Uses the BridgeDB service to look up analogous identifiers from a wide
selection of other databases
:param only_use_one (string, optional): When multiple identifiers can be
mapped from a single term, this forces a singular result
:param source_column (string): Specifies the column name where the
source identifiers are located = ['']
:param source_selection (string): Specifies the database describing
the existing identifiers = ['']
:param species (string, optional): The combined common or latin name of
the species to which the identifiers apply = ['Human (Homo sapiens)',
'Mouse (Mus musculus)', 'Rat (Rattus norvegicus)', 'Frog (Xenopus tropicalis)',
'Zebra fish (Danio rerio)', 'Fruit fly (Drosophila melanogaster)',
'Mosquito (Anopheles gambiae)', 'Arabidopsis thaliana (Arabidopsis thaliana)',
'Yeast (Saccharomyces cerevisiae)', 'E. coli (Escherichia coli)',
'Tuberculosis (Mycobacterium tuberculosis)', 'Worm (Caenorhabditis elegans)']
:param target_selection (string): Specifies the database identifiers to be looked up = ['']
:param verbose: print more
:returns: eg. { "new column": "SGD " }
"""
PARAMS=set_param(["only_use_one","source_column","species","target_selection"],[only_use_one,source_column,species,target_selection])
response=api(url=self.__url+"/map column", PARAMS=PARAMS, method="POST", verbose=verbose)
return response
|
java
|
@Deprecated
public static <T extends Enum<T>> EnumTemplate<T> enumTemplate(Class<? extends T> cl,
String template, ImmutableList<?> args) {
return enumTemplate(cl, createTemplate(template), args);
}
|
python
|
def split_and_strip(string, separator_regexp=None, maxsplit=0):
"""Split a string into items and trim any excess spaces from the items
>>> split_and_strip('fred, was, here ')
['fred', 'was', 'here']
"""
if not string:
return ['']
if separator_regexp is None:
separator_regexp = _default_separator()
if not separator_regexp:
return string.split()
return [item.strip()
for item in re.split(separator_regexp, string, maxsplit)]
|
python
|
def DateStringToDateObject(date_string):
"""Return a date object for a string "YYYYMMDD"."""
# If this becomes a bottleneck date objects could be cached
if re.match(r'^\d{8}$', date_string) is None:
return None
try:
return datetime.date(int(date_string[0:4]), int(date_string[4:6]),
int(date_string[6:8]))
except ValueError:
return None
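# Worked examples:
#   DateStringToDateObject("20240229")  # -> datetime.date(2024, 2, 29)
#   DateStringToDateObject("24-02-29")  # -> None (not 8 digits)
#   DateStringToDateObject("20240230")  # -> None (ValueError: invalid day)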
|
python
|
def get_topic_triggers(rs, topic, thats, depth=0, inheritance=0, inherited=False):
"""Recursively scan a topic and return a list of all triggers.
Arguments:
rs (RiveScript): A reference to the parent RiveScript instance.
topic (str): The original topic name.
thats (bool): Are we getting triggers for 'previous' replies?
depth (int): Recursion step counter.
inheritance (int): The inheritance level counter, for topics that
inherit other topics.
inherited (bool): Whether the current topic is inherited by others.
Returns:
[]str: List of all triggers found.
"""
# Break if we're in too deep.
if depth > rs._depth:
rs._warn("Deep recursion while scanning topic inheritance")
# Keep in mind here that there is a difference between 'includes' and
# 'inherits' -- topics that inherit other topics are able to OVERRIDE
# triggers that appear in the inherited topic. This means that if the top
# topic has a trigger of simply '*', then NO triggers are capable of
# matching in ANY inherited topic, because even though * has the lowest
# priority, it has an automatic priority over all inherited topics.
#
# The getTopicTriggers method takes this into account. All topics that
# inherit other topics will have their triggers prefixed with a fictional
# {inherits} tag, which would start at {inherits=0} and increment if this
# topic has other inheriting topics. So we can use this tag to make sure
# topics that inherit things will have their triggers always be on top of
# the stack, from inherits=0 to inherits=n.
# Important info about the depth vs inheritance params to this function:
# depth increments by 1 each time this function recursively calls itself.
# inheritance increments by 1 only when this topic inherits another
# topic.
#
# This way, '> topic alpha includes beta inherits gamma' will have this
# effect:
# alpha and beta's triggers are combined together into one matching
# pool, and then those triggers have higher matching priority than
# gamma's.
#
# The inherited option is True if this is a recursive call, from a topic
# that inherits other topics. This forces the {inherits} tag to be added
# to the triggers. This only applies when the top topic 'includes'
# another topic.
rs._say("\tCollecting trigger list for topic " + topic + "(depth="
+ str(depth) + "; inheritance=" + str(inheritance) + "; "
+ "inherited=" + str(inherited) + ")")
# topic: the name of the topic
# depth: starts at 0 and ++'s with each recursion
# Topic doesn't exist?
if not topic in rs._topics:
rs._warn("Inherited or included topic {} doesn't exist or has no triggers".format(
topic
))
return []
# Collect an array of triggers to return.
triggers = []
# Get those that exist in this topic directly.
inThisTopic = []
if not thats:
# The non-that structure is {topic}->[array of triggers]
if topic in rs._topics:
for trigger in rs._topics[topic]:
inThisTopic.append([ trigger["trigger"], trigger ])
else:
# The 'that' structure is: {topic}->{cur trig}->{prev trig}->{trig info}
if topic in rs._thats.keys():
for curtrig in rs._thats[topic].keys():
for previous, pointer in rs._thats[topic][curtrig].items():
inThisTopic.append([ pointer["trigger"], pointer ])
# Does this topic include others?
if topic in rs._includes:
# Check every included topic.
for includes in rs._includes[topic]:
rs._say("\t\tTopic " + topic + " includes " + includes)
triggers.extend(get_topic_triggers(rs, includes, thats, (depth + 1), inheritance, True))
# Does this topic inherit others?
if topic in rs._lineage:
# Check every inherited topic.
for inherits in rs._lineage[topic]:
rs._say("\t\tTopic " + topic + " inherits " + inherits)
triggers.extend(get_topic_triggers(rs, inherits, thats, (depth + 1), (inheritance + 1), False))
# Collect the triggers for *this* topic. If this topic inherits any
# other topics, it means that this topic's triggers have higher
# priority than those in any inherited topics. Enforce this with an
# {inherits} tag.
if topic in rs._lineage or inherited:
for trigger in inThisTopic:
rs._say("\t\tPrefixing trigger with {inherits=" + str(inheritance) + "}" + trigger[0])
triggers.append(["{inherits=" + str(inheritance) + "}" + trigger[0], trigger[1]])
else:
triggers.extend(inThisTopic)
return triggers
|
java
|
public String getMultipartClass()
{
if ( _forceMultipartDisabled ) return null;
if ( _overrideMultipartClass != null ) return _overrideMultipartClass;
MultipartHandler mpHandler = InternalUtils.getMultipartHandlerType();
if ( mpHandler != null )
{
switch ( mpHandler.getValue() )
{
case MultipartHandler.INT_DISABLED: return null;
case MultipartHandler.INT_MEMORY: return COMMONS_MULTIPART_HANDLER_CLASS;
case MultipartHandler.INT_DISK: return COMMONS_MULTIPART_HANDLER_CLASS;
default: assert false : "unknown value for multipart handler: " + mpHandler.toString();
}
}
return null;
}
|
java
|
public Spliterator<E> spliterator() {
return (m instanceof ConcurrentSkipListMap)
? ((ConcurrentSkipListMap<E,?>)m).keySpliterator()
: ((ConcurrentSkipListMap.SubMap<E,?>)m).new SubMapKeyIterator();
}
|
python
|
def yaml_to_str(data: Mapping) -> str:
"""
Return the given config as a YAML str.
:param data: configuration dict
:return: given configuration as yaml str
"""
return yaml.dump(data, Dumper=ruamel.yaml.RoundTripDumper)
|
python
|
def import_instance(
self,
name,
input_config,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Import a Redis RDB snapshot file from GCS into a Redis instance.
Redis may stop serving during this operation. Instance state will be
IMPORTING for entire operation. When complete, the instance will contain
only data from the imported file.
The returned operation is automatically deleted after a few hours, so
there is no need to call DeleteOperation.
Example:
>>> from google.cloud import redis_v1
>>>
>>> client = redis_v1.CloudRedisClient()
>>>
>>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]')
>>>
>>> # TODO: Initialize `input_config`:
>>> input_config = {}
>>>
>>> response = client.import_instance(name, input_config)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
name (str): Required. Redis instance resource name using the form:
``projects/{project_id}/locations/{location_id}/instances/{instance_id}``
where ``location_id`` refers to a GCP region
input_config (Union[dict, ~google.cloud.redis_v1.types.InputConfig]): Required. Specify data to be imported.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.redis_v1.types.InputConfig`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.redis_v1.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "import_instance" not in self._inner_api_calls:
self._inner_api_calls[
"import_instance"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.import_instance,
default_retry=self._method_configs["ImportInstance"].retry,
default_timeout=self._method_configs["ImportInstance"].timeout,
client_info=self._client_info,
)
request = cloud_redis_pb2.ImportInstanceRequest(
name=name, input_config=input_config
)
operation = self._inner_api_calls["import_instance"](
request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
operation,
self.transport._operations_client,
cloud_redis_pb2.Instance,
metadata_type=cloud_redis_pb2.OperationMetadata,
)
|
python
|
def flush(self):
"""
Trigger a flush of the queue.
Note: this method will only return once the queue is empty. This means it can block indefinitely if more events
are produced in other threads than can be consumed.
"""
self.queue(None, None, flush=True)
if not self._flushed.wait(timeout=self._max_flush_time):
raise ValueError("flush timed out")
|
java
|
@Pure
public static Iterator<URL> getClasspath() {
Iterator<URL> iterator = getStartClasspath();
final ClassLoader loader = ClassLoaderFinder.findClassLoader();
try {
final DynamicURLClassLoader dLoader = (DynamicURLClassLoader) loader;
iterator = new FilteringIterator(Arrays.asList(dLoader.getURLs()).iterator());
} catch (ClassCastException exception1) {
if (ClasspathUtil.class.getClassLoader() != loader) {
try {
try (URLClassLoader dLoader = (URLClassLoader) loader) {
iterator = new IteratorIterator(
new FilteringIterator(Arrays.asList(dLoader.getURLs()).iterator()),
iterator);
}
} catch (ClassCastException | IOException exception2) {
//
}
}
}
return iterator;
}
|
python
|
def get_operators(self, name=None):
"""Get the list of :py:class:`Operator` elements associated with this job.
Args:
name(str): Only return operators matching `name`, where `name` can be a regular expression. If
`name` is not supplied, then all operators for this job are returned.
Returns:
list(Operator): List of Operator elements associated with this job.
Retrieving a list of operators whose name contains the string "temperatureSensor" could be performed as follows
Example:
>>> from streamsx import rest
>>> sc = rest.StreamingAnalyticsConnection()
>>> instances = sc.get_instances()
>>> job = instances[0].get_jobs()[0]
>>> operators = job.get_operators(name="*temperatureSensor*")
.. versionchanged:: 1.9 `name` parameter added.
"""
return self._get_elements(self.operators, 'operators', Operator, name=name)
|
java
|
public CMAPersonalAccessToken addScope(Scope scope) {
if (scopes == null) {
scopes = new ArrayList<Scope>();
}
scopes.add(scope);
return this;
}
|
python
|
def input_loop():
'''wait for user input'''
while mestate.exit != True:
try:
if mestate.exit != True:
line = input(mestate.rl.prompt)
except EOFError:
mestate.exit = True
sys.exit(1)
mestate.input_queue.put(line)
|
python
|
def derive(self, path):
"""
:param path: a path like "m/44'/0'/1'/0/10" if deriving from a master key,
or a relative path like "./0/10"
:return: the derived ExtendedPublicKey if deriving from an ExtendedPublicKey,
the derived ExtendedPrivateKey if deriving from an ExtendedPrivateKey
"""
steps = path.split('/')
if steps[0] not in {'m', '.'}:
raise ValueError('Invalid derivation path: {}'.format(path))
if steps[0] == 'm' and not self.is_master():
raise ValueError('Trying to derive absolute path from non-master key')
current = self
for step in steps[1:]:
hardened = False
if step[-1] == "'":
hardened = True
step = step[:-1]
index = int(step)
current = current.get_child(index, hardened)
return current
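# Usage sketch (hypothetical BIP32-style key objects implementing get_child):
#   account = master.derive("m/44'/0'/1'")  # absolute path, hardened steps
#   leaf = account.derive("./0/10")         # relative path from `account`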
|
java
|
@Override
public Collection<KeyTransaction> deserialize(JsonElement element, Type type, JsonDeserializationContext context)
throws JsonParseException
{
JsonObject obj = element.getAsJsonObject();
JsonArray kts = obj.getAsJsonArray("key_transactions");
List<KeyTransaction> values = new ArrayList<KeyTransaction>();
if(kts != null && kts.isJsonArray())
{
for(JsonElement kt : kts)
values.add(gson.fromJson(kt, KeyTransaction.class));
}
return values;
}
|
java
|
public List<ServletType<WebAppType<T>>> getAllServlet()
{
List<ServletType<WebAppType<T>>> list = new ArrayList<ServletType<WebAppType<T>>>();
List<Node> nodeList = childNode.get("servlet");
for(Node node: nodeList)
{
ServletType<WebAppType<T>> type = new ServletTypeImpl<WebAppType<T>>(this, "servlet", childNode, node);
list.add(type);
}
return list;
}
|
java
|
@FormatMethod
private void reportError(@FormatString String message, Object... arguments) {
errorReporter.reportError(scanner.getPosition(), message, arguments);
}
|
python
|
def create_oracle_cx_oracle(username, password, host, port, database, **kwargs): # pragma: no cover
"""
Create an engine connected to an Oracle database using cx_Oracle.
"""
return create_engine(
_create_oracle_cx_oracle(username, password, host, port, database),
**kwargs
)
|
python
|
def parse(expression):
"""
Return array of parsed tokens in the expression
expression String: Math expression to parse in infix notation
"""
result = []
current = ""
for i in expression:
if i.isdigit() or i == '.':
current += i
else:
if len(current) > 0:
result.append(current)
current = ""
if i in __operators__ or i in __parenthesis__:
result.append(i)
else:
raise Exception("invalid syntax " + i)
if len(current) > 0:
result.append(current)
return result
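# Example, assuming __operators__ and __parenthesis__ contain "+-*/" and "()":
#   parse("3.5+(12*4)")  # -> ['3.5', '+', '(', '12', '*', '4', ')']
#   parse("3 + 4")       # -> raises Exception: the space is neither a digit,
#                        #    an operator, nor a parenthesis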
|
python
|
def pull(self, repository, tag=None, stream=True, **kwargs):
"""
Identical to :meth:`dockermap.client.base.DockerClientWrapper.pull` with two enhancements:
* additional logging;
* the ``insecure_registry`` flag can be passed through ``kwargs``, or set as default using
``env.docker_registry_insecure``.
"""
c_insecure = kwargs.pop('insecure_registry', env.get('docker_registry_insecure'))
set_raise_on_error(kwargs)
try:
return super(DockerFabricClient, self).pull(repository, tag=tag, stream=stream,
insecure_registry=c_insecure, **kwargs)
except DockerStatusError as e:
error(e.message)
|
python
|
def contract(self, process):
"""
This contracts the current node to its parent and
then either calculates the params and values if all
child data exists, OR uses the default parent data.
(In real terms it returns the parent and recalculates)
TODO = processes need to be recalculated
"""
print('TODO: process check = ', process)
print(self.name, ' contracted to ->', self.parent)
return self.parent
|
python
|
def status(self):
"""What is the status of the job ?"""
# If there is no script it is either ready or a lost duplicate #
if not self.script_path.exists:
if self.name in jobs.names: return "DUPLICATE"
if self.name not in jobs.names: return "READY"
# It is submitted already #
if self.name in jobs.names:
if jobs[self.name]['type'] == 'queued': return "QUEUED"
if jobs[self.name]['type'] == 'running': return "RUNNING"
# So the script exists for sure but it is not in the queue #
if not self.kwargs['out_file'].exists: return "ABORTED"
# Let's look in log file #
if 'CANCELED' in self.log_tail: return "CANCELLED"
if 'slurmstepd: error' in self.log_tail: return "CANCELLED"
# It all looks good #
if 'SLURM: end at' in self.log_tail: return "FINISHED"
# At this point we have no idea #
return "INTERUPTED"
|
python
|
def assims(self, mot):
""" Cherche si la chaรฎne a peut subir une assimilation, renvoie cette chaรฎne รฉventuellement assimilรฉe.
:param mot: Mot pour lequel on doit vรฉrifier des assimilations
:type mot: str
:return: Mot assimilรฉ
:rtype: str
"""
for replaced, replacement in self._assimsq.items():
if mot.startswith(replaced):
mot = mot.replace(replaced, replacement)
return mot
return mot
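# Usage sketch with a hypothetical assimilation table
# (e.g. self._assimsq = {"adf": "aff"}):
#   word.assims("adfero")  # -> "affero" (first matching prefix rewritten)
#   word.assims("amo")     # -> "amo" (no prefix matches, returned unchanged)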
|
python
|
def get_lock(self, lockname, locktime=60, auto_renewal=False):
''' Gets a lock and returns it if it can be established. Returns False otherwise '''
pid = os.getpid()
caller = inspect.stack()[0][3]
try:
# rl = redlock.Redlock([{"host": settings.REDIS_SERVERS['std_redis']['host'], "port": settings.REDIS_SERVERS['std_redis']['port'], "db": settings.REDIS_SERVERS['std_redis']['db']}, ])
rl = redis_lock.Lock(self, lockname, expire=locktime, auto_renewal=auto_renewal)
except:
if self.logger:
self.logger.error('Process {0} ({1}) could not get lock {2}. Going ahead without locking!!! {3}'.format(pid, caller, lockname, traceback.format_exc()))
return False
try:
lock = rl.acquire(blocking=False)
except RedisError:
return False
if not lock:
return False
else:
return rl
|
python
|
def normalize_response_value(rv):
""" Normalize the response value into a 3-tuple (rv, status, headers)
:type rv: tuple|*
:returns: tuple(rv, status, headers)
:rtype: tuple(Response|JsonResponse|*, int|None, dict|None)
"""
status = headers = None
if isinstance(rv, tuple):
rv, status, headers = rv + (None,) * (3 - len(rv))
return rv, status, headers
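# Worked examples:
#   normalize_response_value(body)                   # -> (body, None, None)
#   normalize_response_value((body, 404))            # -> (body, 404, None)
#   normalize_response_value((body, 404, {'X': 1}))  # -> (body, 404, {'X': 1})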
|
python
|
def listtransactions(self, user_id="", count=10, start_at=0):
"""List all transactions associated with this account.
Args:
user_id (str): this user's unique identifier
count (int): number of transactions to return (default=10)
start_at (int): start the list at this transaction (default=0)
Returns:
list [dict]: transactions associated with this user's account
"""
txlist = self.rpc.call("listtransactions", user_id, count, start_at)
self.logger.debug("Got transaction list for " + str(user_id))
return txlist
|
java
|
public Observable<Void> deleteValueAsync(String tagName, String tagValue) {
return deleteValueWithServiceResponseAsync(tagName, tagValue).map(new Func1<ServiceResponse<Void>, Void>() {
@Override
public Void call(ServiceResponse<Void> response) {
return response.body();
}
});
}
|
python
|
def qwarp_align(dset_from,dset_to,skull_strip=True,mask=None,affine_suffix='_aff',suffix='_qwarp',prefix=None):
'''aligns ``dset_from`` to ``dset_to`` using 3dQwarp
Will run ``3dSkullStrip`` (unless ``skull_strip`` is ``False``), ``3dUnifize``,
``3dAllineate``, and then ``3dQwarp``. This method will add suffixes to the input
dataset for the intermediate files (e.g., ``_ss``, ``_u``). If those files already
exist, it will assume they were intelligently named, and use them as is
:skull_strip: If True/False, turns skull-stripping of both datasets on/off.
If a string matching ``dset_from`` or ``dset_to``, will only
skull-strip the given dataset
:mask: Applies the given mask to the alignment. Because of the nature
of the alignment algorithms, the mask is **always** applied to
the ``dset_to``. If this isn't what you want, you need to reverse
the transform and re-apply it (e.g., using :meth:`qwarp_invert`
and :meth:`qwarp_apply`). If the ``dset_to`` dataset is skull-stripped,
the mask will also be resampled to match the ``dset_to`` grid.
:affine_suffix: Suffix applied to ``dset_from`` to name the new dataset, as well as
the ``.1D`` file.
:suffix: Suffix applied to the final ``dset_from`` dataset. An additional file
with the additional suffix ``_WARP`` will be created containing the parameters
(e.g., with the default ``_qwarp`` suffix, the parameters will be in a file with
the suffix ``_qwarp_WARP``)
:prefix: Alternatively to ``suffix``, explicitly give the full output filename
The output affine dataset and 1D, as well as the output of qwarp are named by adding
the given suffixes (``affine_suffix`` and ``qwarp_suffix``) to the ``dset_from`` file
If ``skull_strip`` is a string instead of ``True``/``False``, it will only skull strip the given
dataset instead of both of them
# TODO: currently does not work with +tlrc datasets because the filenames get mangled
'''
dset_ss = lambda dset: os.path.split(nl.suffix(dset,'_ns'))[1]
dset_u = lambda dset: os.path.split(nl.suffix(dset,'_u'))[1]
def dset_source(dset):
if skull_strip==True or skull_strip==dset:
return dset_ss(dset)
else:
return dset
dset_affine = os.path.split(nl.suffix(dset_from,affine_suffix))[1]
dset_affine_1D = nl.prefix(dset_affine) + '.1D'
dset_qwarp = prefix
if dset_qwarp is None:
dset_qwarp = os.path.split(nl.suffix(dset_from,suffix))[1]
if os.path.exists(dset_qwarp):
# final product already exists
return
affine_align(dset_from,dset_to,skull_strip,mask,affine_suffix)
for dset in [dset_from,dset_to]:
nl.run([
'3dUnifize',
'-prefix', dset_u(dset_source(dset)),
'-input', dset_source(dset)
],products=[dset_u(dset_source(dset))])
mask_use = mask
if mask:
# the mask was probably made in the space of the original dset_to anatomy,
# which has now been cropped from the skull stripping. So the lesion mask
# needs to be resampled to match the corresponding mask
if skull_strip==True or skull_strip==dset_to:
nl.run(['3dresample','-master',dset_u(dset_ss(dset)),'-inset',mask,'-prefix',nl.suffix(mask,'_resam')],products=nl.suffix(mask,'_resam'))
mask_use = nl.suffix(mask,'_resam')
warp_cmd = [
'3dQwarp',
'-prefix', dset_qwarp,
'-duplo', '-useweight', '-blur', '0', '3',
'-iwarp',
'-base', dset_u(dset_source(dset_to)),
'-source', dset_affine
]
if mask:
warp_cmd += ['-emask', mask_use]
nl.run(warp_cmd,products=dset_qwarp)
|
python
|
def serialize(self, format):
"""
Serialize provenance graph in the specified format
"""
if PY3:
return self.prov_g.serialize(format=format).decode('utf-8')
else:
return self.prov_g.serialize(format=format)
|
java
|
public Map<Long, Integer> computeDaysForResources(List<RESOURCE> resources) {
Map<Long, Integer> result = computeDays(getModificationDates(resources));
if (LOG.isDebugEnabled()) {
for (RESOURCE res : resources) {
LOG.debug(
"Resource "
+ getRootPath(res)
+ " is "
+ result.get(new Long(getDateLastModified(res)))
+ " days old.");
}
}
return result;
}
|
python
|
def response_hook(self, r, **kwargs):
"""The actual hook handler."""
if r.status_code == 401:
# Handle server auth.
www_authenticate = r.headers.get('www-authenticate', '').lower()
auth_type = _auth_type_from_header(www_authenticate)
if auth_type is not None:
return self.retry_using_http_NTLM_auth(
'www-authenticate',
'Authorization',
r,
auth_type,
kwargs
)
elif r.status_code == 407:
# If we didn't have server auth, do proxy auth.
proxy_authenticate = r.headers.get(
'proxy-authenticate', ''
).lower()
auth_type = _auth_type_from_header(proxy_authenticate)
if auth_type is not None:
return self.retry_using_http_NTLM_auth(
'proxy-authenticate',
'Proxy-authorization',
r,
auth_type,
kwargs
)
return r
|
java
|
@Requires("bytecode != null")
@Ensures("result != null")
private byte[] instrumentWithDebug(byte[] bytecode) {
ClassReader reader = new ClassReader(bytecode);
ClassWriter writer = new NonLoadingClassWriter(reader, 0);
reader.accept(new HelperClassAdapter(writer), ClassReader.EXPAND_FRAMES);
return writer.toByteArray();
}
|
java
|
private boolean mergeSpaces(ArrayList spaces) {
for (int source=0;source<spaces.size();source++) {
Space a = (Space) spaces.get(source);
for (int target=source+1;target<spaces.size();target++) {
Space b = (Space) spaces.get(target);
if (a.canMerge(b)) {
spaces.remove(a);
spaces.remove(b);
spaces.add(a.merge(b));
return true;
}
}
}
return false;
}
|
java
|
public Observable<ApplicationInsightsComponentAPIKeyInner> createAsync(String resourceGroupName, String resourceName, APIKeyRequest aPIKeyProperties) {
return createWithServiceResponseAsync(resourceGroupName, resourceName, aPIKeyProperties).map(new Func1<ServiceResponse<ApplicationInsightsComponentAPIKeyInner>, ApplicationInsightsComponentAPIKeyInner>() {
@Override
public ApplicationInsightsComponentAPIKeyInner call(ServiceResponse<ApplicationInsightsComponentAPIKeyInner> response) {
return response.body();
}
});
}
|
python
|
def mailfrom(self, sender=None, client_args=None):
"""
Send LMTP MAIL FROM command, and process the server response.
In DLMTP mode, the server expects the client to identify itself.
Because the envelope sender is of no importance to DSPAM, the client
is expected to send an identity and a password (dspam.conf:
ServerPass.<ident>="<password>") in stead of the actual sender.
When you need want DSPAM to deliver the message itself and need to
pass the server an actual envelope sender for that, add the
--mail-from parameter in client_args.
When the server is setup in LMTP mode only (dspam.conf:
ServerMode=standard), the envelope sender is a regular envelope
sender, and is re-used when delivering the message after processing.
Client args
===========
When in DLMTP mode (and with proper auth credentials), the server
accepts parameters specified by the client. These are in the form
as they are passed to the command-line 'dspam' program.
See man dspam(1) for details, and the process() or classify() methods
in this class for simple examples.
Args:
sender -- The envelope sender to use in LMTP mode.
client_args -- DSPAM parameters to pass to the server in DLMTP mode.
"""
if sender and client_args:
raise DspamClientError('Arguments are mutually exclusive')
if client_args and not self.dlmtp:
raise DspamClientError(
'Cannot send client args, server does not support DLMTP')
command = 'MAIL FROM:'
if not sender:
if self.dlmtp_ident and self.dlmtp_pass:
sender = self.dlmtp_pass + '@' + self.dlmtp_ident
else:
sender = ''
command = command + '<' + sender + '>'
if client_args:
command = command + ' DSPAMPROCESSMODE="{}"'.format(client_args)
self._send(command + '\r\n')
resp = self._read()
if not resp.startswith('250'):
raise DspamClientError(
'Unexpected server response at MAIL FROM: ' + resp)
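# Usage sketch (hypothetical client instance; the client_args string is
# passed verbatim via DSPAMPROCESSMODE, with flags as in dspam(1)):
#   client.mailfrom()                            # DLMTP: <password@ident>
#   client.mailfrom(sender='alice@example.org')  # plain LMTP envelope sender
#   client.mailfrom(client_args='--user bob --classify')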
|
python
|
def flushdb(self, async_op=False):
"""
Remove all keys from the current database.
:param async_op: lets a single database to be freed asynchronously. \
Defaults to False
"""
if async_op:
fut = self.execute(b'FLUSHDB', b'ASYNC')
else:
fut = self.execute(b'FLUSHDB')
return wait_ok(fut)
|
java
|
public com.squareup.okhttp.Call getSovereigntyMapAsync(String datasource, String ifNoneMatch,
final ApiCallback<List<SovereigntyMapResponse>> callback) throws ApiException {
com.squareup.okhttp.Call call = getSovereigntyMapValidateBeforeCall(datasource, ifNoneMatch, callback);
Type localVarReturnType = new TypeToken<List<SovereigntyMapResponse>>() {
}.getType();
apiClient.executeAsync(call, localVarReturnType, callback);
return call;
}
|
python
|
def _unsigned_to_signed(v, bits):
"""
Convert an unsigned integer to a signed integer.
:param v: The unsigned integer
:param bits: How many bits this integer should be
:return: The converted signed integer
"""
if StridedInterval._is_msb_zero(v, bits):
return v
else:
return -(2 ** bits - v)
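# Worked examples for bits=8:
#   _unsigned_to_signed(0x7F, 8)  # -> 127   (MSB clear, value unchanged)
#   _unsigned_to_signed(0x80, 8)  # -> -128  (-(2**8 - 0x80))
#   _unsigned_to_signed(0xFF, 8)  # -> -1    (-(2**8 - 0xFF))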
|
java
|
@Override
public void sendAckExpectedMessage(
long ackExpStamp,
int priority,
Reliability reliability,
SIBUuid12 stream)
throws SIResourceException
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(tc, "sendAckExpectedMessage",
new Object[] { new Long(ackExpStamp), new Integer(priority), reliability, stream });
HashMap allPubSubOutputHandlers = _destination.getAllPubSubOutputHandlers();
try
{
Iterator itr = allPubSubOutputHandlers.values().iterator();
while (itr.hasNext())
{
PubSubOutputHandler handler = (PubSubOutputHandler) itr.next();
// Send AckExpected to all OutputHandlers
handler.processAckExpected(ackExpStamp, priority, reliability, stream);
}
} finally
{
// By calling the getAllPubSubOutputHandlers it will lock the
// handlers
_destination.unlockPubsubOutputHandlers();
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "sendAckExpectedMessage");
}
|
java
|
@BetaApi
public final InterconnectsGetDiagnosticsResponse getDiagnosticsInterconnect(
ProjectGlobalInterconnectName interconnect) {
GetDiagnosticsInterconnectHttpRequest request =
GetDiagnosticsInterconnectHttpRequest.newBuilder()
.setInterconnect(interconnect == null ? null : interconnect.toString())
.build();
return getDiagnosticsInterconnect(request);
}
|
python
|
def get_most_specific_tinfo_dcnt(goids, go2nt):
"""Get the GO ID with the highest GO term annotation information value."""
# go2nt_usr = {go:go2nt[go] for go in goids}
# return max(go2nt_usr.items(), key=lambda t: [t[1].tinfo, t[1].dcnt])[0]
return max(_get_go2nt(goids, go2nt), key=lambda t: [t[1].tinfo, t[1].dcnt])[0]
|
java
|
public static VINT fromBinary(long binary) {
BitSet bs = BitSet.valueOf(new long[] { binary });
long mask = MASK_BYTE_1;
byte length = 1;
if (bs.length() > 3 * BIT_IN_BYTE) {
mask = MASK_BYTE_4;
length = 4;
} else if (bs.length() > 2 * BIT_IN_BYTE) {
mask = MASK_BYTE_3;
length = 3;
} else if (bs.length() > 1 * BIT_IN_BYTE) {
mask = MASK_BYTE_2;
length = 2;
}
long value = binary & mask;
return new VINT(binary, length, value);
}
|
python
|
def group_samaccountnames(self, base_dn):
"""For the current ADUser instance, determine which
groups the user is a member of and convert the
group DistinguishedNames to sAMAccountNames.
The resulting list of groups may not be complete
if explicit_membership_only was set to
True when the object factory method (user() or users()) was
called.
:param str base_dn: The base DN to search within
:return: A list of groups (sAMAccountNames) for which the
current ADUser instance is a member, sAMAccountNames
:rtype: list
"""
#pylint: disable=no-member
mappings = self.samaccountnames(base_dn, self.memberof)
#pylint: enable=no-member
groups = [samaccountname for samaccountname in mappings.values()]
if not groups:
logging.info("%s - unable to retrieve any groups for the current ADUser instance",
self.samaccountname)
return groups
|
java
|
private static void usage() {
StringBuilder usageText = new StringBuilder();
usageText.append("Usage: java -D").append(PROPERTY_PREFIX)
.append("config=esigate.properties -jar esigate-server.jar [start|stop]\n\t");
usageText.append("start Start the server (default)\n\t");
usageText.append("stop Stop the server gracefully\n\t");
System.out.println(usageText.toString());
System.exit(-1);
}
|
python
|
def get_psms(self):
"""Creates iterator to write to new tsv. Contains input tsv
lines plus quant data for these."""
self.header = actions.create_header(self.oldheader)
self.psms = actions.add_genes_to_psm_table(self.fn, self.oldheader,
self.lookup)
|
python
|
def prg_rom(self):
"""Return the PRG ROM of the ROM file."""
try:
return self.raw_data[self.prg_rom_start:self.prg_rom_stop]
except IndexError:
raise ValueError('failed to read PRG-ROM on ROM.')
|
java
|
public long getUnAvailableMessageCount()
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(tc, "getUnAvailableMessageCount");
long returnValue = -1;
try
{
returnValue = getStatistics().getUnavailableItemCount();
}
catch(MessageStoreException e)
{
// FFDC
FFDCFilter.processException(
e,
"com.ibm.ws.sib.processor.impl.store.itemstreams.PtoPMessageItemStream.getUnAvailableMessageCount",
"1:894:1.93.1.14",
this);
SibTr.exception(tc, e);
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "getUnAvailableMessageCount", new Long(returnValue));
return returnValue;
}
|
java
|
@Override
public CreateSamplingRuleResult createSamplingRule(CreateSamplingRuleRequest request) {
request = beforeClientExecution(request);
return executeCreateSamplingRule(request);
}
|
java
|
@Deprecated
@PublicEvolving
public DataSink<T> print(String sinkIdentifier) {
return output(new PrintingOutputFormat<T>(sinkIdentifier, false));
}
|
java
|
protected HTTPResponse submitHTTPRequestImpl(HTTPRequest httpRequest,HTTPMethod httpMethod)
{
return this.httpClient.submitHTTPRequest(httpRequest,this.httpClientConfiguration,httpMethod);
}
|
java
|
public ReturnValueType<ConstructorType<T>> getOrCreateReturnValue()
{
Node node = childNode.getOrCreate("return-value");
ReturnValueType<ConstructorType<T>> returnValue = new ReturnValueTypeImpl<ConstructorType<T>>(this, "return-value", childNode, node);
return returnValue;
}
|
python
|
def decode(self, bytes, raw=False):
"""decode(bytearray, raw=False) -> value
Decodes the given bytearray according the corresponding
EVR Definition (:class:`EVRDefn`) for the underlying
'MSB_U16' EVR code.
If the optional parameter ``raw`` is ``True``, the EVR code
itself will be returned instead of the EVR Definition
(:class:`EVRDefn`).
"""
code = super(EVRType, self).decode(bytes)
result = None
if raw:
result = code
elif code in self.evrs.codes:
result = self.evrs.codes[code]
else:
result = code
log.warn('Unrecognized EVR code: %d' % code)
return result
|
python
|
def upsert(self, table: str, record: dict, create_cols: bool=False,
dtypes: list=None, pks=["id"], namefields=["id"]):
"""
Upsert a record in a table
"""
try:
self.db[table].upsert(record, pks, create_cols, dtypes)
except Exception as e:
self.err(e, "Can not upsert data")
return
names = ""
for el in namefields:
names += " " + record[el]
self.ok("Upserted record"+names)
|
java
|
public void auditDeletePatientRecordEvent(RFC3881EventOutcomeCodes eventOutcome,
String pixManagerUri, String receivingFacility, String receivingApp,
String sendingFacility, String sendingApp,
String hl7MessageControlId,
String patientId)
{
if (!isAuditorEnabled()) {
return;
}
auditPatientRecordEvent(true,
new IHETransactionEventTypeCodes.PatientIdentityFeed(), eventOutcome, RFC3881EventCodes.RFC3881EventActionCodes.DELETE,
sendingFacility, sendingApp, getSystemAltUserId(), getSystemNetworkId(),
receivingFacility, receivingApp, null, EventUtils.getAddressForUrl(pixManagerUri, false),
getHumanRequestor(),
hl7MessageControlId,
new String[] {patientId}, null, null);
}
|
python
|
def __get_agent_host_port(self):
"""
Iterates over the various ways the host and port of the Instana host
agent may be configured: default, env vars, sensor options...
"""
host = AGENT_DEFAULT_HOST
port = AGENT_DEFAULT_PORT
if "INSTANA_AGENT_HOST" in os.environ:
host = os.environ["INSTANA_AGENT_HOST"]
if "INSTANA_AGENT_PORT" in os.environ:
port = int(os.environ["INSTANA_AGENT_PORT"])
elif "INSTANA_AGENT_IP" in os.environ:
# Deprecated: INSTANA_AGENT_IP environment variable
# To be removed in a future version
host = os.environ["INSTANA_AGENT_IP"]
if "INSTANA_AGENT_PORT" in os.environ:
port = int(os.environ["INSTANA_AGENT_PORT"])
elif self.agent.sensor.options.agent_host != "":
host = self.agent.sensor.options.agent_host
if self.agent.sensor.options.agent_port != 0:
port = self.agent.sensor.options.agent_port
return host, port
|
python
|
def process_models(attrs, base_model_class):
"""
Attach default fields and meta options to models
"""
attrs.update(base_model_class._DEFAULT_BASE_FIELDS)
attrs['_instance_registry'] = set()
attrs['_is_unpermitted_fields_set'] = False
attrs['save_meta_data'] = None
attrs['_pre_save_hook_called'] = False
attrs['_post_save_hook_called'] = False
DEFAULT_META = {'bucket_type': settings.DEFAULT_BUCKET_TYPE,
'field_permissions': {},
'app': 'main',
'list_fields': [],
'list_filters': [],
'search_fields': [],
'fake_model': False,
}
if 'Meta' not in attrs:
attrs['Meta'] = type('Meta', (object,), DEFAULT_META)
else:
for k, v in DEFAULT_META.items():
if k not in attrs['Meta'].__dict__:
setattr(attrs['Meta'], k, v)
|
python
|
def get_grade_systems(self):
"""Gets all ``GradeSystems``.
In plenary mode, the returned list contains all known grade
systems or an error results. Otherwise, the returned list may
contain only those grade systems that are accessible through
this session.
return: (osid.grading.GradeSystemList) - a ``GradeSystemList``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resources
# NOTE: This implementation currently ignores plenary view
collection = JSONClientValidated('grading',
collection='GradeSystem',
runtime=self._runtime)
result = collection.find(self._view_filter()).sort('_id', DESCENDING)
return objects.GradeSystemList(result, runtime=self._runtime, proxy=self._proxy)
|
python
|
def get_header(self, configuration=False):
"""Get the log file header
If configuration is True, this returns the daemon configuration
:return: A string list containing project name, daemon name, version, licence etc.
:rtype: list
"""
header = [u"-----",
u" โโโโโโ โโโ โโโ โโโโโโโ โโโโ โโโ โโโโโโ โโโ โโโ",
u" โโโโโโโโโโโ โโโโโโโโโโโ โโโโโ โโโโโโโโโโโโโโ โโโโ",
u" โโโโโโโโโโโ โโโโโโ โโโโโโโโโโ โโโโโโโโโโโโโโโโโโ ",
u" โโโโโโโโโโโ โโโโโโ โโโโโโโโโโโโโโโโโโโโโโโโโโโโ ",
u" โโโ โโโโโโโโโโโโโโโโโโโโโโโโโโ โโโโโโโโโ โโโโโโ โโโ",
u" โโโ โโโโโโโโโโโโโโ โโโโโโโ โโโ โโโโโโโโ โโโโโโ โโโ",
u"-----",
u"Alignak %s - %s daemon" % (VERSION, self.name),
u"Copyright (c) 2015-2019: Alignak Team",
u"License: AGPL",
u"-----",
u"Python: %s.%s" % (sys.version_info.major, sys.version_info.minor),
u"-----",
u"My pid: %s" % self.pid]
if configuration:
header = ["My configuration: "]
for prop, _ in sorted(self.properties.items()):
header.append(" - %s=%s" % (prop, getattr(self, prop)))
return header
|