language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
---|---|
java
|
/**
 * Sets the formats to use for the message parts whose argument numbers index
 * into {@code newFormats}. Only valid for patterns with numbered arguments.
 *
 * @param newFormats formats indexed by argument number; arguments whose number
 *                   is out of range are left unchanged
 * @throws IllegalArgumentException if the pattern uses named arguments
 */
public void setFormatsByArgumentIndex(Format[] newFormats) {
    if (msgPattern.hasNamedArguments()) {
        throw new IllegalArgumentException(
                "This method is not available in MessageFormat objects " +
                "that use alphanumeric argument names.");
    }
    int partIndex = 0;
    // Walk every top-level argument of the pattern.
    while ((partIndex = nextTopLevelArgStart(partIndex)) >= 0) {
        final int argNumber = msgPattern.getPart(partIndex + 1).getValue();
        if (argNumber < newFormats.length) {
            setCustomArgStartFormat(partIndex, newFormats[argNumber]);
        }
    }
}
|
python
|
def Unpack(self, msg):
    """Unpacks the current Any message into specified message."""
    target_descriptor = msg.DESCRIPTOR
    if not self.Is(target_descriptor):
        # Payload type does not match the target message type.
        return False
    msg.ParseFromString(self.value)
    return True
|
java
|
/**
 * Sends the request and optionally reads the response.
 *
 * @param readResponse true to also read the response body now
 * @return true if the call completed; false if the engine was replaced with a
 *         retry engine and the caller should invoke this method again
 * @throws IOException if the request failed and recovery was not possible; the
 *         failure is also recorded in {@code httpEngineFailure}
 */
private boolean execute(boolean readResponse) throws IOException {
    try {
        httpEngine.sendRequest();
        route = httpEngine.getRoute();
        // Connection may be null (e.g. cache hit); handshake is then unknown.
        handshake = httpEngine.getConnection() != null
                ? httpEngine.getConnection().getHandshake()
                : null;
        if (readResponse) {
            httpEngine.readResponse();
        }
        return true;
    } catch (RequestException e) {
        // An attempt to interpret a request failed. Not recoverable.
        IOException toThrow = e.getCause();
        httpEngineFailure = toThrow;
        throw toThrow;
    } catch (RouteException e) {
        // The attempt to connect via a route failed. The request will not have been sent.
        HttpEngine retryEngine = httpEngine.recover(e);
        if (retryEngine != null) {
            httpEngine = retryEngine;
            return false;
        }
        // Give up; recovery is not possible.
        IOException toThrow = e.getLastConnectException();
        httpEngineFailure = toThrow;
        throw toThrow;
    } catch (IOException e) {
        // An attempt to communicate with a server failed. The request may have been sent.
        HttpEngine retryEngine = httpEngine.recover(e);
        if (retryEngine != null) {
            httpEngine = retryEngine;
            return false;
        }
        // Give up; recovery is not possible.
        httpEngineFailure = e;
        throw e;
    }
}
|
java
|
/**
 * Builds an NFS MKDIR request for this file, using the parent directory's
 * file handle and this file's name.
 *
 * @param attributes the attributes to apply to the new directory
 * @return the prepared mkdir request
 * @throws IOException if the parent file handle cannot be obtained
 */
public NfsMkdirRequest makeMkdirRequest(NfsSetAttributes attributes) throws IOException {
    return getNfs().makeMkdirRequest(getParentFile().getFileHandle(), getName(), attributes);
}
|
java
|
/**
 * Deregisters this reporter from the root metric context: removes both its
 * notification target and the reporter itself.
 *
 * @throws IOException declared by the Closeable contract; the removals here
 *         are not expected to throw it
 */
@Override
public void close() throws IOException {
    // Look up the root context once instead of twice.
    final RootMetricContext rootContext = RootMetricContext.get();
    rootContext.removeNotificationTarget(this.notificationTargetUUID);
    rootContext.removeReporter(this);
}
|
python
|
def smartDumpDictHdf5(RV, o):
    """Recursively dump a dictionary into an HDF5 group.

    Each value that is itself a dict becomes a sub-group (recursing);
    every other value is stored as a gzip-compressed, chunked dataset.

    :param RV: dictionary to dump; values are dicts, lists or arrays
    :param o: an open HDF5 group-like object (supports create_group /
        create_dataset) -- e.g. an h5py Group
    """
    for key in list(RV.keys()):
        # isinstance also accepts dict subclasses (e.g. OrderedDict),
        # which the previous type(...) == dict comparison rejected.
        if isinstance(RV[key], dict):
            g = o.create_group(key)
            smartDumpDictHdf5(RV[key], g)
        else:
            o.create_dataset(name=key, data=SP.array(RV[key]),
                             chunks=True, compression='gzip')
|
java
|
/**
 * Stub for the v1 JVM-stats serialization endpoint. This formatter does not
 * implement it and always rejects the call.
 *
 * @param map the collected JVM statistics (ignored)
 * @return never returns normally
 * @throws BadRequestException always, with HTTP 501 Not Implemented
 */
public ChannelBuffer formatJVMStatsV1(final Map<String, Map<String, Object>> map) {
    throw new BadRequestException(HttpResponseStatus.NOT_IMPLEMENTED,
            "The requested API endpoint has not been implemented",
            this.getClass().getCanonicalName() +
            " has not implemented formatJVMStatsV1");
}
|
python
|
def add(self, *args, **kwargs):
    """Add Cookie objects by their names, or create new ones under
    specified names.

    Unnamed arguments are taken to be existing cookies and are stored
    under their own ``.name``. Keyword arguments create new cookies via
    ``self.cookie_class``, with the key as the cookie name and the value
    as the UNENCODED cookie value.

    Only the first cookie registered under a given name is reachable
    through the mapping interface; duplicates remain available via
    ``all_cookies``.
    """
    for cookie in args:
        self.all_cookies.append(cookie)
        if cookie.name not in self:
            self[cookie.name] = cookie
    for key, value in kwargs.items():
        cookie = self.cookie_class(key, value)
        self.all_cookies.append(cookie)
        if key not in self:
            self[key] = cookie
|
python
|
def from_proto(cls, proto_mesh, scale,
               pos, vel, euler, euler_vel,
               rotation_vel=(0,0,0),
               component_com_x=None):
    """
    Turn a ProtoMesh into a Mesh scaled and placed in orbit.

    Update all geometry fields from the proto reference frame, to the
    current system reference frame, given the current position, velocitiy,
    euler angles, and rotational velocity of THIS mesh.

    :parameter proto_mesh: the ProtoMesh whose items seed the new Mesh
    :parameter scale: factor passed to _scale_mesh to size the proto mesh
    :parameter list pos: current position (x, y, z)
    :parameter list vel: current velocity (vx, vy, vz)
    :parameter list euler: current euler angles (etheta, elongan, eincl)
    :parameter euler_vel: euler-angle velocities, forwarded to
        _place_in_orbit (units/convention defined there)
    :parameter list rotation_vel: rotation velocity vector (polar_dir*freq_rot)
    :parameter component_com_x: center-of-mass x of the component, or None;
        forwarded to _place_in_orbit
    :return: the new Mesh instance placed in orbit
    """
    # Build the mesh from the proto's fields, then transform in place:
    # copy roche values, scale, and finally move into the orbit frame.
    mesh = cls(**proto_mesh.items())
    mesh._copy_roche_values()
    mesh._scale_mesh(scale=scale)
    mesh._place_in_orbit(pos, vel, euler, euler_vel, rotation_vel, component_com_x)
    return mesh
|
java
|
/**
 * Lazily builds the map of runtime name to {@code Runtime} instance from the
 * serialized runtime definitions. Safe to call repeatedly; only the first
 * call does work. Synchronized so concurrent callers see a fully built map.
 *
 * @throws RuntimeException wrapping any IO or injection failure
 */
private synchronized void initialize() {
    // Already initialized by an earlier call.
    if (this.runtimes != null) {
        return;
    }
    this.runtimes = new HashMap<>();
    for (final AvroRuntimeDefinition rd : runtimeDefinition.getRuntimes()) {
        try {
            // We need to create different injector for each runtime as they define conflicting bindings. Also we cannot
            // fork the original injector because of the same reason.
            // We create new injectors and copy form the original injector what we need.
            // rootInjector is an emptyInjector that we copy bindings from the original injector into. Then we fork
            // it to instantiate the actual runtime.
            final Injector rootInjector = Tang.Factory.getTang().newInjector();
            initializeInjector(rootInjector);
            // Bind this runtime's name and implementation class.
            final Configuration runtimeConfig =
                    Tang.Factory.getTang().newConfigurationBuilder()
                            .bindNamedParameter(RuntimeName.class, rd.getRuntimeName().toString())
                            .bindImplementation(Runtime.class, RuntimeImpl.class)
                            .build();
            // Deserialize the runtime-specific configuration that was shipped with the definition.
            final Configuration config =
                    new AvroConfigurationSerializer().fromString(rd.getSerializedConfiguration().toString());
            final Injector runtimeInjector = rootInjector.forkInjector(config, runtimeConfig);
            this.runtimes.put(rd.getRuntimeName().toString(), runtimeInjector.getInstance(Runtime.class));
        } catch (final IOException | InjectionException e) {
            throw new RuntimeException("Unable to initialize runtimes.", e);
        }
    }
}
|
python
|
def get_single_assignment(self, id, course_id, all_dates=None, include=None, needs_grading_count_by_section=None, override_assignment_dates=None):
    """
    Get a single assignment.

    Returns the assignment with the given id for the given course.

    Optional query parameters (each sent only when not None):
      include - associations to include; validated against the allowed
          enum values. The "assignment_visibility" option requires the
          Differentiated Assignments course feature.
      override_assignment_dates - apply assignment overrides (API default true).
      needs_grading_count_by_section - split "needs_grading_count" by section.
      all_dates - include all dates associated with the assignment.
    """
    path = {"course_id": course_id, "id": id}
    data = {}
    params = {}
    if include is not None:
        self._validate_enum(include, ["submission", "assignment_visibility", "overrides", "observed_users"])
        params["include"] = include
    if override_assignment_dates is not None:
        params["override_assignment_dates"] = override_assignment_dates
    if needs_grading_count_by_section is not None:
        params["needs_grading_count_by_section"] = needs_grading_count_by_section
    if all_dates is not None:
        params["all_dates"] = all_dates
    self.logger.debug("GET /api/v1/courses/{course_id}/assignments/{id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("GET", "/api/v1/courses/{course_id}/assignments/{id}".format(**path), data=data, params=params, single_item=True)
|
python
|
def get_new(self):
    """List new files since last recorded file state.

    pysat stores filenames in the user_home/.pysat directory. Returns
    a list of all new filenames since the last known change to files.
    Filenames are stored if there is a change and either update_files
    is True at instrument object level or files.refresh() is called.

    Returns
    -------
    pandas.Series
        files are indexed by datetime
    """
    # refresh the stored file state first
    self.refresh()
    # current files
    new_info = self._load()
    # previous set of files
    old_info = self._load(prev_version=True)
    # Use ~ (bitwise not) to invert the boolean mask: unary minus on a
    # boolean Series is rejected by modern pandas.
    new_files = new_info[~new_info.isin(old_info)]
    return new_files
|
java
|
/**
 * Classifies a solution by where the "x" piece sits on the board: centered on
 * both axes, centered on one axis, or neither (upper-left by convention).
 *
 * @param names the solution rows, each a list of column names; board cells are
 *              the {@code Point} entries
 * @return the category implied by the "x" piece's bounding box; defaults to
 *         {@code UPPER_LEFT} when no more specific category applies
 */
public SolutionCategory getCategory(List<List<ColumnName>> names) {
    Piece xPiece = null;
    // find the "x" piece
    for(Piece p: pieces) {
        if ("x".equals(p.name)) {
            xPiece = p;
            break;
        }
    }
    // find the row containing the "x"
    for(List<ColumnName> row: names) {
        if (row.contains(xPiece)) {
            // figure out where the "x" is located: compute the bounding box
            // of all board points in this row.
            int low_x = width;
            int high_x = 0;
            int low_y = height;
            int high_y = 0;
            for(ColumnName col: row) {
                if (col instanceof Point) {
                    int x = ((Point) col).x;
                    int y = ((Point) col).y;
                    if (x < low_x) {
                        low_x = x;
                    }
                    if (x > high_x) {
                        high_x = x;
                    }
                    if (y < low_y) {
                        low_y = y;
                    }
                    if (y > high_y) {
                        high_y = y;
                    }
                }
            }
            // A bounding box is centered on an axis when its low+high edges
            // mirror around the board midpoint (low + high == size - 1).
            boolean mid_x = (low_x + high_x == width - 1);
            boolean mid_y = (low_y + high_y == height - 1);
            if (mid_x && mid_y) {
                return SolutionCategory.CENTER;
            } else if (mid_x) {
                return SolutionCategory.MID_X;
            } else if (mid_y) {
                return SolutionCategory.MID_Y;
            }
            // Only one row can contain the "x"; stop scanning.
            break;
        }
    }
    return SolutionCategory.UPPER_LEFT;
}
|
python
|
def from_result(cls, container, result):
    """Create from ambiguous result.

    Dispatches on the boto result type: prefixes become pseudo-directories,
    keys become objects; anything else is an error.
    """
    if result is None:
        raise errors.NoObjectException
    if cls.is_prefix(result):
        return cls.from_prefix(container, result)
    if cls.is_key(result):
        return cls.from_key(container, result)
    raise errors.CloudException("Unknown boto result type: %s" %
                                type(result))
|
python
|
def traffic(stack_name: str,
            stack_version: Optional[str],
            percentage: Optional[int],
            region: Optional[str],
            remote: Optional[str],
            output: Optional[str]):
    '''Manage stack traffic.

    Without a percentage, prints the current traffic weights for all
    completed stacks matching stack_name; with a percentage, requests a
    traffic change for the given stack version.
    '''
    lizzy = setup_lizzy_client(remote)
    if percentage is None:
        # Read-only mode: report current weights.
        stack_reference = [stack_name]
        with Action('Requesting traffic info..'):
            stack_weights = []
            for stack in lizzy.get_stacks(stack_reference, region=region):
                # Only stacks that finished creating/updating carry traffic.
                if stack['status'] in ['CREATE_COMPLETE', 'UPDATE_COMPLETE']:
                    stack_id = '{stack_name}-{version}'.format_map(stack)
                    traffic = lizzy.get_traffic(stack_id, region=region)
                    stack_weights.append({
                        'stack_name': stack_name,
                        'version': stack['version'],
                        'identifier': stack_id,
                        'weight%': traffic['weight']
                    })
        cols = 'stack_name version identifier weight%'.split()
        with OutputFormat(output):
            print_table(cols,
                        sorted(stack_weights, key=lambda x: x['identifier']))
    else:
        with Action('Requesting traffic change..'):
            # NOTE: format_map(locals()) relies on the parameter names
            # stack_name / stack_version -- do not rename them.
            stack_id = '{stack_name}-{stack_version}'.format_map(locals())
            lizzy.traffic(stack_id, percentage, region=region)
|
python
|
def SetOption(self, section, option, value, overwrite=True):
    """Store an option value in the config file.

    Args:
      section: string, the config-file section to write to.
      option: string, the option whose value is set.
      value: string, the value to store (stringified).
      overwrite: bool, when False an already-present value is kept.
    """
    if not overwrite and self.config.has_option(section, option):
        # Existing value wins when overwriting is disabled.
        return
    if not self.config.has_section(section):
        self.config.add_section(section)
    self.config.set(section, option, str(value))
|
java
|
/**
 * Converts raw Bitstamp transactions into a {@code Trades} object, tracking
 * the highest transaction id seen as the last trade id.
 *
 * @param transactions the raw transactions from the exchange
 * @param currencyPair the pair the transactions belong to
 * @return the adapted trades, sorted by id
 */
public static Trades adaptTrades(BitstampTransaction[] transactions, CurrencyPair currencyPair) {
    final List<Trade> trades = new ArrayList<>();
    long lastTradeId = 0;
    for (BitstampTransaction transaction : transactions) {
        lastTradeId = Math.max(lastTradeId, transaction.getTid());
        trades.add(adaptTrade(transaction, currencyPair, 1000));
    }
    return new Trades(trades, lastTradeId, TradeSortType.SortByID);
}
|
python
|
def get_objects(self, path, marker=None,
                limit=settings.CLOUD_BROWSER_DEFAULT_LIST_LIMIT):
    """List objects under `path`, honoring `marker` and `limit`."""
    from itertools import islice
    if path:
        path = path.rstrip(SEP) + SEP
    listing = self.native_container.list(path, SEP, marker)
    # Get +1 results because marker and first item can match as we strip
    # the separator from results obscuring things. No real problem here
    # because boto masks any real request limits.
    fetched = list(islice(listing, limit + 1))
    if fetched:
        if marker and fetched[0].name.rstrip(SEP) == marker.rstrip(SEP):
            # First entry is the marker itself; drop it.
            fetched = fetched[1:]
        else:
            fetched = fetched[:limit]
    return [self.obj_cls.from_result(self, entry) for entry in fetched]
|
java
|
/**
 * Writes a consumer offset for a topic partition into ZooKeeper under the
 * group's consumer-offset directory, creating the path if necessary.
 *
 * @param curatorClient Curator client connected to the ZooKeeper ensemble
 * @param groupId consumer group id
 * @param topic Kafka topic name
 * @param partition partition number (appended to the offset directory path)
 * @param offset offset value, stored as its decimal string bytes
 * @throws Exception if the path cannot be ensured or the data cannot be set
 */
public static void setOffsetInZooKeeper(CuratorFramework curatorClient, String groupId, String topic, int partition, long offset) throws Exception {
    ZKGroupTopicDirs topicDirs = new ZKGroupTopicDirs(groupId, topic);
    String path = topicDirs.consumerOffsetDir() + "/" + partition;
    // Make sure the full znode path exists before writing.
    curatorClient.newNamespaceAwareEnsurePath(path).ensure(curatorClient.getZookeeperClient());
    byte[] data = Long.toString(offset).getBytes(ConfigConstants.DEFAULT_CHARSET);
    curatorClient.setData().forPath(path, data);
}
|
python
|
def intensity_measure_types(value):
    """
    :param value: input string
    :returns: non-empty list of Intensity Measure Type objects

    >>> intensity_measure_types('PGA')
    ['PGA']
    >>> intensity_measure_types('PGA, SA(1.00)')
    ['PGA', 'SA(1.0)']
    >>> intensity_measure_types('SA(0.1), SA(0.10)')
    Traceback (most recent call last):
    ...
    ValueError: Duplicated IMTs in SA(0.1), SA(0.10)
    >>> intensity_measure_types('SA(1), PGA')
    Traceback (most recent call last):
    ...
    ValueError: The IMTs are not sorted by period: SA(1), PGA
    """
    imts = [imt.from_string(chunk.strip()) for chunk in value.split(',')]
    sorted_imts = sorted(imts, key=lambda im: getattr(im, 'period', 1))
    if len(distinct(imts)) < len(imts):
        raise ValueError('Duplicated IMTs in %s' % value)
    if sorted_imts != imts:
        raise ValueError('The IMTs are not sorted by period: %s' % value)
    return [str(im) for im in imts]
|
python
|
def dest(self):
    """Create link destination details."""
    if hasattr(self, "parent") and self.parent is None:
        raise ValueError("orphaned object: parent is None")
    doc = self.parent.parent
    if doc.isClosed or doc.isEncrypted:
        raise ValueError("operation illegal for closed / encrypted doc")
    # External links and in-page fragments carry no resolvable target URI.
    if self.isExternal or self.uri.startswith("#"):
        uri = None
    else:
        uri = doc.resolveLink(self.uri)
    return linkDest(self, uri)
|
python
|
def to_python(fname, *args):
    """
    Parse a NRML file and return the associated Python object, by
    chaining nrml.read() with node_to_obj().
    """
    # Exactly one top-level node is expected in the file.
    node, = read(fname)
    return node_to_obj(node, fname, *args)
|
java
|
/**
 * Replaces the stored stack ids with a defensive internal copy of the given
 * collection, or clears them when {@code null} is supplied.
 *
 * @param stackIds the stack ids, or {@code null} to clear
 */
public void setStackIds(java.util.Collection<String> stackIds) {
    this.stackIds = (stackIds == null)
            ? null
            : new com.amazonaws.internal.SdkInternalList<String>(stackIds);
}
|
java
|
/**
 * REST: POST /sms/{serviceName}/templatesControl
 * Submits a new SMS template for moderation control.
 *
 * @param serviceName the SMS service identifier (substituted into the path)
 * @param activity the template activity type
 * @param description template description
 * @param message template message body
 * @param name template name
 * @param reason reason for the submission
 * @throws IOException if the HTTP call fails
 */
public void serviceName_templatesControl_POST(String serviceName, OvhTypeTemplateEnum activity, String description, String message, String name, String reason) throws IOException {
    String qPath = "/sms/{serviceName}/templatesControl";
    StringBuilder sb = path(qPath, serviceName);
    HashMap<String, Object>o = new HashMap<String, Object>();
    addBody(o, "activity", activity);
    addBody(o, "description", description);
    addBody(o, "message", message);
    addBody(o, "name", name);
    addBody(o, "reason", reason);
    exec(qPath, "POST", sb.toString(), o);
}
|
python
|
def save_intermediate_img(self, img, name):
    """Persist an intermediate FITS object, but only when intermediate
    result saving is enabled on this instance."""
    if not self.intermediate_results:
        return
    img.writeto(name, overwrite=True)
|
java
|
/**
 * Validates a child-node definition against JCR constraints and against the
 * definitions it overrides in the given supertypes.
 *
 * @param childNodeDefinition the definition being validated
 * @param supertypes the supertypes whose definitions may be overridden
 * @param pendingTypes node types being registered in the same batch (types
 *        not yet visible in the repository are looked up here)
 * @throws RepositoryException if the definition violates any constraint
 */
private void validateChildNodeDefinition( JcrNodeDefinition childNodeDefinition,
                                          List<Name> supertypes,
                                          List<JcrNodeType> pendingTypes ) throws RepositoryException {
    // An auto-created, non-protected node must say what type to create.
    if (childNodeDefinition.isAutoCreated() && !childNodeDefinition.isProtected()
        && childNodeDefinition.defaultPrimaryTypeName() == null) {
        throw new InvalidNodeTypeDefinitionException(JcrI18n.autocreatedNodesNeedDefaults.text(childNodeDefinition.getName()));
    }
    boolean residual = JcrNodeType.RESIDUAL_ITEM_NAME.equals(childNodeDefinition.getName());
    // Residual (wildcard-named) definitions cannot be mandatory or auto-created.
    if (childNodeDefinition.isMandatory() && residual) {
        throw new InvalidNodeTypeDefinitionException(
                JcrI18n.residualNodeDefinitionsCannotBeMandatory.text(childNodeDefinition.getName()));
    }
    if (childNodeDefinition.isAutoCreated() && residual) {
        throw new InvalidNodeTypeDefinitionException(
                JcrI18n.residualNodeDefinitionsCannotBeAutoCreated.text(childNodeDefinition.getName()));
    }
    Name childNodeName = context.getValueFactories().getNameFactory().create(childNodeDefinition.getName());
    childNodeName = childNodeName == null ? JcrNodeType.RESIDUAL_NAME : childNodeName;
    List<JcrNodeDefinition> childNodesInAncestors = findChildNodeDefinitions(supertypes, childNodeName, NodeCardinality.ANY,
                                                                             pendingTypes);
    // Check compatibility with every overridden ancestor definition.
    for (JcrNodeDefinition childNodeFromAncestor : childNodesInAncestors) {
        if (childNodeFromAncestor.isProtected()) {
            throw new InvalidNodeTypeDefinitionException(
                    JcrI18n.cannotOverrideProtectedDefinition.text(childNodeFromAncestor.getDeclaringNodeType()
                                                                                        .getName(),
                                                                   "child node"));
        }
        if (childNodeFromAncestor.isMandatory() && !childNodeDefinition.isMandatory()) {
            throw new InvalidNodeTypeDefinitionException(
                    JcrI18n.cannotMakeMandatoryDefinitionOptional.text(childNodeFromAncestor.getDeclaringNodeType()
                                                                                            .getName(),
                                                                       "child node"));
        }
        Name[] requiredPrimaryTypeNames = childNodeFromAncestor.requiredPrimaryTypeNames();
        for (Name requiredPrimaryTypeName : requiredPrimaryTypeNames) {
            NodeType requiredPrimaryTypeFromAncestor = findTypeInMapOrList(requiredPrimaryTypeName, pendingTypes);
            if (requiredPrimaryTypeFromAncestor == null) {
                I18n msg = JcrI18n.couldNotFindDefinitionOfRequiredPrimaryType;
                throw new InvalidNodeTypeDefinitionException(msg.text(requiredPrimaryTypeName, childNodeDefinition.getName(),
                                                                      childNodeDefinition.getDeclaringNodeType()));
            }
            // This definition's required types must be subtypes of (or equal to)
            // each required type inherited from the ancestor.
            boolean found = false;
            for (Name name : childNodeDefinition.requiredPrimaryTypeNames()) {
                JcrNodeType childNodePrimaryType = findTypeInMapOrList(name, pendingTypes);
                if (childNodePrimaryType != null
                    && childNodePrimaryType.isNodeType(requiredPrimaryTypeFromAncestor.getName())) {
                    found = true;
                    break;
                }
            }
            // Allow side-by-side definitions of residual child nodes per JCR 1.0.1 spec 6.7.8
            if (!found && !residual) {
                I18n msg = JcrI18n.cannotRedefineChildNodeWithIncompatibleDefinition;
                throw new InvalidNodeTypeDefinitionException(msg.text(childNodeName,
                                                                      requiredPrimaryTypeFromAncestor.getName(),
                                                                      childNodeDefinition.getDeclaringNodeType()));
            }
        }
    }
}
|
python
|
def execute_one(correlation_id, component, args):
    """
    Executes specific component.

    To be executed components must implement [[IExecutable]] interface.
    If they don't the call to this method has no effect.

    :param correlation_id: (optional) transaction id to trace execution through call chain.
    :param component: the component that is to be executed.
    :param args: execution arguments.
    :return: execution result, or None for non-executable components
    """
    if not isinstance(component, IExecutable):
        return None
    return component.execute(correlation_id, args)
|
python
|
def equal(a, b):
    """Return True iff 'a' and 'b' contain the same elements, irrespective
    of order (mutual containment check).

    # TODO: Python 2.4 has a proper set class.
    """
    assert is_iterable(a)
    assert is_iterable(b)
    # Two-way containment == same element sets.
    return contains(a, b) and contains(b, a)
|
python
|
async def load_by_path(self, path):
    """
    Load a module by full path. If there are dependencies, they are also loaded.

    :param path: dotted full path of the module to load.
    :return: result of self.loadmodule for the resolved module.
    :raises ModuleLoadException: if the module cannot be found or loaded.
    """
    try:
        p, module = findModule(path, True)
    except KeyError as exc:
        # Fix: add the missing space so the message reads
        # "...: 'name' is not defined in the package" instead of "'name'is".
        raise ModuleLoadException('Cannot load module ' + repr(path) + ': ' + str(exc) + ' is not defined in the package')
    except Exception as exc:
        raise ModuleLoadException('Cannot load module ' + repr(path) + ': ' + str(exc))
    if module is None:
        raise ModuleLoadException('Cannot find module: ' + repr(path))
    return await self.loadmodule(module)
|
java
|
/**
 * REST: GET /pack/xdsl/{packName}/voipLine/services/{domain}
 * Fetches the VoIP line service attached to the given xDSL pack and domain.
 *
 * @param packName the xDSL pack identifier (substituted into the path)
 * @param domain the service domain (substituted into the path)
 * @return the deserialized VoIP line service
 * @throws IOException if the HTTP call fails
 */
public OvhVoipLineService packName_voipLine_services_domain_GET(String packName, String domain) throws IOException {
    String qPath = "/pack/xdsl/{packName}/voipLine/services/{domain}";
    StringBuilder sb = path(qPath, packName, domain);
    String resp = exec(qPath, "GET", sb.toString(), null);
    return convertTo(resp, OvhVoipLineService.class);
}
|
python
|
def draw_sample(self, Xstar, n=0, num_samp=1, rand_vars=None,
                rand_type='standard normal', diag_factor=1e3,
                method='cholesky', num_eig=None, mean=None, cov=None,
                modify_sign=None, **kwargs):
    """Draw a sample evaluated at the given points `Xstar`.

    Note that this function draws samples from the GP given the current
    values for the hyperparameters (which may be in a nonsense state if you
    just created the instance or called a method that performs MCMC sampling).
    If you want to draw random samples from MCMC output, use the
    `return_samples` and `full_output` keywords to :py:meth:`predict`.

    Parameters
    ----------
    Xstar : array, (`M`, `D`)
        `M` test input values of dimension `D`.
    n : array, (`M`, `D`) or scalar, non-negative int, optional
        Derivative order to evaluate at. Default is 0 (evaluate value).
    noise : bool, optional
        Whether or not to include the noise components of the kernel in the
        sample. Default is False (no noise in samples).
    num_samp : Positive int, optional
        Number of samples to draw. Default is 1. Cannot be used in
        conjunction with `rand_vars`: If you pass both `num_samp` and
        `rand_vars`, `num_samp` will be silently ignored.
    rand_vars : array, (`M`, `P`), optional
        Vector of random variables :math:`u` to use in constructing the
        sample :math:`y_* = f_* + Lu`, where :math:`K=LL^T`. If None,
        values will be produced using
        :py:func:`numpy.random.multivariate_normal`. This allows you to use
        pseudo/quasi random numbers generated by an external routine. Note
        that, when `method` is 'eig', the eigenvalues are in *ascending*
        order.
        Default is None (use :py:func:`multivariate_normal` directly).
    rand_type : {'standard normal', 'uniform'}, optional
        Type of distribution the inputs are given with.
        * 'standard normal': Standard (`mu` = 0, `sigma` = 1) normal
          distribution (this is the default)
        * 'uniform': Uniform distribution on [0, 1). In this case
          the required Gaussian variables are produced with inversion.
    diag_factor : float, optional
        Number (times machine epsilon) added to the diagonal of the
        covariance matrix prior to computing its Cholesky decomposition.
        This is necessary as sometimes the decomposition will fail because,
        to machine precision, the matrix appears to not be positive definite.
        If you are getting errors from :py:func:`scipy.linalg.cholesky`, try
        increasing this an order of magnitude at a time. This parameter only
        has an effect when using rand_vars. Default value is 1e3.
    method : {'cholesky', 'eig'}, optional
        Method to use for constructing the matrix square root. Default is
        'cholesky' (use lower-triangular Cholesky decomposition).
        * 'cholesky': Perform Cholesky decomposition on the covariance
          matrix: :math:`K=LL^T`, use :math:`L` as the matrix square
          root.
        * 'eig': Perform an eigenvalue decomposition on the covariance
          matrix: :math:`K=Q \\Lambda Q^{-1}`, use :math:`Q\\Lambda^{1/2}`
          as the matrix square root.
    num_eig : int or None, optional
        Number of eigenvalues to compute. Can range from 1 to `M` (the
        number of test points). If it is None, then all eigenvalues are
        computed. Default is None (compute all eigenvalues). This keyword
        only has an effect if `method` is 'eig'.
    mean : array, (`M`,), optional
        If you have pre-computed the mean and covariance matrix, then you
        can simply pass them in with the `mean` and `cov` keywords to save
        on having to call :py:meth:`predict`.
    cov : array, (`M`, `M`), optional
        If you have pre-computed the mean and covariance matrix, then you
        can simply pass them in with the `mean` and `cov` keywords to save
        on having to call :py:meth:`predict`.
    modify_sign : {None, 'left value', 'right value', 'left slope', 'right slope', 'left concavity', 'right concavity'}, optional
        If None (the default), the eigenvectors as returned by
        :py:func:`scipy.linalg.eigh` are used without modification. To
        modify the sign of the eigenvectors (necessary for some advanced use
        cases), set this kwarg to one of the following:
        * 'left value': forces the first value of each eigenvector to be
          positive.
        * 'right value': forces the last value of each eigenvector to be
          positive.
        * 'left slope': forces the slope to be positive at the start of
          each eigenvector.
        * 'right slope': forces the slope to be positive at the end of
          each eigenvector.
        * 'left concavity': forces the second derivative to be positive
          at the start of each eigenvector.
        * 'right concavity': forces the second derivative to be positive
          at the end of each eigenvector.
    **kwargs : optional kwargs
        All extra keyword arguments are passed to :py:meth:`predict` when
        evaluating the mean and covariance matrix of the GP.

    Returns
    -------
    samples : :py:class:`Array` (`M`, `P`) or (`M`, `num_samp`)
        Samples evaluated at the `M` points.

    Raises
    ------
    ValueError
        If rand_type or method is invalid.
    """
    # All of the input processing for Xstar and n will be done in here:
    if mean is None or cov is None:
        out = self.predict(Xstar, n=n, full_output=True, **kwargs)
        mean = out['mean']
        cov = out['cov']
    if rand_vars is None and method != 'eig':
        try:
            return numpy.random.multivariate_normal(mean, cov, num_samp).T
        except numpy.linalg.LinAlgError as e:
            if self.verbose:
                warnings.warn(
                    "Failure when drawing from MVN! Falling back on eig. "
                    "Exception was:\n%s"
                    % (e,),
                    RuntimeWarning
                )
            method = 'eig'
    # Clamp num_eig into [1, M].
    if num_eig is None or num_eig > len(mean):
        num_eig = len(mean)
    elif num_eig < 1:
        num_eig = 1
    if rand_vars is None:
        rand_vars = numpy.random.standard_normal((num_eig, num_samp))
    valid_types = ('standard normal', 'uniform')
    if rand_type not in valid_types:
        raise ValueError(
            "rand_type %s not recognized! Valid options are: %s."
            % (rand_type, valid_types,)
        )
    if rand_type == 'uniform':
        # Convert uniform draws to Gaussian via the normal inverse CDF.
        rand_vars = scipy.stats.norm.ppf(rand_vars)
    # NOTE: the deprecated top-level scipy aliases (scipy.eye, scipy.diag,
    # scipy.sqrt, scipy.atleast_2d) were removed in modern SciPy; the numpy
    # equivalents below are drop-in replacements.
    if method == 'cholesky':
        L = scipy.linalg.cholesky(
            cov + diag_factor * sys.float_info.epsilon * numpy.eye(cov.shape[0]),
            lower=True,
            check_finite=False
        )
    elif method == 'eig':
        # TODO: Add support for specifying cutoff eigenvalue!
        # Not technically lower triangular, but we'll keep the name L:
        eig, Q = scipy.linalg.eigh(
            cov + diag_factor * sys.float_info.epsilon * numpy.eye(cov.shape[0]),
            eigvals=(len(mean) - 1 - (num_eig - 1), len(mean) - 1)
        )
        if modify_sign is not None:
            if modify_sign == 'left value':
                modify_mask = (Q[0, :] < 0.0)
            elif modify_sign == 'right value':
                modify_mask = (Q[-1, :] < 0.0)
            elif modify_sign == 'left slope':
                modify_mask = ((Q[1, :] - Q[0, :]) < 0.0)
            elif modify_sign == 'right slope':
                modify_mask = ((Q[-1, :] - Q[-2, :]) < 0.0)
            elif modify_sign == 'left concavity':
                modify_mask = ((Q[2, :] - 2 * Q[1, :] + Q[0, :]) < 0.0)
            elif modify_sign == 'right concavity':
                modify_mask = ((Q[-1, :] - 2 * Q[-2, :] + Q[-3, :]) < 0.0)
            else:
                raise ValueError(
                    "modify_sign %s not recognized!" % (modify_sign,)
                )
            Q[:, modify_mask] *= -1.0
        Lam_1_2 = numpy.diag(numpy.sqrt(eig))
        L = Q.dot(Lam_1_2)
    else:
        raise ValueError("method %s not recognized!" % (method,))
    return numpy.atleast_2d(mean).T + L.dot(rand_vars[:num_eig, :])
|
python
|
def fetcher_with_object(cls, parent_object, relationship="child"):
    """ Register the fetcher for a served object.

    This method will fill the fetcher with `managed_class` instances.

    Args:
        parent_object: the instance of the parent object to serve
        relationship: relationship label stored on the fetcher
            (defaults to "child")

    Returns:
        It returns the fetcher instance.
    """
    fetcher = cls()
    fetcher.parent_object = parent_object
    fetcher.relationship = relationship
    parent_object.register_fetcher(fetcher, cls.managed_object_rest_name())
    return fetcher
|
java
|
/**
 * Returns the cached empty path, lazily creating it on first use.
 *
 * @return the path consisting of a single empty name
 */
public JimfsPath emptyPath() {
    JimfsPath result = emptyPath;
    if (result == null) {
        // use createPathInternal to avoid recursive call from createPath()
        result = createPathInternal(null, ImmutableList.of(Name.EMPTY));
        emptyPath = result;
    }
    return result;
}
|
python
|
def update_job_queue(self, queue_name, priority, state, compute_env_order):
    """
    Update a job queue.

    :param queue_name: Queue name
    :type queue_name: str
    :param priority: Queue priority
    :type priority: int
    :param state: Queue state
    :type state: string
    :param compute_env_order: Compute environment list
    :type compute_env_order: list of dict
    :return: Tuple of Name, ARN
    :rtype: tuple of str
    :raises ClientException: for missing/unknown queue, bad state, or a
        malformed compute environment order
    """
    if queue_name is None:
        raise ClientException('jobQueueName must be provided')
    job_queue = self.get_job_queue(queue_name)
    if job_queue is None:
        raise ClientException('Job queue {0} does not exist'.format(queue_name))
    if state is not None:
        if state not in ('ENABLED', 'DISABLED'):
            raise ClientException('state {0} must be one of ENABLED | DISABLED'.format(state))
        job_queue.state = state
    if compute_env_order is not None:
        if len(compute_env_order) == 0:
            raise ClientException('At least 1 compute environment must be provided')
        try:
            # orders and extracts computeEnvironment names
            ordered_compute_environments = [item['computeEnvironment'] for item in sorted(compute_env_order, key=lambda x: x['order'])]
            env_objects = []
            # Check each ARN exists, then make a list of compute env's
            for arn in ordered_compute_environments:
                env = self.get_compute_environment_by_arn(arn)
                if env is None:
                    raise ClientException('Compute environment {0} does not exist'.format(arn))
                env_objects.append(env)
        except Exception:
            # NOTE(review): this broad except also swallows the specific
            # "does not exist" ClientException raised just above and
            # re-reports it as "malformed" -- confirm this is intentional.
            raise ClientException('computeEnvironmentOrder is malformed')
        job_queue.env_order_json = compute_env_order
        job_queue.environments = env_objects
    if priority is not None:
        job_queue.priority = priority
    return queue_name, job_queue.arn
|
java
|
/**
 * Returns the at-the-money forward (par swap rate) of this swap under the
 * given model, optionally excluding the first period. The result is cached
 * and reused while the same model and the same flag are requested.
 *
 * @param model the analytic model providing curves
 * @param isFirstPeriodIncluded false to drop the first schedule period
 *        (only when more than one period exists)
 * @return the ATM forward: float-leg value divided by annuity value
 */
public double getATMForward(AnalyticModelInterface model, boolean isFirstPeriodIncluded) {
    // Serve from cache when the model (held via SoftReference) and flag match.
    if(!Double.isNaN(cachedATMForward) && cacheStateModel.get() == model && cacheStateIsFirstPeriodIncluded == isFirstPeriodIncluded) {
        return cachedATMForward;
    }
    ScheduleInterface remainderSchedule = schedule;
    if(!isFirstPeriodIncluded) {
        ArrayList<Period> periods = new ArrayList<Period>();
        periods.addAll(schedule.getPeriods());
        // Keep at least one period; only remove the first if another remains.
        if(periods.size() > 1) {
            periods.remove(0);
        }
        remainderSchedule = new Schedule(schedule.getReferenceDate(), periods, schedule.getDaycountconvention());
    }
    // Par rate = value of the floating leg divided by the annuity.
    SwapLeg floatLeg = new SwapLeg(remainderSchedule, forwardCurveName, 0.0, discountCurveName, false);
    SwapLeg annuityLeg = new SwapLeg(remainderSchedule, null, 1.0, discountCurveName, false);
    cachedATMForward = floatLeg.getValue(model) / annuityLeg.getValue(model);
    cacheStateModel = new SoftReference<AnalyticModelInterface>(model);
    cacheStateIsFirstPeriodIncluded = isFirstPeriodIncluded;
    return cachedATMForward;
}
|
python
|
def off(self, name, callback, once=False):
    """
    Removes callback from the event specified by name.

    A no-op when the event name is unknown or the (callback, once) pair
    was never registered -- previously an unregistered pair raised
    ValueError while an unknown name was silently tolerated; both cases
    are now tolerated consistently.

    :param name: event name the callback was registered under.
    :param callback: the callback to remove.
    :param once: the `once` flag the callback was registered with.
    """
    listeners = self.event_listeners.get(name)
    if listeners is None:
        return
    try:
        listeners.remove((callback, once))
    except ValueError:
        # Pair was not registered; nothing to remove.
        pass
|
java
|
/**
 * Serializes an IfcDocumentConfidentialityEnum literal to its string form.
 *
 * @param eDataType the datatype descriptor (unused)
 * @param instanceValue the enum literal, possibly null
 * @return the literal's string representation, or null for null input
 */
public String convertIfcDocumentConfidentialityEnumToString(EDataType eDataType, Object instanceValue) {
    if (instanceValue == null) {
        return null;
    }
    return instanceValue.toString();
}
|
java
|
/**
 * Renders a single byte as its two-character hexadecimal representation.
 *
 * @param b the byte to convert
 * @return the hex string for {@code b}
 */
public static String toHex(byte b) {
    // Exactly two hex characters are produced for one byte.
    final StringBuilder buffer = new StringBuilder(2);
    appendByteAsHex(buffer, b);
    return buffer.toString();
}
|
python
|
def touch(self, mode=0o666, exist_ok=True):
    """Create a fake file for the path with the given access mode,
    if it doesn't exist.

    Args:
        mode: the file mode for the file if it does not exist
        exist_ok: if the file already exists and this is True, nothing
            happens, otherwise FileExistError is raised

    Raises:
        OSError: (Python 2 only) if the file exists and exits_ok is False.
        FileExistsError: (Python 3 only) if the file exists and exits_ok is
            False.
    """
    if self._closed:
        self._raise_closed()
    if self.exists():
        if exist_ok:
            # Existing file: just bump its access/modification times.
            self.filesystem.utime(self._path(), None)
        else:
            self.filesystem.raise_os_error(errno.EEXIST, self._path())
    else:
        # Create an empty file, then apply the requested mode.
        fake_file = self.open('w')
        fake_file.close()
        self.chmod(mode)
|
python
|
def actual_original_query_range(self):
    """ This accounts for hard clipped bases
    and a query sequence that hasnt been reverse complemented.

    :return: the range covered on the original query sequence
    :rtype: GenomicRange
    """
    seq_len = self.original_query_sequence_length
    ranges = self.alignment_ranges
    query_name = ranges[0][1].chr
    query_start = ranges[0][1].start
    query_end = ranges[-1][1].end
    if self.strand == '-':
        # Mirror the coordinates for a minus-strand alignment.
        start = 1 + seq_len - query_end
        end = seq_len - (query_start - 1)
    else:
        start = query_start
        end = query_end
    return GenomicRange(query_name, start, end, dir=self.strand)
|
java
|
/**
 * Creates an approximate nearest-neighbor search backed by a forest of
 * randomized K-D trees using best-bin-first traversal.
 *
 * @param distance distance metric between points
 * @param maxNodesSearched cap on nodes examined during a search
 * @param numTrees number of randomized trees in the forest
 * @param numConsiderSplit number of candidate split axes considered
 * @param randomSeed seed for the split-axis randomization
 * @param <P> point type
 * @return the configured forest-based nearest-neighbor search
 */
public static <P> NearestNeighbor<P> kdRandomForest( KdTreeDistance<P> distance ,
                                                     int maxNodesSearched , int numTrees , int numConsiderSplit ,
                                                     long randomSeed ) {
    final Random random = new Random(randomSeed);
    final AxisSplitRuleRandomK splitRule = new AxisSplitRuleRandomK(random, numConsiderSplit);
    final AxisSplitterMedian<P> splitter = new AxisSplitterMedian<>(distance, splitRule);
    return new KdForestBbfSearch<>(numTrees, maxNodesSearched, distance, splitter);
}
|
java
|
/**
 * Restricts resolution to a single media format (builder-style).
 *
 * @param value the media format, or null to clear any restriction
 * @return this instance for chaining
 */
public @NotNull MediaArgs mediaFormat(MediaFormat value) {
  this.mediaFormats = (value == null) ? null : new MediaFormat[] { value };
  return this;
}
|
java
|
/**
 * Reads an EPANET analysis option value.
 *
 * @param optionCode which option to read
 * @return the option's current value
 * @throws EpanetException if the native call reports an error
 */
public float ENgetoption( OptionParameterCodes optionCode ) throws EpanetException {
    // The native API writes its result into a one-element output array.
    final float[] out = new float[1];
    checkError(epanet.ENgetoption(optionCode.getCode(), out));
    return out[0];
}
|
java
|
/** Renders a human-readable summary of an Ehcache cache configuration. */
private String buildConfiguration(Object cache) {
	// getCacheConfiguration() and getMaxElementsOnDisk() do not exist in ehcache 1.2
	final CacheConfiguration config = ((Ehcache) cache).getCacheConfiguration();
	final StringBuilder result = new StringBuilder();
	result.append("ehcache [maxElementsInMemory = ").append(config.getMaxElementsInMemory());
	final boolean overflowToDisk = config.isOverflowToDisk();
	result.append(", overflowToDisk = ").append(overflowToDisk);
	if (overflowToDisk) {
		result.append(", maxElementsOnDisk = ").append(config.getMaxElementsOnDisk());
	}
	final boolean eternal = config.isEternal();
	result.append(", eternal = ").append(eternal);
	if (!eternal) {
		// TTL/TTI and eviction policy only matter for non-eternal caches.
		result.append(", timeToLiveSeconds = ").append(config.getTimeToLiveSeconds());
		result.append(", timeToIdleSeconds = ").append(config.getTimeToIdleSeconds());
		result.append(", memoryStoreEvictionPolicy = ").append(config.getMemoryStoreEvictionPolicy());
	}
	result.append(", diskPersistent = ").append(config.isDiskPersistent());
	return result.append(']').toString();
}
|
python
|
def Fisher(d1, d2, tag=None):
    """
    An F (fisher) random variate

    Parameters
    ----------
    d1 : int
        Numerator degrees of freedom
    d2 : int
        Denominator degrees of freedom
    """
    # Both degrees of freedom must be positive integers.
    for name, dof in (("d1", d1), ("d2", d2)):
        assert int(dof) == dof and dof >= 1, (
            'Fisher (F) "%s" must be an integer greater than 0' % name
        )
    return uv(ss.f(d1, d2), tag=tag)
|
java
|
/**
 * Adds an image to the given moderation list.
 *
 * @param listId the moderation list id (required)
 * @param tag optional tag for the image
 * @param label optional label for the image
 * @return an observable emitting the service response with the created image
 * @throws IllegalArgumentException if the base URL or listId is null
 */
public Observable<ServiceResponse<Image>> addImageWithServiceResponseAsync(String listId, Integer tag, String label) {
    if (this.client.baseUrl() == null) {
        throw new IllegalArgumentException("Parameter this.client.baseUrl() is required and cannot be null.");
    }
    if (listId == null) {
        throw new IllegalArgumentException("Parameter listId is required and cannot be null.");
    }
    // AutoRest parameterized-host convention: "{placeholder}, value" pairs
    // are substituted into the host template by the HTTP layer.
    String parameterizedHost = Joiner.on(", ").join("{baseUrl}", this.client.baseUrl());
    return service.addImage(listId, tag, label, this.client.acceptLanguage(), parameterizedHost, this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Image>>>() {
            @Override
            public Observable<ServiceResponse<Image>> call(Response<ResponseBody> response) {
                try {
                    // Deserialize/validate the raw response; errors become onError.
                    ServiceResponse<Image> clientResponse = addImageDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}
|
java
|
/**
 * Builds a WebClient for the CredHub server described by {@code properties},
 * using the supplied connector for HTTP transport.
 */
static WebClient createWebClient(CredHubProperties properties,
		ClientHttpConnector clientHttpConnector) {
	WebClient.Builder builder = buildWebClient(properties.getUrl(), clientHttpConnector);
	return builder.build();
}
|
python
|
def list_locks(root=None):
    '''
    List current package locks.

    root
        operate on a different root directory.

    Return a dict containing the locked package with attributes::

        {'<package>': {'case_sensitive': '<case_sensitive>',
                       'match_type': '<match_type>',
                       'type': '<type>'}}

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.list_locks
    '''
    locks = {}
    # When a root is given, re-anchor the locks file path under it.
    _locks = os.path.join(root, os.path.relpath(LOCKS, os.path.sep)) if root else LOCKS
    try:
        with salt.utils.files.fopen(_locks) as fhr:
            # Lock entries are blank-line-separated blocks of "key: value" lines.
            items = salt.utils.stringutils.to_unicode(fhr.read()).split('\n\n')
            for meta in [item.split('\n') for item in items]:
                lock = {}
                for element in [el for el in meta if el]:
                    if ':' in element:
                        lock.update(dict([tuple([i.strip() for i in element.split(':', 1)]), ]))
                # Only blocks naming a solvable are real locks; the name
                # becomes the dict key, the rest stay as attributes.
                if lock.get('solvable_name'):
                    locks[lock.pop('solvable_name')] = lock
    except IOError:
        # Missing locks file simply means no locks.
        pass
    except Exception:
        log.warning('Detected a problem when accessing %s', _locks)
    return locks
|
python
|
def update_forward_refs(cls, **localns: Any) -> None:
    """
    Try to update ForwardRefs on fields based on this Model, globalns and localns.
    """
    # Resolve string annotations against the module that defined the class.
    globalns = sys.modules[cls.__module__].__dict__
    # Make the class itself resolvable (supports self-referencing models).
    globalns.setdefault(cls.__name__, cls)
    for f in cls.__fields__.values():
        update_field_forward_refs(f, globalns=globalns, localns=localns)
|
python
|
def CrossEntropyFlat(*args, axis:int=-1, **kwargs):
    "Same as `nn.CrossEntropyLoss`, but flattens input and target."
    loss_func = FlattenedLoss(nn.CrossEntropyLoss, *args, axis=axis, **kwargs)
    return loss_func
|
java
|
/**
 * Sets this bundle's id, rejecting the reserved clientside-handler path.
 *
 * @param bundleId the bundle id to set
 * @throws IllegalArgumentException if the id collides with the clientside handler path
 */
public void setBundleId(String bundleId) {
	// The clientside handler path is reserved; a bundle must not shadow it.
	if (JawrRequestHandler.CLIENTSIDE_HANDLER_REQ_PATH.equals(bundleId)) {
		throw new IllegalArgumentException("The provided id [" + JawrRequestHandler.CLIENTSIDE_HANDLER_REQ_PATH
				+ "] can't be used since it's the same as the clientside handler path. Please change this id (or the name of the script)");
	}
	this.bundleId = bundleId;
}
|
python
|
def color(out_string, color='grn'):
    """ Highlight string for terminal color coding.

    Purpose: We use this utility function to insert a ANSI/win32 color code
           | and Bright style marker before a string, and reset the color and
           | style after the string. We then return the string with these
           | codes inserted.

    @param out_string: the string to be colored
    @type out_string: str
    @param color: a string signifying which color to use. Defaults to 'grn'.
                | Accepts the following colors:
                | ['blk', 'blu', 'cyn', 'grn', 'mag', 'red', 'wht', 'yel']
    @type color: str

    @returns: the modified string, including the ANSI/win32 color codes,
              or the unmodified string if coloring is unavailable or the
              color name is unknown.
    @rtype: str
    """
    palette = {
        'blk': Fore.BLACK,
        'blu': Fore.BLUE,
        'cyn': Fore.CYAN,
        'grn': Fore.GREEN,
        'mag': Fore.MAGENTA,
        'red': Fore.RED,
        'wht': Fore.WHITE,
        'yel': Fore.YELLOW,
    }
    try:
        init()
        return palette[color] + Style.BRIGHT + out_string + Fore.RESET + Style.NORMAL
    except (AttributeError, KeyError):
        # AttributeError: colorama is unavailable / not initialized.
        # KeyError (bug fix): an unknown color name previously escaped as an
        # uncaught exception; degrade gracefully to the uncolored string.
        return out_string
|
python
|
def send_template(self, template, to, reply_to=None, **context):
    """
    Send Template message

    :param template: name of the template to render/send.
    :param to: recipient address(es).
    :param reply_to: optional reply-to address.
    :param context: template context passed through to the provider.

    NOTE(review): a provider other than "SES"/"FLASK-MAIL" is silently
    ignored here — confirm that is intended.
    """
    if self.provider == "SES":
        self.mail.send_template(template=template, to=to, reply_to=reply_to, **context)
    elif self.provider == "FLASK-MAIL":
        # Render subject/body via ses_mailer, then deliver with Flask-Mail.
        ses_mail = ses_mailer.Mail(app=self.app)
        data = ses_mail.parse_template(template=template, **context)
        msg = flask_mail.Message(recipients=to,
                                 subject=data["subject"],
                                 body=data["body"],
                                 reply_to=reply_to,
                                 sender=self.app.config.get("MAIL_DEFAULT_SENDER")
                                 )
        self.mail.send(msg)
|
java
|
/**
 * Imports a relation of the given type from {@code resourceName} to
 * {@code targetPath} (delegates to {@code createRelation} in import mode).
 *
 * @param resourceName source resource
 * @param targetPath relation target path
 * @param relationType relation type identifier
 * @throws CmsException if the relation cannot be created
 */
public void importRelation(String resourceName, String targetPath, String relationType) throws CmsException {
    createRelation(resourceName, targetPath, relationType, true);
}
|
java
|
/**
 * Attaches a PROJECT node above {@code plan} defining the columns (and
 * column types) the query produces. A null/empty column list is treated
 * as SELECT * over all supplied selectors; otherwise each column is
 * validated against its selector's table.
 *
 * @param context the query context (hints, problems, type system)
 * @param plan the plan to wrap
 * @param columns requested columns; may be null or empty for SELECT *
 * @param selectors the tables keyed by selector name
 * @return the new PROJECT node with {@code plan} as its child
 */
protected PlanNode attachProject( QueryContext context,
                                  PlanNode plan,
                                  List<? extends Column> columns,
                                  Map<SelectorName, Table> selectors ) {
    PlanNode projectNode = new PlanNode(Type.PROJECT);
    List<Column> newColumns = new LinkedList<Column>();
    List<String> newTypes = new ArrayList<String>();
    final boolean multipleSelectors = selectors.size() > 1;
    final boolean qualifyExpandedColumns = context.getHints().qualifyExpandedColumnNames;
    if (columns == null || columns.isEmpty()) {
        // SELECT *, so find all of the columns that are available from all the sources ...
        for (Map.Entry<SelectorName, Table> entry : selectors.entrySet()) {
            SelectorName tableName = entry.getKey();
            Table table = entry.getValue();
            // Add the selector that is being used ...
            projectNode.addSelector(tableName);
            // Compute the columns from this selector ...
            allColumnsFor(table, tableName, newColumns, newTypes, qualifyExpandedColumns);
        }
    } else {
        // Add the selector used by each column ...
        for (Column column : columns) {
            SelectorName tableName = column.selectorName();
            // Add the selector that is being used ...
            projectNode.addSelector(tableName);
            // Verify that each column is available in the appropriate source ...
            Table table = selectors.get(tableName);
            if (table == null) {
                context.getProblems().addError(GraphI18n.tableDoesNotExist, tableName);
            } else {
                // Make sure that the column is in the table ...
                String columnName = column.getPropertyName();
                if ("*".equals(columnName) || columnName == null) {
                    // This is a 'SELECT *' on this source, but this source is one of multiple sources ...
                    // See https://issues.apache.org/jira/browse/JCR-3313; TCK test expects 'true' for last param
                    allColumnsFor(table, tableName, newColumns, newTypes, qualifyExpandedColumns);
                } else {
                    // This is a particular column, so add it ...
                    if (!newColumns.contains(column)) {
                        // Disambiguate same-named columns when several selectors are involved.
                        if (multipleSelectors && column.getPropertyName().equals(column.getColumnName())) {
                            column = column.withColumnName(column.getSelectorName() + "." + column.getColumnName());
                        }
                        newColumns.add(column);
                        org.modeshape.jcr.query.validate.Schemata.Column schemaColumn = table.getColumn(columnName);
                        if (schemaColumn != null) {
                            newTypes.add(schemaColumn.getPropertyTypeName());
                        } else {
                            newTypes.add(context.getTypeSystem().getDefaultType());
                        }
                    }
                }
                boolean validateColumnExistance = context.getHints().validateColumnExistance && !table.hasExtraColumns();
                boolean columnNameIsWildcard = columnName == null || "*".equals(columnName);
                if (!columnNameIsWildcard && table.getColumn(columnName) == null && validateColumnExistance) {
                    context.getProblems().addError(GraphI18n.columnDoesNotExistOnTable, columnName, tableName);
                }
            }
        }
    }
    projectNode.setProperty(Property.PROJECT_COLUMNS, newColumns);
    projectNode.setProperty(Property.PROJECT_COLUMN_TYPES, newTypes);
    projectNode.addLastChild(plan);
    return projectNode;
}
|
python
|
def set_default_locale(code: str) -> None:
    """Sets the default locale.

    The default locale is assumed to be the language used for all strings
    in the system.  The translations loaded from disk are mappings from
    the default locale to the destination locale.  Consequently, you don't
    need to create a translation file for the default locale.
    """
    global _default_locale, _supported_locales
    _default_locale = code
    # Supported locales are everything we have translations for, plus the
    # default locale itself.
    _supported_locales = frozenset(_translations) | {code}
|
java
|
/** Returns one work permit to the pool via a lock-free CAS retry loop. */
void returnWorkPermit()
{
    for (;;)
    {
        long snapshot = permits.get();
        int available = workPermits(snapshot);
        // Publish the incremented count atomically; retry on contention.
        if (permits.compareAndSet(snapshot, updateWorkPermits(snapshot, available + 1)))
        {
            return;
        }
    }
}
|
java
|
/**
 * Bessel function of the first kind, order zero, J0(x).
 * Uses the classic rational approximation for |x| &lt; 8 and the asymptotic
 * expansion otherwise (Numerical Recipes coefficients).
 */
public static double J0(double x) {
    final double ax = Math.abs(x);
    if (ax < 8.0) {
        // Rational polynomial fit on the central interval.
        final double t = x * x;
        final double num = 57568490574.0 + t * (-13362590354.0 + t * (651619640.7
                + t * (-11214424.18 + t * (77392.33017 + t * (-184.9052456)))));
        final double den = 57568490411.0 + t * (1029532985.0 + t * (9494680.718
                + t * (59272.64853 + t * (267.8532712 + t * 1.0))));
        return num / den;
    }
    // Asymptotic form for large |x|.
    final double z = 8.0 / ax;
    final double t = z * z;
    final double xx = ax - 0.785398164; // ax - pi/4
    final double p = 1.0 + t * (-0.1098628627e-2 + t * (0.2734510407e-4
            + t * (-0.2073370639e-5 + t * 0.2093887211e-6)));
    final double q = -0.1562499995e-1 + t * (0.1430488765e-3
            + t * (-0.6911147651e-5 + t * (0.7621095161e-6
            - t * 0.934935152e-7)));
    return Math.sqrt(0.636619772 / ax)
            * (Math.cos(xx) * p - z * Math.sin(xx) * q);
}
|
python
|
def remove(self, item):
    """Remove ``item`` from the set under the lock.

    Returns:
        True if the item was present (and has been removed), else False.
    """
    with self.lock:
        try:
            self.set.remove(item)
        except KeyError:
            return False
        return True
|
java
|
/**
 * Completes the current transaction: conflict check, persist, commit and
 * post-commit, in that order. Each step aborts/rolls back and throws on
 * failure, so reaching the end means the transaction fully committed.
 *
 * @throws TransactionFailureException if any stage fails
 * @throws IllegalStateException if no transaction is in progress
 */
public void finish() throws TransactionFailureException {
    Preconditions.checkState(currentTx != null, "Cannot finish tx that has not been started");
    // each of these steps will abort and rollback the tx in case if errors, and throw an exception
    checkForConflicts();
    persist();
    commit();
    postCommit();
    // Clear the current transaction only after every stage succeeded.
    currentTx = null;
}
|
java
|
/**
 * Looks up the last-modified timestamp of the backing file for an artifact.
 *
 * @param artifact the artifact to resolve
 * @return last-modified time of the backing file entry
 * @throws ArtifactNotFoundException if no file entry exists at the derived path
 */
public long getLastModified( Artifact artifact )
    throws IOException, ArtifactNotFoundException
{
    // groupId dots become path separators: g/r/o/up/artifact/version/name
    final String path = artifact.getGroupId().replace( '.', '/' )
        + "/" + artifact.getArtifactId()
        + "/" + artifact.getVersion()
        + "/" + artifact.getName();
    final Entry entry = backing.get( path );
    if ( entry instanceof FileEntry )
    {
        return entry.getLastModified();
    }
    // Missing entry or a non-file entry both count as "not found".
    throw new ArtifactNotFoundException( artifact );
}
|
java
|
/**
 * Applies {@code function} to the value mapped to {@code key}, or returns
 * null when the key is absent. Delegates to the map's own implementation
 * for Eclipse Collections maps; otherwise distinguishes "absent" from
 * "mapped to null" via {@code MapIterate.isAbsent}.
 *
 * @return the function result, or null if the key has no present value
 */
public static <K, V, A> A ifPresentApply(
        Map<K, V> map,
        K key,
        Function<? super V, ? extends A> function)
{
    if (map instanceof UnsortedMapIterable)
    {
        return ((MapIterable<K, V>) map).ifPresentApply(key, function);
    }
    V result = map.get(key);
    return MapIterate.isAbsent(result, map, key) ? null : function.valueOf(result);
}
|
python
|
def Alt_Fn(self, n, dl = 0):
    """Press the Alt + Fn1~12 key combination.

    :param n: index of the function key (into ``keyboard.function_keys``)
        to tap while Alt is held.
    :param dl: optional delay before sending the keystrokes.
    """
    self.Delay(dl)
    self.keyboard.press_key(self.keyboard.alt_key)
    self.keyboard.tap_key(self.keyboard.function_keys[n])
    self.keyboard.release_key(self.keyboard.alt_key)
|
python
|
def set_data(self, x=None, y=None, z=None, colors=None):
    """Update the data in this surface plot.

    Parameters
    ----------
    x : ndarray | None
        1D array of values specifying the x positions of vertices in the
        grid. If None, values will be assumed to be integers.
    y : ndarray | None
        1D array of values specifying the y positions of vertices in the
        grid. If None, values will be assumed to be integers.
    z : ndarray
        2D array of height values for each grid vertex.
    colors : ndarray
        (width, height, 4) array of vertex colors.
    """
    # A change in grid extent invalidates the cached vertex array.
    if x is not None:
        if self._x is None or len(x) != len(self._x):
            self.__vertices = None
        self._x = x
    if y is not None:
        if self._y is None or len(y) != len(self._y):
            self.__vertices = None
        self._y = y
    if z is not None:
        if self._x is not None and z.shape[0] != len(self._x):
            raise TypeError('Z values must have shape (len(x), len(y))')
        if self._y is not None and z.shape[1] != len(self._y):
            raise TypeError('Z values must have shape (len(x), len(y))')
        self._z = z
        if (self.__vertices is not None and
                self._z.shape != self.__vertices.shape[:2]):
            self.__vertices = None
    if self._z is None:
        # Nothing to draw until a height grid has been supplied.
        return
    update_mesh = False
    new_vertices = False
    # Generate vertex and face array
    if self.__vertices is None:
        new_vertices = True
        self.__vertices = np.empty((self._z.shape[0], self._z.shape[1], 3),
                                   dtype=np.float32)
        self.generate_faces()
        self.__meshdata.set_faces(self.__faces)
        update_mesh = True
    # Copy x, y, z data into vertex array
    if new_vertices or x is not None:
        if x is None:
            if self._x is None:
                # Default to integer grid coordinates.
                x = np.arange(self._z.shape[0])
            else:
                x = self._x
        self.__vertices[:, :, 0] = x.reshape(len(x), 1)
        update_mesh = True
    if new_vertices or y is not None:
        if y is None:
            if self._y is None:
                y = np.arange(self._z.shape[1])
            else:
                y = self._y
        self.__vertices[:, :, 1] = y.reshape(1, len(y))
        update_mesh = True
    if new_vertices or z is not None:
        self.__vertices[..., 2] = self._z
        update_mesh = True
    if colors is not None:
        self.__meshdata.set_vertex_colors(colors)
        update_mesh = True
    # Update MeshData
    if update_mesh:
        # Flatten the (w, h, 3) grid into an (w*h, 3) vertex list.
        self.__meshdata.set_vertices(
            self.__vertices.reshape(self.__vertices.shape[0] *
                                    self.__vertices.shape[1], 3))
        MeshVisual.set_data(self, meshdata=self.__meshdata)
|
java
|
/**
 * Validates a metric label name against the allowed grammar and rejects
 * names reserved for internal use.
 *
 * @throws IllegalArgumentException if the name is malformed or reserved
 */
protected static void checkMetricLabelName(String name) {
  final boolean wellFormed = METRIC_LABEL_NAME_RE.matcher(name).matches();
  if (!wellFormed) {
    throw new IllegalArgumentException("Invalid metric label name: " + name);
  }
  final boolean reserved = RESERVED_METRIC_LABEL_NAME_RE.matcher(name).matches();
  if (reserved) {
    throw new IllegalArgumentException("Invalid metric label name, reserved for internal use: " + name);
  }
}
|
java
|
/**
 * Creates a {@link DebugLogWrapper} for the given log id using this
 * factory's ZooKeeper client, BookKeeper client, config and executor.
 *
 * @param logId id of the log to wrap
 * @throws IllegalStateException if the factory has not been initialized
 */
public DebugLogWrapper createDebugLogWrapper(int logId) {
    Preconditions.checkState(this.bookKeeper.get() != null, "BookKeeperLogFactory is not initialized.");
    return new DebugLogWrapper(logId, this.zkClient, this.bookKeeper.get(), this.config, this.executor);
}
|
java
|
/**
 * Returns the local host's MAC address packed into a long, lazily cached
 * in the static {@code macAddr} field (0 means "not yet determined").
 *
 * <p>Fixes: {@code NetworkInterface.getByInetAddress} and
 * {@code getHardwareAddress} may legitimately return null (previously this
 * relied on the resulting NPE being swallowed), and the packed value is now
 * computed in a local before being published, so the shared static is never
 * left half-written.
 *
 * @return the packed MAC address, or 0 if it could not be determined
 */
public static long getMacAddr() {
    if (macAddr == 0) {
        try {
            InetAddress ip = InetAddress.getLocalHost();
            NetworkInterface network = NetworkInterface.getByInetAddress(ip);
            byte[] mac = (network == null) ? null : network.getHardwareAddress();
            if (mac != null) {
                long packed = 0;
                for (byte part : mac) {
                    packed = (packed << 8) | (part & 0xFF);
                }
                macAddr = packed;
            }
        } catch (Exception e) {
            // Best effort: leave the 0 sentinel so a later call may retry.
            macAddr = 0;
        }
    }
    return macAddr;
}
|
java
|
/**
 * Percent-encodes a URI path: '/' separators are preserved, spaces become
 * "%20", and every other token is UTF-8 URL-encoded.
 *
 * <p>Fix: accumulate into a {@link StringBuilder} instead of repeated
 * String concatenation (which was O(n^2) in the input length).
 */
private String encodeUri( String uri ) {
    final StringBuilder encoded = new StringBuilder( uri.length() );
    final StringTokenizer st = new StringTokenizer( uri, "/ ", true );
    while ( st.hasMoreTokens() ) {
        final String tok = st.nextToken();
        if ( tok.equals( "/" ) ) {
            encoded.append( '/' );
        } else if ( tok.equals( " " ) ) {
            // URLEncoder would emit '+', which is not valid in a path segment.
            encoded.append( "%20" );
        } else {
            try {
                encoded.append( URLEncoder.encode( tok, "UTF-8" ) );
            } catch ( UnsupportedEncodingException e ) {
                throw Log.errRTExcept( e );
            }
        }
    }
    return encoded.toString();
}
|
java
|
/**
 * Builds the ';'-separated preprocessor define list from the compiler's
 * "/D" arguments, swapping NDEBUG/_DEBUG so the defines match the
 * requested configuration.
 *
 * <p>Fix: uses {@link StringBuilder} instead of the legacy synchronized
 * {@link StringBuffer}.
 *
 * @param compilerConfig source of the pre-arguments
 * @param isDebug true for a debug configuration
 * @return e.g. {@code "WIN32;_DEBUG"}; empty string when no defines
 */
private String
getPreprocessorDefinitions(final CommandLineCompilerConfiguration compilerConfig, final boolean isDebug) {
    final StringBuilder defines = new StringBuilder();
    for (final String arg : compilerConfig.getPreArguments()) {
        if (!arg.startsWith("/D")) {
            continue;
        }
        String macro = arg.substring(2);
        // Normalize the debug define to the target configuration.
        if (isDebug) {
            if (macro.equals("NDEBUG")) {
                macro = "_DEBUG";
            }
        } else if (macro.equals("_DEBUG")) {
            macro = "NDEBUG";
        }
        defines.append(macro).append(';');
    }
    if (defines.length() > 0) {
        // Drop the trailing separator.
        defines.setLength(defines.length() - 1);
    }
    return defines.toString();
}
|
python
|
def directive_DCB(self, label, params):
    """
    label DCB value[, value ...]
    Allocate a byte space in read only memory for the value or list of values

    :param label: symbol to bind to the current space pointer.
    :param params: literal value (or an equate name that resolves to one).
    """
    # TODO make this read only
    # TODO check for byte size
    self.labels[label] = self.space_pointer
    # Resolve equate names to their defined values before conversion.
    if params in self.equates:
        params = self.equates[params]
    # Only the low byte is stored (DCB allocates a single byte).
    self.memory[self.space_pointer] = self.convert_to_integer(params) & 0xFF
    self.space_pointer += 1
|
java
|
/**
 * Renders an HTML {@code <input type="button">} element.
 *
 * @param name the input's name attribute
 * @param text the button label (value attribute)
 * @param extra extra attribute text appended verbatim
 * @return the generated markup
 */
public String button (String name, String text, String extra)
{
    return fixedInput("button", name, text, extra);
}
|
python
|
def update_persistent_boot(self, device_type=None):
    """Changes the persistent boot device order for the host

    :param device_type: ordered list of boot devices (defaults to empty)
    :raises: IloError, on an error from iLO.
    :raises: IloCommandNotSupportedError, if the command is not supported
             on the server.
    """
    # Fix: avoid the shared mutable default argument ([]) anti-pattern.
    if device_type is None:
        device_type = []
    # Check if the input is valid
    for item in device_type:
        if item.upper() not in DEVICE_COMMON_TO_RIS:
            raise exception.IloInvalidInputError("Invalid input. Valid "
                                                 "devices: NETWORK, HDD,"
                                                 " ISCSI or CDROM.")
    self._update_persistent_boot(device_type, persistent=True)
|
python
|
def use_plenary_activity_view(self):
    """Pass through to provider ActivityLookupSession.use_plenary_activity_view"""
    self._object_views['activity'] = PLENARY
    # self._get_provider_session('activity_lookup_session') # To make sure the session is tracked
    for session in self._get_provider_sessions():
        try:
            session.use_plenary_activity_view()
        except AttributeError:
            # Not every provider session supports this view; skip those.
            pass
|
java
|
/**
 * Advances the interceptor chain by one step. While interceptors remain,
 * the next one is invoked (and must call back into this method to
 * continue). Once the chain is exhausted, the post-increment in the second
 * test guarantees the target invocation runs exactly once, even if
 * invoke() is called again afterwards.
 */
@Override
public final void invoke() {
    if (index < inters.length)
        inters[index++].intercept(this);
    else if (index++ == inters.length)
        invocation.invoke();
}
|
python
|
def group2commlst(commlst, glist):
    """Prepend group/object header lines to each comm item.

    For each paired ``(gname, objname)`` from ``glist`` and ``commitem``
    from ``commlst``, insert ``"group <gname>"`` followed by
    ``"idfobj <objname>"`` at the front of ``commitem[0]``.

    Returns the (mutated) ``commlst``.
    """
    for (gname, objname), commitem in zip(glist, commlst):
        header = ["group %s" % gname, "idfobj %s" % objname]
        # Slice assignment inserts both header lines at the front, in order.
        commitem[0][0:0] = header
    return commlst
|
java
|
/**
 * Returns 1.0 when every attribute of {@code instance} matches this
 * point's integer coordinates exactly, otherwise 0.0.
 */
@Override
public double getInclusionProbability(Instance instance) {
    for (int dim = 0; dim < this.dimensions; dim++) {
        if (this.coordinates[dim] != (int) instance.value(dim)) {
            return 0.0;
        }
    }
    return 1.0;
}
|
java
|
/**
 * Evaluates a set enumeration expression ({@code {e1, e2, ...}}) by
 * evaluating each member expression and collecting the results.
 *
 * @return the resulting {@link SetValue}
 */
@Override
public Value caseASetEnumSetExp(ASetEnumSetExp node, Context ctxt)
        throws AnalysisException
{
    // Honour any breakpoint set at this expression's location.
    BreakpointManager.getBreakpoint(node).check(node.getLocation(), ctxt);

    ValueSet values = new ValueSet();

    for (PExp e : node.getMembers())
    {
        values.add(e.apply(VdmRuntime.getExpressionEvaluator(), ctxt));
    }

    return new SetValue(values);
}
|
python
|
def impulse_response(self, j=5):
    r"""
    Pulls off the impulse response coefficients to a shock
    in :math:`w_{t}` for :math:`x` and :math:`y`

    Important to note: We are uninterested in the shocks to
    v for this method

    * :math:`x` coefficients are :math:`C, AC, A^2 C...`
    * :math:`y` coefficients are :math:`GC, GAC, GA^2C...`

    Parameters
    ----------
    j : Scalar(int)
        Number of coefficients that we want

    Returns
    -------
    xcoef : list(array_like(float, 2))
        The coefficients for x
    ycoef : list(array_like(float, 2))
        The coefficients for y
    """
    # Pull out matrices (H is not needed for impulse responses; the
    # previous version read self.H into an unused local).
    A, C, G = self.A, self.C, self.G
    # Seed with the zero-lag coefficients C and GC, then accumulate
    # powers of A: A C, A^2 C, ... up to A^j C.
    xcoef = [C]
    ycoef = [np.dot(G, C)]
    Apower = np.copy(A)
    for _ in range(j):
        xcoef.append(np.dot(Apower, C))
        ycoef.append(np.dot(G, xcoef[-1]))
        Apower = np.dot(Apower, A)

    return xcoef, ycoef
|
java
|
/**
 * One-time GL-surface initialization: records the GL thread, probes
 * anisotropic-filtering support, sets up the main scene and render bundle,
 * and configures depth/stencil rendering from the app settings.
 */
void onSurfaceCreated() {
    Log.v(TAG, "onSurfaceCreated");
    Thread currentThread = Thread.currentThread();

    // Reduce contention with other Android processes
    currentThread.setPriority(Thread.MAX_PRIORITY);

    // we know that the current thread is a GL one, so we store it to
    // prevent non-GL thread from calling GL functions
    mGLThreadID = currentThread.getId();

    // Evaluating anisotropic support on GL Thread
    String extensions = GLES20.glGetString(GLES20.GL_EXTENSIONS);
    isAnisotropicSupported = extensions.contains("GL_EXT_texture_filter_anisotropic");

    // Evaluating max anisotropic value if supported
    if (isAnisotropicSupported) {
        maxAnisotropicValue = NativeTextureParameters.getMaxAnisotropicValue();
    }

    mPreviousTimeNanos = GVRTime.getCurrentTime();

    // Fall back to a fresh scene when none was set before surface creation.
    final GVRScene scene = null == mMainScene ? new GVRScene(GVRViewManager.this) : mMainScene;
    setMainSceneImpl(scene);
    mRenderBundle = makeRenderBundle();

    final DepthFormat depthFormat = mApplication.getAppSettings().getEyeBufferParams().getDepthFormat();
    mApplication.getConfigurationManager().configureRendering(DepthFormat.DEPTH_24_STENCIL_8 == depthFormat);
}
|
java
|
/**
 * Creates an IAM OpenID Connect provider (generated SDK operation:
 * pre-process the request, then execute it).
 */
@Override
public CreateOpenIDConnectProviderResult createOpenIDConnectProvider(CreateOpenIDConnectProviderRequest request) {
    request = beforeClientExecution(request);
    return executeCreateOpenIDConnectProvider(request);
}
|
java
|
/**
 * Returns a name-to-value mapper over this object's fields, rebuilding it
 * only when the object has changed since the last call (cached in
 * {@code nameValMap}).
 *
 * @param <V> value type the caller expects; the cast is unchecked
 * @return the (possibly cached) field mapper
 */
@SuppressWarnings("unchecked") public <V> Mapper<String, V> mapper() {
    // Fast path: reuse the cached mapper when nothing has changed.
    if (!isChanged && nameValMap.isPresent()) {
        return (Mapper<String, V>) nameValMap.get();
    }
    Map<String, Object> fm = Maps.newHashMap();
    if (fieldHolder.isPresent()) {
        // Populate the map by iterating all fields via the visitor.
        fieldLoop(new TransformFields2Map<Object>(fm, this.trace));
    }
    nameValMap = Optional.of(Mapper.from(fm));
    this.isChanged = false;
    return (Mapper<String, V>) nameValMap.get();
}
|
python
|
def copy_neg(self):
    """ Return a copy of self with the opposite sign bit.

    Unlike -self, this does not make use of the context: the result
    has the same precision as the original.
    """
    # Allocate an uninitialized BigFloat at exactly self's precision.
    result = mpfr.Mpfr_t.__new__(BigFloat)
    mpfr.mpfr_init2(result, self.precision)
    # Flip the sign bit; the rounding mode is irrelevant since the
    # significand is copied exactly at equal precision.
    new_sign = not self._sign()
    mpfr.mpfr_setsign(result, self, new_sign, ROUND_TIES_TO_EVEN)
    return result
|
java
|
/**
 * Static factory for an empty reservoir sketch.
 *
 * @param k maximum reservoir size (number of items retained)
 * @param rf the resize factor controlling internal growth
 * @param <T> the item type
 * @return a new, empty ReservoirItemsSketch
 */
public static <T> ReservoirItemsSketch<T> newInstance(final int k, final ResizeFactor rf) {
    return new ReservoirItemsSketch<>(k, rf);
}
|
java
|
/**
 * Marshalls a {@code ResourcePath} into the request protocol.
 *
 * @param resourcePath the object to marshall; must not be null
 * @param protocolMarshaller the marshaller to write into
 * @throws SdkClientException on null input or any marshalling failure
 */
public void marshall(ResourcePath resourcePath, ProtocolMarshaller protocolMarshaller) {

    if (resourcePath == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }

    try {
        protocolMarshaller.marshall(resourcePath.getComponents(), COMPONENTS_BINDING);
    } catch (Exception e) {
        // Wrap all failures in the SDK's client exception, preserving the cause.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
python
|
def get_predicate_indices(tags: List[str]) -> List[int]:
    """
    Return the word indices of a predicate in BIO tags.

    A position counts as part of the predicate when its tag contains 'V'
    (e.g. "B-V", "I-V").
    """
    indices = []
    for position, tag in enumerate(tags):
        if 'V' in tag:
            indices.append(position)
    return indices
|
python
|
def _read_decimal(data, size, writer_schema):
    """
    Decode an Avro fixed/bytes decimal: a big-endian two's-complement
    unscaled integer of ``size`` bytes, scaled by ``10**-scale``.

    based on https://github.com/apache/avro/pull/82/
    """
    scale = writer_schema.get('scale', 0)
    precision = writer_schema['precision']

    datum_byte = str2ints(data)

    unscaled_datum = 0
    # Most significant byte decides the sign (two's complement).
    msb = fstint(data)
    leftmost_bit = (msb >> 7) & 1
    if leftmost_bit == 1:
        # Negative: clear the sign bit, accumulate the magnitude, then
        # subtract 2**(bits-1) to recover the two's-complement value.
        modified_first_byte = datum_byte[0] ^ (1 << 7)
        datum_byte = [modified_first_byte] + datum_byte[1:]
        for offset in xrange(size):
            unscaled_datum <<= 8
            unscaled_datum += datum_byte[offset]
        unscaled_datum += pow(-2, (size * 8) - 1)
    else:
        # Non-negative: plain big-endian accumulation.
        for offset in xrange(size):
            unscaled_datum <<= 8
            unscaled_datum += (datum_byte[offset])

    with localcontext() as ctx:
        # Apply the schema's precision before shifting the decimal point.
        ctx.prec = precision
        scaled_datum = Decimal(unscaled_datum).scaleb(-scale)
    return scaled_datum
|
java
|
/**
 * Adds this creator's non-null fields as POST parameters on the request.
 * Dates are serialized via their {@code toString()} form.
 *
 * @param request the request to mutate
 */
private void addPostParams(final Request request) {
    if (identity != null) {
        request.addPostParam("Identity", identity);
    }

    if (userAddress != null) {
        request.addPostParam("UserAddress", userAddress);
    }

    if (attributes != null) {
        request.addPostParam("Attributes", attributes);
    }

    if (twilioAddress != null) {
        request.addPostParam("TwilioAddress", twilioAddress);
    }

    if (dateCreated != null) {
        request.addPostParam("DateCreated", dateCreated.toString());
    }

    if (dateUpdated != null) {
        request.addPostParam("DateUpdated", dateUpdated.toString());
    }
}
|
python
|
def recordHostname(self, basedir):
    "Record my hostname in twistd.hostname, for user convenience"
    log.msg("recording hostname in twistd.hostname")
    target = os.path.join(basedir, "twistd.hostname")

    try:
        # os.uname is unix-only; index 1 is the node (host) name.
        name = os.uname()[1]
    except AttributeError:
        # Fall back to DNS; this tends to fail on non-connected hosts,
        # e.g., laptops on planes.
        name = socket.getfqdn()

    try:
        with open(target, "w") as out:
            out.write("{0}\n".format(name))
    except Exception:
        # Best effort only: the file is a convenience, not a requirement.
        log.msg("failed - ignoring")
|
python
|
def _dirdiffcopyandupdate(self, dir1, dir2):
    """
    Private function which does directory diff, copy and update (synchro)

    :param dir1: source directory.
    :param dir2: target directory.
    """
    # Delegate the traversal; _copy handles new files, _update changed ones.
    self._dowork(dir1, dir2, self._copy, self._update)
|
python
|
def _set_acl_state(self, v, load=False):
    """
    Setter method for acl_state, mapped from YANG variable /acl_state (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_acl_state is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_acl_state() directly.

    YANG Description: Vxlan ACL information

    (Generated pyangbind code: the value is coerced into the YANG
    container type; a coercion failure is reported with the generated
    type description.)
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=acl_state.acl_state, is_container='container', presence=False, yang_name="acl-state", rest_name="acl-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'ssm-acl', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-ssm-operational', defining_module='brocade-ssm-operational', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """acl_state must be of a type compatible with container""",
          'defined-type': "container",
          'generated-type': """YANGDynClass(base=acl_state.acl_state, is_container='container', presence=False, yang_name="acl-state", rest_name="acl-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'ssm-acl', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-ssm-operational', defining_module='brocade-ssm-operational', yang_type='container', is_config=True)""",
        })

    self.__acl_state = t
    if hasattr(self, '_set'):
      self._set()
|
java
|
/**
 * Lists clusters (generated SDK operation: pre-process the request, then
 * execute it).
 */
@Override
public ListClustersResult listClusters(ListClustersRequest request) {
    request = beforeClientExecution(request);
    return executeListClusters(request);
}
|
java
|
/**
 * Handles a fragment response: re-routes misrouted responses, resolves
 * k-safety duplicate counting, buffers safe reads behind pending writes,
 * advances the repair-log truncation point for completed writes, and
 * finally forwards the response to its destination site.
 */
private void handleFragmentResponseMessage(FragmentResponseMessage message)
{
    if (isFragmentMisrouted(message)){
        m_mailbox.send(message.getDestinationSiteId(), message);
        return;
    }
    final VoltTrace.TraceEventBatch traceLog = VoltTrace.log(VoltTrace.Category.SPI);

    // Send the message to the duplicate counter, if any
    DuplicateCounter counter =
            m_duplicateCounters.get(new DuplicateCounterKey(message.getTxnId(), message.getSpHandle()));
    final TransactionState txn = m_outstandingTxns.get(message.getTxnId());
    if (counter != null) {
        String traceName = "recvfragment";
        if (message.m_sourceHSId != m_mailbox.getHSId()) {
            traceName = "replicatefragment";
        }
        String finalTraceName = traceName;
        if (traceLog != null) {
            traceLog.add(() -> VoltTrace.endAsync(finalTraceName, MiscUtils.hsIdPairTxnIdToString(m_mailbox.getHSId(), message.m_sourceHSId, message.getSpHandle(), message.getTxnId()),
                                                  "status", message.getStatusCode()));
        }

        int result = counter.offer(message);
        if (result == DuplicateCounter.DONE) {
            // All replicas answered consistently; release the final response.
            if (txn != null && txn.isDone()) {
                setRepairLogTruncationHandle(txn.m_spHandle, txn.isLeaderMigrationInvolved());
            }

            m_duplicateCounters.remove(new DuplicateCounterKey(message.getTxnId(), message.getSpHandle()));
            FragmentResponseMessage resp = (FragmentResponseMessage)counter.getLastResponse();
            // MPI is tracking deps per partition HSID.  We need to make
            // sure we write ours into the message getting sent to the MPI
            resp.setExecutorSiteId(m_mailbox.getHSId());
            m_mailbox.send(counter.m_destinationId, resp);
        }
        else if (result == DuplicateCounter.MISMATCH) {
            VoltDB.crashGlobalVoltDB("HASH MISMATCH running multi-part procedure.", true, null);
        } else if (result == DuplicateCounter.ABORT) {
            VoltDB.crashGlobalVoltDB("PARTIAL ROLLBACK/ABORT running multi-part procedure.", true, null);
        }
        // doing duplicate suppression: all done.
        return;
    }

    // No k-safety means no replica: read/write queries on master.
    // K-safety: read-only queries (on master) or write queries (on replica).
    if ( (m_isLeader || (!m_isLeader && message.isExecutedOnPreviousLeader()))
            && m_sendToHSIds.length > 0 && message.getRespBufferable()
            && (txn == null || txn.isReadOnly()) ) {
        // on k-safety leader with safe reads configuration: one shot reads + normal multi-fragments MP reads
        // we will have to buffer these reads until previous writes acked in the cluster.
        long readTxnId = txn == null ? message.getSpHandle() : txn.m_spHandle;
        m_bufferedReadLog.offer(m_mailbox, message, readTxnId, m_repairLogTruncationHandle);
        return;
    }

    // for complete writes txn, we will advance the transaction point
    if (txn != null && !txn.isReadOnly() && txn.isDone()) {
        setRepairLogTruncationHandle(txn.m_spHandle, message.isExecutedOnPreviousLeader());
    }

    if (traceLog != null) {
        traceLog.add(() -> VoltTrace.endAsync("recvfragment", MiscUtils.hsIdPairTxnIdToString(m_mailbox.getHSId(), message.m_sourceHSId, message.getSpHandle(), message.getTxnId()),
                                              "status", message.getStatusCode()));
    }

    m_mailbox.send(message.getDestinationSiteId(), message);
}
|
java
|
/**
 * Collapses or expands this panel: hides/shows the content and flips the
 * trigger icon accordingly, then refreshes the layout.
 *
 * @param collapse true to collapse, false to expand
 */
public void setCollapsed (boolean collapse)
{
    _content.setVisible(!collapse);
    _trigger.setIcon(collapse ? _downIcon : _upIcon);
    SwingUtil.refresh(this);
}
|
java
|
/**
 * Builds the ribbon bar from configuration: one layout group per configured
 * group, populated with whichever columns the registry can resolve.
 *
 * @param barInfo ribbon configuration; must not be null
 * @param mapWidget the map the ribbon columns act upon
 */
private void buildGui(RibbonBarInfo barInfo, MapWidget mapWidget) {
	if (barInfo == null) {
		throw new IllegalArgumentException("RibbonBarLayout cannot be built without RibbonBarInfo configuration.");
	}
	for (RibbonGroupInfo groupInfo : barInfo.getGroups()) {
		RibbonGroupLayout layout = new RibbonGroupLayout(groupInfo.getTitle());
		for (RibbonColumnInfo columnInfo : groupInfo.getColumns()) {
			RibbonColumn column = RibbonColumnRegistry.getRibbonColumn(columnInfo.getType(),
					columnInfo.getTools(), columnInfo.getParameters(), mapWidget);
			// Unknown column types resolve to null and are simply skipped.
			if (column != null) {
				layout.addColumn(column);
			}
		}
		addGroup(layout);
	}
}
|
python
|
def horizontal_border(self, _, outer_widths):
    """Handle the GitHub heading border.

    E.g.:
    |:---|:---:|---:|----|

    :param _: Unused.
    :param iter outer_widths: List of widths (with padding) for each column.

    :return: Prepared border strings in a generator.
    :rtype: iter
    """
    dash = str(self.CHAR_INNER_HORIZONTAL)
    columns = []
    for index, width in enumerate(outer_widths):
        width = max(3, width)  # Need room for the alignment colons.
        alignment = self.justify_columns.get(index)
        if alignment == 'left':
            cell = ':' + dash * (width - 1)
        elif alignment == 'right':
            cell = dash * (width - 1) + ':'
        elif alignment == 'center':
            cell = ':' + dash * (width - 2) + ':'
        else:
            cell = dash * width
        columns.append(cell)
    return combine(columns, self.CHAR_OUTER_LEFT_VERTICAL,
                   self.CHAR_INNER_VERTICAL, self.CHAR_OUTER_RIGHT_VERTICAL)
|
java
|
/**
 * Returns the number of sequences (documents) the word appeared in,
 * or -1 when the word is not in the vocabulary.
 */
@Override
public int docAppearedIn(String word) {
    T entry = extendedVocabulary.get(word);
    return entry == null ? -1 : (int) entry.getSequencesCount();
}
|
python
|
def _on_work_finished(self, results):
    """
    Display results.

    :param results: Response data; an iterable of raw message tuples that
        can be unpacked into CheckerMessage.
    """
    messages = []
    for msg in results:
        msg = CheckerMessage(*msg)
        # Clamp out-of-range line numbers to the last block of the document.
        if msg.line >= self.editor.blockCount():
            msg.line = self.editor.blockCount() - 1
        block = self.editor.document().findBlockByNumber(msg.line)
        msg.block = block
        messages.append(msg)
    self.add_messages(messages)
|
python
|
def channels(self):
    """Output channels, created lazily on first access and then cached.

    :raises ValueError: if a requested channel is not defined in config.
    :raises LogRaptorConfigError: if a channel has an unknown type.
    """
    try:
        return self._channels
    except AttributeError:
        logger.debug("initialize output channels ...")
        channels = self.args.channels
        # Channel section names look like "<name>_channel".
        config_channels = [sec.rpartition('_')[0] for sec in self.config.sections(suffix='_channel')]
        unknown = set(channels) - set(config_channels)
        if unknown:
            raise ValueError("undefined channel %r" % list(unknown))

        output_channels = []
        for channel in set(channels):
            channel_type = self.config.get('%s_channel' % channel, 'type')
            if channel_type == 'tty':
                output_channels.append(TermChannel(channel, self.args, self.config))
            elif channel_type == 'file':
                output_channels.append(FileChannel(channel, self.args, self.config))
            elif channel_type == 'mail':
                output_channels.append(MailChannel(channel, self.args, self.config))
            else:
                raise LogRaptorConfigError('unknown channel type %r' % channel_type)
        # Bug fix: cache the built list so later accesses hit the
        # try-branch above instead of re-creating every channel object.
        self._channels = output_channels
        return output_channels
|
python
|
def _ncc_c_3dim(x, y):
"""
Variant of NCCc that operates with 2 dimensional X arrays and 2 dimensional
y vector
Returns a 3 dimensional array of normalized fourier transforms
"""
den = norm(x, axis=1)[:, None] * norm(y, axis=1)
den[den == 0] = np.Inf
x_len = x.shape[-1]
fft_size = 1 << (2*x_len-1).bit_length()
cc = ifft(fft(x, fft_size) * np.conj(fft(y, fft_size))[:, None])
cc = np.concatenate((cc[:,:,-(x_len-1):], cc[:,:,:x_len]), axis=2)
return np.real(cc) / den.T[:, :, None]
|
python
|
def setup(self):
    """Load backends and plugins.

    :returns: A `BuildConfiguration` object constructed during backend/plugin loading.
    """
    # Plugin resolution is driven entirely by the bootstrap options:
    # pythonpath, requested plugins, and backend packages.
    return self._load_plugins(
        self._working_set,
        self._bootstrap_options.pythonpath,
        self._bootstrap_options.plugins,
        self._bootstrap_options.backend_packages
    )
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.