language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
---|---|
java
|
/**
 * Returns a Single that resolves to the transaction for the given id.
 * An already-registered local transaction is reused; otherwise a new one is
 * created on a pooled connection (slave datasource for read-only work,
 * master otherwise).
 *
 * @param transactionId id of the transaction to look up or create
 * @param readOnly      true to create the transaction on the slave datasource
 * @return a Single emitting the (possibly pre-existing) transaction
 */
public static Single<XianTransaction> getTransaction(String transactionId, boolean readOnly) {
    // Cache the lookup: the original called getExistedLocalTrans() twice,
    // so a concurrent unregistration between the null-check and the reuse
    // could hand Single.just() a null transaction.
    XianTransaction existing = BaseLocalTransaction.getExistedLocalTrans(transactionId);
    if (existing != null) {
        return Single.just(existing);
    }
    if (readOnly) {
        return PoolFactory.getPool().getSlaveDatasource().getConnection()
                .map(connection -> connection.createTransaction(transactionId));
    } else {
        return PoolFactory.getPool().getMasterDatasource().getConnection()
                .map(connection -> connection.createTransaction(transactionId));
    }
}
|
python
|
def _parse_simple_model(topology, parent_scope, model, inputs, outputs):
    '''
    Parse a model containing only one operator (aka simple model).
    Steps:
    1. Create local scope for allocating local variables and operators
    2. Create operator and then feed the model's inputs and outputs to the operator
    3. Connect local variables and their corresponding parent variables
    Note:
    1. Notice that a CoreML operator can contain no input and output, so we directly use model's inputs (outputs).
    2. Input and output names can be identical in CoreML, but they must be different for ONNX.

    :param topology: Topology that owns all scopes and variables
    :param parent_scope: scope enclosing this model
    :param model: CoreML model containing exactly one operator
    :param inputs: parent-scope variables feeding this model
    :param outputs: parent-scope variables produced by this model
    '''
    # Create local scope for the considered model
    scope = topology.declare_scope('single', [parent_scope] + parent_scope.parent_scopes)
    # Create operator for the considered model; the operator type is the name
    # of the CoreML oneof field (e.g. 'treeEnsembleClassifier').
    this_operator = scope.declare_local_operator(model.WhichOneof('Type'), model)
    # Allocate inputs for the operator and then connect them with inputs from outside
    for var in model.description.input:
        # We assume that no duplicated raw name exists. Note that we set prepend=True because model inputs should
        # not hide any intermediate variables.
        variable = scope.declare_local_variable(
            var.name, _parse_coreml_feature(var, topology.target_opset, topology.default_batch_size),
            prepend=True)
        this_operator.inputs.append(variable)
    # Connect local variables and variables passed into this scope. Our assumptions are described below.
    # 1. Assume a variable with 'A' as its CoreML name is passed in. There must be at least one local variable gets a
    #    raw name 'A'. That is, for each parent variable, at least one local duplicate is available.
    # 2. It's possible to find multiple local variables associated with the same raw name. For example, raw name 'A' can
    #    be associated with 'A' and 'A1' in ONNX. In this case, we connect the first one to parent input.
    for parent_variable in inputs:
        raw_name = parent_variable.raw_name
        # [0]: first local duplicate is the one wired to the parent input.
        child_variable = scope.variables[scope.variable_name_mapping[raw_name][0]]
        # Identity operator bridges the parent variable into this scope.
        operator = scope.declare_local_operator('identity')
        operator.inputs.append(parent_variable)
        operator.outputs.append(child_variable)
    # Allocate outputs for the operator and then connect them with outputs from outside
    for var in model.description.output:
        # We assume that no duplicated output raw name exists.
        variable = scope.declare_local_variable(
            var.name, _parse_coreml_feature(var, topology.target_opset, topology.default_batch_size))
        this_operator.outputs.append(variable)
    # Connect local variables and variables passed into this scope. Our assumptions are described below.
    # 1. Assume a variable with 'A' as its CoreML name is passed in. There must be at least one local variable gets a
    #    raw name 'A'. That is, for each parent variable, at least one local duplicate is available.
    # 2. It's possible to find multiple local variables associated with the same raw name. For example, raw name 'A' can
    #    be associated with 'A' and 'A1' in ONNX. In this case, we connect the last one to parent output.
    for parent_variable in outputs:
        raw_name = parent_variable.raw_name
        # [-1]: last local duplicate is the one wired to the parent output.
        child_variable = scope.variables[scope.variable_name_mapping[raw_name][-1]]
        operator = scope.declare_local_operator('identity')
        operator.inputs.append(child_variable)
        operator.outputs.append(parent_variable)
|
java
|
/**
 * Reads the system property {@code name} and parses it as a {@link Long}.
 * Delegates to {@link Long#getLong(String, Long)}: the default is returned
 * when the property is absent or not a parseable long.
 *
 * @param name         system property name
 * @param defaultValue value returned when the property is missing/invalid
 * @return the parsed property value, or {@code defaultValue}
 */
public static Long getLong(String name, Long defaultValue) {
    final Long value = Long.getLong(name, defaultValue);
    return value;
}
|
java
|
/**
 * Attaches a {@code SubFileFilter} listener to the main record so it only
 * shows rows whose {@code PROJECT_TASK_ID_KEY} matches the header record's
 * current {@code ProjectTask.ID} field.
 */
public void addSubFileFilter()
{
    this.getMainRecord().addListener(new SubFileFilter(ProjectTaskPredecessor.PROJECT_TASK_ID_KEY, this.getHeaderRecord().getField(ProjectTask.ID)));
}
|
java
|
/**
 * Points this asset file at an already-uploaded resource link.
 * The content sources are mutually exclusive, so the competing fields
 * ({@code upload}, {@code url}) and the processed {@code details} are
 * cleared.
 *
 * @param uploadFrom link to the uploaded resource
 * @return this instance, for call chaining
 */
public CMAAssetFile setUploadFrom(CMALink uploadFrom) {
    // Reset the alternative sources first; only one may be active at a time.
    this.upload = null;
    this.url = null;
    this.details = null;
    this.uploadFrom = uploadFrom;
    return this;
}
|
python
|
def destination_present(name,
                        physical,
                        restype='queue',
                        description='',
                        enabled=True,
                        server=None):
    '''
    Ensures that the JMS Destination Resource (queue or topic) is present

    name
        The JMS Queue/Topic name
    physical
        The Physical destination name
    restype
        The JMS Destination resource type, either ``queue`` or ``topic``, defaults is ``queue``
    description
        A description of the resource
    enabled
        Defaults to ``True``
    server
        Optional server instance to target (passed through to the API helper)
    '''
    # Standard salt state return structure; result None means "not yet known"
    # (kept that way in test mode unless nothing would change).
    ret = {'name': name, 'result': None, 'comment': None, 'changes': {}}
    params = {}
    # Set parameters dict
    if restype == 'queue':
        params['resType'] = 'javax.jms.Queue'
        params['className'] = 'com.sun.messaging.Queue'
    elif restype == 'topic':
        params['resType'] = 'javax.jms.Topic'
        params['className'] = 'com.sun.messaging.Topic'
    else:
        # Unknown restype: fail the state immediately without calling the API.
        ret['result'] = False
        ret['comment'] = 'Invalid restype'
        return ret
    params['properties'] = {'Name': physical}
    params['description'] = description
    params['enabled'] = enabled
    jms_ret = _do_element_present(name, 'admin_object_resource', params, server)
    if not jms_ret['error']:
        if not __opts__['test']:
            ret['result'] = True
        # Report what was (or would be) done, preferring create over update.
        if jms_ret['create'] and __opts__['test']:
            ret['comment'] = 'JMS Queue set to be created'
        elif jms_ret['create']:
            ret['changes'] = jms_ret['changes']
            ret['comment'] = 'JMS queue created'
        elif jms_ret['update'] and __opts__['test']:
            ret['comment'] = 'JMS Queue set to be updated'
        elif jms_ret['update']:
            ret['changes'] = jms_ret['changes']
            ret['comment'] = 'JMS Queue updated'
        else:
            # Nothing to do: success even in test mode.
            ret['result'] = True
            ret['comment'] = 'JMS Queue already up-to-date'
    else:
        ret['result'] = False
        ret['comment'] = 'Error from API: {0}'.format(jms_ret['error'])
    return ret
|
java
|
/**
 * Restores this bean context from the stream: default fields first, then
 * transient state via {@code initialize()}, then (only when this context is
 * its own peer) the serialized children, and finally the explicitly-written
 * membership listeners.
 *
 * @throws IOException            on stream failure
 * @throws ClassNotFoundException if a serialized class cannot be resolved
 */
private synchronized void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    // todo: for multithreaded usage this block needs to be synchronized
    in.defaultReadObject();
    // Rebuild transient state that default deserialization does not carry.
    initialize();
    // only deserialize child if not using a peer
    if (this.equals(getPeer())) {
        readChildren(in);
    }
    // The listener count was written explicitly; read the same number back.
    int listenerCount = in.readInt();
    for (int i = 0; i < listenerCount; i++) {
        addBeanContextMembershipListener((BeanContextMembershipListener) in.readObject());
    }
    // end synchronized block
}
|
java
|
/**
 * Resolves {@code resourceName} against each configured root URL and
 * returns the first connection that opens successfully.
 * Failures for individual roots are accumulated as a chain of
 * {@link ResourceException}s that is thrown only when every root fails.
 *
 * @param resourceName relative resource name to resolve
 * @return an open connection to the resource
 * @throws ResourceException when no root yields an openable URL
 */
public URLConnection getResourceConnection(String resourceName) throws ResourceException {
    // Get the URLConnection
    URLConnection groovyScriptConn = null;
    ResourceException se = null;
    for (URL root : roots) {
        URL scriptURL = null;
        try {
            scriptURL = new URL(root, resourceName);
            groovyScriptConn = openConnection(scriptURL);
            break; // found a root that can serve the resource
        } catch (MalformedURLException e) {
            String message = "Malformed URL: " + root + ", " + resourceName;
            if (se == null) {
                se = new ResourceException(message);
            } else {
                se = new ResourceException(message, se);
            }
        } catch (IOException e1) {
            // Report the fully resolved URL: the previous
            // "root + resourceName" concatenation had no separator and did
            // not reflect the URL actually attempted.
            String message = "Cannot open URL: " + scriptURL;
            groovyScriptConn = null;
            if (se == null) {
                se = new ResourceException(message);
            } else {
                se = new ResourceException(message, se);
            }
        }
    }
    if (se == null) se = new ResourceException("No resource for " + resourceName + " was found");
    // If we didn't find anything, report on all the exceptions that occurred.
    if (groovyScriptConn == null) throw se;
    return groovyScriptConn;
}
|
python
|
def shot_open_callback(self, *args, **kwargs):
    """Callback for the shot open button

    Opens the currently selected shot file, emitting the
    ``before_open_shot`` / ``after_open_shot`` signals around the open.
    Returns silently when nothing is selected; logs an error and shows a
    status message when the selected file no longer exists on disk.

    :returns: None
    :rtype: None
    :raises: None
    """
    # get_current_selection(1) returns the selected task file (falsy if none).
    tf = self.browser.get_current_selection(1)
    if not tf:
        return
    if not os.path.exists(tf.path):
        msg = 'The selected shot does not exist: %s' % tf.path
        log.error(msg)
        self.statusbar.showMessage(msg)
        return
    js = JukeboxSignals.get()
    # Signal before and after the actual open so listeners can prepare/clean up.
    js.before_open_shot.emit(tf)
    self.open_shot(tf)
    js.after_open_shot.emit(tf)
|
python
|
def available_resources(self):
    """Get the current available cluster resources.

    This is different from `cluster_resources` in that this will return
    idle (available) resources rather than total resources.

    Note that this information can grow stale as tasks start and finish.

    Returns:
        A dictionary mapping resource name to the total quantity of that
        resource in the cluster.
    """
    available_resources_by_id = {}
    # One pubsub client per redis shard, listening for heartbeat updates.
    subscribe_clients = [
        redis_client.pubsub(ignore_subscribe_messages=True)
        for redis_client in self.redis_clients
    ]
    try:
        for subscribe_client in subscribe_clients:
            subscribe_client.subscribe(ray.gcs_utils.XRAY_HEARTBEAT_CHANNEL)
        client_ids = self._live_client_ids()
        # Loop until a heartbeat has been collected from every live client.
        while set(available_resources_by_id.keys()) != client_ids:
            for subscribe_client in subscribe_clients:
                # Parse client message
                raw_message = subscribe_client.get_message()
                if (raw_message is None or raw_message["channel"] !=
                        ray.gcs_utils.XRAY_HEARTBEAT_CHANNEL):
                    continue
                data = raw_message["data"]
                gcs_entries = (
                    ray.gcs_utils.GcsTableEntry.GetRootAsGcsTableEntry(
                        data, 0))
                heartbeat_data = gcs_entries.Entries(0)
                message = (ray.gcs_utils.HeartbeatTableData.
                           GetRootAsHeartbeatTableData(heartbeat_data, 0))
                # Calculate available resources for this client
                num_resources = message.ResourcesAvailableLabelLength()
                dynamic_resources = {}
                for i in range(num_resources):
                    resource_id = decode(message.ResourcesAvailableLabel(i))
                    dynamic_resources[resource_id] = (
                        message.ResourcesAvailableCapacity(i))
                # Update available resources for this client
                client_id = ray.utils.binary_to_hex(message.ClientId())
                available_resources_by_id[client_id] = dynamic_resources
            # Update clients in cluster
            client_ids = self._live_client_ids()
            # Remove disconnected clients. Iterate over a snapshot of the
            # keys: deleting from a dict while iterating over it raises
            # RuntimeError on Python 3.
            for client_id in list(available_resources_by_id.keys()):
                if client_id not in client_ids:
                    del available_resources_by_id[client_id]
    finally:
        # Close the pubsub clients to avoid leaking file descriptors,
        # even if parsing a heartbeat raises.
        for subscribe_client in subscribe_clients:
            subscribe_client.close()
    # Calculate total available resources
    total_available_resources = defaultdict(int)
    for available_resources in available_resources_by_id.values():
        for resource_id, num_available in available_resources.items():
            total_available_resources[resource_id] += num_available
    return dict(total_available_resources)
|
java
|
/**
 * Builds an equality-comparison AST node ({@code expr1 == expr2}).
 *
 * @param expr1 left operand
 * @param expr2 right operand
 * @return a new binary-operator node with token {@code Token.EQ}
 */
public static Node eq(Node expr1, Node expr2) {
    return binaryOp(Token.EQ, expr1, expr2);
}
|
python
|
def verify_repo_matches_url(repo, url):
    """Verify ``url`` is a part of ``repo``.

    We were using ``startswith()`` for a while, which isn't a good comparison.
    This function allows us to ``urlparse`` and compare host and path.

    Args:
        repo (str): the repo url
        url (str): the url to verify is part of the repo

    Returns:
        bool: ``True`` if the repo matches the url.
    """
    repo_parts = urlparse(repo)
    url_parts = urlparse(url)
    problems = []
    repo_path_parts = repo_parts.path.split('/')
    url_path_parts = url_parts.path.split('/')
    if repo_parts.hostname != url_parts.hostname:
        problems.append("verify_repo_matches_url: Hostnames don't match! {} {}".format(
            repo_parts.hostname, url_parts.hostname
        ))
    # Both checks must hold: a raw prefix match AND a component-wise match
    # (so "/repo2" is not accepted as living under "/repo").
    paths_match = (url_parts.path.startswith(repo_parts.path) and
                   url_path_parts[:len(repo_path_parts)] == repo_path_parts)
    if not paths_match:
        problems.append("verify_repo_matches_url: Paths don't match! {} {}".format(
            repo_parts.path, url_parts.path
        ))
    if not problems:
        return True
    log.warning("\n".join(problems))
    return False
|
java
|
/**
 * Builds a write future that is already completed exceptionally, used to
 * report a write that never took place.
 *
 * @param session session the (non-)write belongs to
 * @param cause   reason the write did not happen
 * @return a future whose exception is already set to {@code cause}
 */
public static WriteFutureEx newNotWrittenFuture(IoSession session, Throwable cause) {
    final DefaultWriteFutureEx future = new DefaultWriteFutureEx(session);
    future.setException(cause);
    return future;
}
|
python
|
def root_endpoint(request, format=None):
    """
    List of all the available resources of this RESTful API.

    Walks the project-level ``urlpatterns``, descending into included URL
    modules, and returns a list of ``{'name', 'url'}`` dicts. Routes that
    require arguments (detail views such as ``/nodes/<slug>/``) cannot be
    reversed without arguments and are skipped silently.
    """
    endpoints = []
    # loop over url modules
    for urlmodule in urlpatterns:
        # is it a urlconf module?
        if hasattr(urlmodule, 'urlconf_module'):
            is_urlconf_module = True
        else:
            is_urlconf_module = False
        # if url is really a urlmodule
        if is_urlconf_module:
            # loop over urls of that module
            for url in urlmodule.urlconf_module.urlpatterns:
                # TODO: configurable skip url in settings
                # skip api-docs url
                if url.name in ['django.swagger.resources.view']:
                    continue
                # try adding url to list of urls to show; the 'api_' prefix
                # is an internal naming convention stripped for display.
                try:
                    endpoints.append({
                        'name': url.name.replace('api_', ''),
                        'url': reverse(url.name, request=request, format=format)
                    })
                # urls of object details will fail silently (eg: /nodes/<slug>/)
                except NoReverseMatch:
                    pass
    return Response(endpoints)
|
python
|
def auth_string(self):
    '''
    Build an HTTP ``Authorization`` header value of the form
    ``Token <base64(username:token)>`` from ``self.username`` and
    ``self.token``.

    :return: the authorization string, e.g. ``'Token dXNlcjp0b2tlbg=='``
    '''
    username_token = '{username}:{token}'.format(username=self.username, token=self.token)
    # b64encode requires bytes on Python 3 (passing str raises TypeError);
    # decode the result so the header interpolates as text, not "b'...'".
    b64encoded_string = b64encode(username_token.encode('utf-8')).decode('ascii')
    auth_string = 'Token {b64}'.format(b64=b64encoded_string)
    return auth_string
|
java
|
/**
 * Checks whether this class implements {@code intf}, either directly or
 * anywhere up its superclass chain.
 *
 * @param intf interface to look for; must be non-null and an interface
 * @return {@code true} if the interface is implemented in the hierarchy
 * @throws IllegalArgumentException if {@code intf} is null or not an interface
 */
public final boolean hasInterface(final SgClass intf) {
    if (intf == null) {
        throw new IllegalArgumentException("The argument 'intf' cannot be null!");
    }
    if (!intf.isInterface()) {
        // Fixed typo in the message: "an not" -> "and not".
        throw new IllegalArgumentException(
                "The argument 'intf' is a class and not an interface!");
    }
    for (int i = 0; i < interfaces.size(); i++) {
        if (interfaces.get(i).equals(intf)) {
            return true;
        }
    }
    // Not declared here: continue the search in the superclass chain.
    if (superClass != null) {
        return superClass.hasInterface(intf);
    }
    return false;
}
|
java
|
/**
 * Extracts the first {@link HierarchicalConfiguration} nested inside a
 * {@link CompositeConfiguration}.
 *
 * @param configuration configuration to inspect
 * @return the first hierarchical sub-configuration, or {@code null} when
 *         the argument is not composite or contains no hierarchical entry
 */
public static HierarchicalConfiguration getHierarchicalConfiguration(final Configuration configuration) {
    if (!(configuration instanceof CompositeConfiguration)) {
        // maybe I need to send a runtime exception ??
        // throw new
        // ConfigurationRuntimeException("no hierarchical configuration was defined");
        return null;
    }
    final CompositeConfiguration composite = (CompositeConfiguration) configuration;
    final int total = composite.getNumberOfConfigurations();
    for (int i = 0; i < total; i++) {
        final Configuration candidate = composite.getConfiguration(i);
        if (candidate instanceof HierarchicalConfiguration) {
            return (HierarchicalConfiguration) candidate;
        }
    }
    return null;
}
|
java
|
/**
 * Writes the string to the response stream, honouring the declared
 * content-length limit, the response buffer size, and first-write observer
 * notifications. With a non-zero buffer the string is copied into the
 * internal byte buffer (flushing as it fills); with a zero buffer the bytes
 * are flushed straight through. When FINISH_RESPONSE_ON_CLOSE is enabled
 * and the stream was already closed, the write is refused.
 *
 * @param s string to write; ignored when {@code null}
 * @throws IOException on flush failure
 */
public void print(String s) throws IOException
{
    //PK89810, If WCCustomProperties.FINISH_RESPONSE_ON_CLOSE is set and stream is already obtained and closed.
    // We will not allow user to print more data if the above case is met, outputstreamClosed will be true and default is false.
    if (!(WCCustomProperties.FINISH_RESPONSE_ON_CLOSE) || !outputstreamClosed ) //PK89810
    {
        if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) { //306998.15
            logger.logp(Level.FINE, CLASS_NAME,"print", "print --> "+s);
        }
        if(s != null) { // PQ88872
            // First write on this stream: alert the observer(s) exactly once.
            if (!_hasWritten && obs != null)
            {
                _hasWritten = true;
                if (obsList!=null){
                    for (int i=0;i<obsList.size();i++){
                        obsList.get(i).alertFirstWrite();
                    }
                }
                else {
                    obs.alertFirstWrite();
                }
            }
            int len = s.length();
            // Clamp the write to the declared content length; the overflow is
            // remembered as an exception surfaced later by check().
            if (limit > -1)
            {
                if (total + len > limit)
                {
                    len = limit - total;
                    except = new WriteBeyondContentLengthException();
                }
            }
            if(this.getBufferSize() != 0){ // PM44112
                int off = 0;
                while (len > 0)
                {
                    // n = free space remaining in the internal buffer.
                    int n = buf.length - count;
                    if (n == 0)
                    {
                        // Buffer full: flush without propagating a client flush.
                        response.setFlushMode(false);
                        flushBytes();
                        response.setFlushMode(true);
                        n = buf.length - count;
                    }
                    if (n > len)
                    {
                        n = len;
                    }
                    //
                    // NOTE: this getBytes call is deprecated ... it doesn't work
                    // correctly for multibyte characters, and doesn't take account
                    // of the character encoding that the client expects.
                    //
                    s.getBytes(off, off + n, buf, count);
                    count += n;
                    total += n;
                    off += n;
                    len -= n;
                }
            } // PM44112 Start
            else if(getBufferSize() == 0) {
                // Unbuffered mode: temporarily swap in the string's bytes and
                // flush them immediately.
                byte[] origBuf=buf; // store the original buff , which is 0
                buf = s.getBytes(); // update buf with the String to print.
                count = buf.length;
                total+=count; // keep the count and the total written
                if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE))
                    logger.logp(Level.FINE, CLASS_NAME,"print", "response Buffersize is set to zero , to print --> "+ count +" total -->"+ total);
                response.setFlushMode(true);
                flushBytes();
                buf=origBuf; // set it back to original buff, i.e. 0 value
                if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE))
                    logger.logp(Level.FINE, CLASS_NAME,"print", "after flushbytes, buf --> "+ buf.length +" count --> "+ count+" total -->"+ total);
            }// PM44112 End
            // Surfaces any WriteBeyondContentLengthException recorded above.
            check();
        } // PQ88872
    }
    else{//PK89810 start
        if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE))
            logger.logp(Level.FINE, CLASS_NAME,"print", "print not allowed, outputstreamClosed value --> " + outputstreamClosed);
    }//PK89810 End
}
|
java
|
/**
 * Maps each configured column name in {@code _strs} to its index within
 * {@code names}.
 *
 * @param names available column names to search
 * @return one index per configured column, in configuration order
 * @throws IllegalArgumentException when a configured column is absent
 */
@Override
public int[] columns(String[] names) {
    final int[] indices = new int[_strs.length];
    for (int i = 0; i < _strs.length; i++) {
        final int idx = water.util.ArrayUtils.find(names, _strs[i]);
        if (idx == -1) throw new IllegalArgumentException("Column " + _strs[i] + " not found");
        indices[i] = idx;
    }
    return indices;
}
|
python
|
def negative_report(binary_report, sha256hash, project, patch_file):
    """
    If no match is made and file is clean: log the clean result and append
    it to the per-project gate report.

    :param binary_report: scan report dict with at least ``permalink`` and
        ``scan_date`` keys
    :param sha256hash: sha256 of the scanned file (usable as a suppression)
    :param project: project name; selects the report log file
    :param patch_file: path of the scanned binary (used in messages only)
    """
    report_url = binary_report['permalink']
    scan_date = binary_report['scan_date']
    logger.info('File scan date for %s shows a clean status on: %s', patch_file, scan_date)
    # Fixed typo in the log message: "avaliable" -> "available".
    logger.info('Full report available here: %s', report_url)
    logger.info('The following sha256 hash can be used in your %s.yaml file to suppress this scan:', project)
    logger.info('%s', sha256hash)
    # reports_dir is a module-level global; append so earlier entries survive.
    with open(reports_dir + "binaries-" + project + ".log", "a") as gate_report:
        gate_report.write('Non Whitelisted Binary: {}\n'.format(patch_file))
        gate_report.write('File scan date for {} shows a clean status on {}\n'.format(patch_file, scan_date))
        gate_report.write('The following sha256 hash can be used in your {}.yaml file to suppress this scan:\n'.format(project))
        gate_report.write('{}\n'.format(sha256hash))
|
java
|
/**
 * Starts the executor service matching the requested client type.
 * Unrecognized types fall back to the commit-metrics service.
 *
 * @param system     the system context handed to the started service
 * @param clientType which client service to launch
 * @param jobCounter shared counter the service uses to track jobs
 * @return the started executor service
 */
static ExecutorService startClientService(SystemMain system, ClientType clientType, AtomicInteger jobCounter) {
    if (clientType == ClientType.ALERT) {
        return startAlertClientService(system, jobCounter);
    }
    if (clientType == ClientType.COMMIT_SCHEMA) {
        /* Alpha feature, not currently supported. */
        return startCommitSchemaClientService(system, jobCounter);
    }
    if (clientType == ClientType.COMMIT_ANNOTATIONS) {
        return startCommitAnnotationsClientService(system, jobCounter);
    }
    if (clientType == ClientType.PROCESS_QUERIES) {
        return startProcessMetricsClientService(system, jobCounter);
    }
    return startCommitMetricsClientService(system, jobCounter);
}
|
java
|
/**
 * Sets the action taken when the work directory is not empty.
 * The value is matched case-insensitively against
 * {@link NonEmptyWorkDirectoryAction} constants.
 *
 * @param value action name (case-insensitive)
 * @throws IllegalArgumentException listing the accepted values when
 *         {@code value} does not name a known action
 */
public void setOnNonEmptyWorkDirectory(String value) {
    try {
        this.nonEmptyWorkDirAction = NonEmptyWorkDirectoryAction.valueOf(value.toUpperCase(Locale.ROOT));
    } catch (IllegalArgumentException e) {
        // Chain the original exception so the root cause is not lost.
        throw new IllegalArgumentException("OnNonEmptyWorkDirectory accepts any of: "
                + Arrays.toString(NonEmptyWorkDirectoryAction.values()) + ", value is not valid: " + value, e);
    }
}
|
python
|
def validateFilepath(self):
    """
    Recolors the filepath editor to reflect the current validation state.
    Does nothing when validation is disabled.
    """
    if not self.isValidated():
        return
    # Pick the palette colors matching the current validity.
    if self.isValid():
        fg, bg = self.validForeground(), self.validBackground()
    else:
        fg, bg = self.invalidForeground(), self.invalidBackground()
    palette = self.palette()
    palette.setColor(palette.Base, bg)
    palette.setColor(palette.Text, fg)
    self._filepathEdit.setPalette(palette)
|
python
|
def render_impl(self, template, context, **opts):
    """
    Render given template file and return the result.

    :param template: Template file path
    :param context: A dict or dict-like object to instantiate given
        template file
    :param opts: Options such as:

        - at_paths: Template search paths
        - at_encoding: Template encoding
        - other keyword options passed to jinja2.Environment. Please note
          that 'loader' option is not supported because anytemplate does
          not support to load template except for files

    :return: Rendered string
    """
    # Only the file name is handed to the renderer; the directory is expected
    # to be covered by the template search paths.
    template_name = os.path.basename(template)
    return self._render(template_name, context, True, **opts)
|
java
|
/**
 * Consumes and returns the next character from the buffer, or {@code -1}
 * at end of input. Maintains position bookkeeping for error reporting:
 * on a newline the line counter advances and the row origin is reset.
 */
private int advanceChar() throws JsonParserException {
    if (eof)
        return -1;
    int c = string.charAt(index);
    if (c == '\n') {
        linePos++;
        // rowPos marks the buffer index where the current line starts,
        // so column = index - rowPos when reporting errors.
        rowPos = index + 1;
        utf8adjust = 0;
    }
    index++;
    // Flag EOF once the cursor has passed the last buffered character;
    // the *next* call will then return -1.
    if (index >= bufferLength)
        eof = true;
    return c;
}
|
java
|
/**
 * Builds a JAX-RS {@link WebTarget} for the Openfire REST API path,
 * appending any non-null query parameters.
 *
 * @param restPath    path below {@code /plugins/restapi/v1/}
 * @param queryParams optional query parameters (null-valued entries skipped)
 * @return the configured web target
 * @throws IllegalArgumentException when the URI or client cannot be created
 */
private WebTarget createWebTarget(String restPath, Map<String, String> queryParams) {
    WebTarget webTarget;
    try {
        URI u = new URI(this.baseURI + "/plugins/restapi/v1/" + restPath);
        Client client = createRestClient();
        webTarget = client.target(u);
        if (queryParams != null && !queryParams.isEmpty()) {
            for (Map.Entry<String, String> entry : queryParams.entrySet()) {
                // Skip entries with a null key or value: queryParam would
                // either NPE or encode the literal string "null".
                if (entry.getKey() != null && entry.getValue() != null) {
                    LOG.debug("PARAM: {} = {}", entry.getKey(), entry.getValue());
                    webTarget = webTarget.queryParam(entry.getKey(), entry.getValue());
                }
            }
        }
    } catch (Exception e) {
        // Chain the cause so the underlying failure is not lost.
        throw new IllegalArgumentException("Something went wrong by creating the client: " + e, e);
    }
    return webTarget;
}
|
java
|
/**
 * Sets the note-updated-at filters, taking a defensive copy of the given
 * collection. Passing {@code null} clears the filters.
 *
 * @param noteUpdatedAt filters to store, or {@code null} to clear
 */
public void setNoteUpdatedAt(java.util.Collection<DateFilter> noteUpdatedAt) {
    this.noteUpdatedAt = (noteUpdatedAt == null)
            ? null
            : new java.util.ArrayList<DateFilter>(noteUpdatedAt);
}
|
java
|
/**
 * Counts the lines in {@code str}: one more than the number of {@code '\n'}
 * characters. Null or empty input yields {@code 0}.
 *
 * @param str string to inspect, may be {@code null}
 * @return the number of lines, or {@code 0} for null/empty input
 */
public static int getLineCount(String str) {
    if (null == str || str.isEmpty()) {
        return 0;
    }
    int count = 1;
    // Walk the newline positions instead of copying the string to a char[].
    int pos = str.indexOf('\n');
    while (pos >= 0) {
        count++;
        pos = str.indexOf('\n', pos + 1);
    }
    return count;
}
|
python
|
def sortData(self, column):
    """
    Returns the data to be used when sorting.  If no sort data has been
    explicitly defined, then the value in the EditRole for this item's
    column will be used.

    :param      column | <int>

    :return     <variant>
    """
    value = unwrapVariant(self.data(column, self.SortRole))
    # Fixed inverted logic: the original returned None when no sort data was
    # set (instead of falling back to the EditRole) and returned the EditRole
    # value when sort data WAS set (ignoring it) — the opposite of the
    # documented contract.
    if value is not None:
        return value
    return unwrapVariant(self.data(column, QtCore.Qt.EditRole))
|
java
|
/**
 * Renders the person's name as HTML by delegating to the name renderer
 * for the given rendering context.
 *
 * @param context rendering context for the current output
 * @param person  person whose name is rendered
 * @return HTML fragment for the person's name
 */
private String nameHtml(final RenderingContext context,
        final Person person) {
    return nameRenderer(context, person).getNameHtml();
}
|
python
|
def get_isp(self, ip):
    '''Return the ISP for ``ip``, or the falsy lookup result (e.g. ``None``)
    when the full record cannot be resolved.'''
    record = self.get_all(ip)
    # Preserve the falsy record itself on failure, mirroring `rec and rec.isp`.
    return record.isp if record else record
|
java
|
/**
 * Constructs a trifocal tensor from the motions of views two and three.
 * Each 3x3 slice T_col is filled as
 * {@code T[i][j] = R2(i,col)*T3(j) - T2(i)*R3(j,col)},
 * i.e. the standard construction when the first view is the reference
 * camera — NOTE(review): assumes P2/P3 are expressed relative to an
 * identity first view; confirm against callers.
 *
 * @param P2  motion of the second view
 * @param P3  motion of the third view
 * @param ret optional storage for the result; allocated when {@code null}
 * @return the trifocal tensor
 */
public static TrifocalTensor createTrifocal( Se3_F64 P2 , Se3_F64 P3 ,
        @Nullable TrifocalTensor ret ) {
    if( ret == null )
        ret = new TrifocalTensor();
    DMatrixRMaj R2 = P2.getR();
    DMatrixRMaj R3 = P3.getR();
    Vector3D_F64 T2 = P2.getT();
    Vector3D_F64 T3 = P3.getT();
    // One 3x3 tensor slice per column of the rotation matrices.
    for( int col = 0; col < 3; col++ ) {
        DMatrixRMaj T = ret.getT(col);
        int index = 0;
        for( int i = 0; i < 3; i++ ) {
            double a_left = R2.unsafe_get(i,col);
            double a_right = T2.getIdx(i);
            // Row-major fill of the slice: index walks (i,j) in order.
            for( int j = 0; j < 3; j++ ) {
                T.data[index++] = a_left*T3.getIdx(j) - a_right*R3.unsafe_get(j,col);
            }
        }
    }
    return ret;
}
|
java
|
/**
 * Returns whether the given primitive key is present in this hash table.
 * The bucket is chosen from the non-negative key hash; the bucket's chain
 * is then scanned linearly.
 *
 * @param key key to look up
 * @return {@code true} if an entry with this key exists
 */
public boolean containsKey(int key) {
    final Entry[] entries = table;
    // Mask the sign bit so the modulo result is a valid bucket index.
    final int bucket = (key & 0x7FFFFFFF) % entries.length;
    Entry e = entries[bucket];
    while (e != null) {
        if (e.key == key) {
            return true;
        }
        e = e.next;
    }
    return false;
}
|
java
|
/**
 * Converts a netmask address to its CIDR prefix length.
 * {@code isValidNetmask} guarantees the set bits are contiguous from the
 * top, so the prefix length is simply the total population count — this
 * replaces the previous hand-rolled fall-through switch over the handful
 * of legal byte values (-1, -2, -4, ... -128, 0), which it matches exactly.
 *
 * @param netmask netmask to convert (IPv4 or IPv6)
 * @return the prefix length in bits
 * @throws IllegalArgumentException when the address is not a valid netmask
 */
public static int netmaskToPrefix(InetAddress netmask)
{
    byte[] mask = netmask.getAddress();
    if (!isValidNetmask(mask))
        throw new IllegalArgumentException("Not a valid netmask: " + netmask.getHostAddress());
    int prefix = 0;
    for (byte b : mask)
    {
        // Mask to an unsigned byte before counting bits.
        prefix += Integer.bitCount(b & 0xFF);
    }
    return prefix;
}
|
python
|
def mtspec(data, delta, time_bandwidth, nfft=None, number_of_tapers=None,
           quadratic=False, adaptive=True, verbose=False,
           optional_output=False, statistics=False, rshape=False,
           fcrit=False):
    """
    Wrapper method for the mtspec subroutine in the library by German A.
    Prieto.

    This method estimates the adaptive weighted multitaper spectrum, as in
    Thomson 1982. This is done by estimating the DPSS (discrete prolate
    spheroidal sequences), multiplying each of the tapers with the data series,
    take the FFT, and using the adaptive scheme for a better estimation. It
    outputs the power spectral density (PSD).

    :param data: :class:`numpy.ndarray`
        Array with the data.
    :param delta: float
        Sample spacing of the data.
    :param time_bandwidth: float
        Time-bandwidth product. Common values are 2, 3, 4 and numbers in
        between.
    :param nfft: int
        Number of points for fft. If nfft == None, no zero padding
        will be applied before the fft.
    :param number_of_tapers: integer, optional
        Number of tapers to use. Defaults to int(2*time_bandwidth) - 1. This
        is the maximum sensible amount. More tapers will have no great
        influence on the final spectrum but increase the calculation time.
        Use fewer tapers for a faster calculation.
    :param quadratic: bool, optional
        Whether or not to calculate a quadratic multitaper. Will only work
        if nfft is False or equal to the sample count. The nfft parameter
        will overwrite the quadratic parameter. Defaults to False.
    :param adaptive: bool, optional
        Whether to use adaptive or constant weighting of the eigenspectra.
        Defaults to True(adaptive).
    :param verbose: bool, optional
        Passed to the fortran library. Defaults to False.
    :param optional_output: bool, optional
        Calculates and returns additional output parameters. See the notes in
        the docstring for further details.
    :param statistics: bool, optional
        Calculates and returns statistics. See the notes in the docstring for
        further details.
    :param rshape: integer/None, optional
        Determines whether or not to perform the F-test for lines. If rshape
        is 1 or 2, then don't put the lines back. If rshape is 2 only check
        around 60 Hz. See the fortran source code for more information.
        Defaults to None (do not perform the F-test).
    :param fcrit: float/None, optional
        The threshold probability for the F-test. If none is given, the mtspec
        library calculates a default value. See the fortran source code for
        details. Defaults to None.
    :return: Returns a list with :class:`numpy.ndarray`. See the note
        below.

    .. note::

        This method will at return at least two arrays: The calculated spectrum
        and the corresponding frequencies. If optional_output is true it will
        also return (in the given order) (multidimensional) arrays containing
        the eigenspectra, the corresponding eigencoefficients and an array
        containing the weights for each eigenspectra normalized so that the sum
        of squares over the eigenspectra is one. If statistics is True is will
        also return (in the given order) (multidimensional) arrays containing
        the jackknife 5% and 95% confidence intervals, the F statistics for
        single line and the number of degrees of freedom for each frequency
        bin. If both optional_output and statistics are true, the
        optional_outputs will be returned before the statistics.
    """
    npts = len(data)
    # Depending on whether nfft is specified or not, initiate _MtspecType
    # for mtspec_pad_ (zero padding) or mtspec_d_ (no padding).
    if nfft is None or nfft == npts:
        nfft = npts
        mt = _MtspecType("float64")  # mtspec_d_
    else:
        mt = _MtspecType("float32")  # mtspec_pad_
        quadratic = False  # quadratic is only supported without zero padding
    # Use the optimal number of tapers in case no number is specified.
    if number_of_tapers is None:
        number_of_tapers = int(2 * time_bandwidth) - 1
    # Transform the data to work with the library.
    data = np.require(data, dtype=mt.float, requirements=[mt.order])
    # Get some information necessary for the call to the Fortran library.
    number_of_frequency_bins = int(nfft / 2) + 1
    # Create output arrays.
    spectrum = mt.empty(number_of_frequency_bins)
    frequency_bins = mt.empty(number_of_frequency_bins)
    # Create optional outputs.
    if optional_output is True:
        eigenspectra = mt.empty((number_of_frequency_bins, number_of_tapers))
        eigencoefficients = mt.empty((nfft, number_of_tapers), complex=True)
        weights = mt.empty((number_of_frequency_bins, number_of_tapers))
    else:
        # mt.p(None) presumably yields a null pointer for the Fortran call.
        eigenspectra = eigencoefficients = weights = None
    # Create statistics.
    if statistics is True:
        jackknife_interval = mt.empty((number_of_frequency_bins, 2))
        f_statistics = mt.empty(number_of_frequency_bins)
        degrees_of_freedom = mt.empty(number_of_frequency_bins)
    else:
        jackknife_interval = f_statistics = degrees_of_freedom = None
    # Verbose mode on or off.
    # NOTE(review): C.c_char('y') requires bytes (b'y') on Python 3 —
    # confirm the supported Python version for this module.
    if verbose is True:
        verbose = C.byref(C.c_char('y'))
    else:
        verbose = None
    # Determine whether or not to compute the quadratic multitaper.
    if quadratic is True:
        quadratic = C.byref(C.c_int(1))
    else:
        quadratic = None
    # Determine whether to use adaptive or constant weighting of the
    # eigenspectra.
    if adaptive is True:
        adaptive = None
    else:
        adaptive = C.byref(C.c_int(1))
    # Determines whether or not to perform the F-test for lines. If rshape is 1
    # or 2, then don't put the lines back. If rshape is 2 only check around 60
    # Hz. See the fortran source code for more information.
    if type(rshape) == int:
        rshape = C.byref(C.c_int(rshape))
    else:
        rshape = None
    # The threshold probability for the F-test. If none is given, the mtspec
    # library calculates a default value. See the fortran source code for
    # details.
    if type(fcrit) == float:
        fcrit = C.byref(C.c_float(fcrit))
    else:
        fcrit = None
    # Call the library. Fortran passes pointers!
    args = [C.byref(C.c_int(npts)), C.byref(C.c_int(nfft)),
            C.byref(mt.c_float(delta)), mt.p(data),
            C.byref(mt.c_float(time_bandwidth)),
            C.byref(C.c_int(number_of_tapers)),
            C.byref(C.c_int(number_of_frequency_bins)), mt.p(frequency_bins),
            mt.p(spectrum), verbose, quadratic, adaptive,
            mt.p(eigencoefficients), mt.p(weights),
            mt.p(jackknife_interval), mt.p(degrees_of_freedom),
            mt.p(eigenspectra), rshape, mt.p(f_statistics), fcrit, None]
    # Different argument list for mtspec_pad_ vs. mtspec_d_: the unpadded
    # variant takes no separate nfft argument, so drop it.
    if npts == nfft:
        args.pop(1)
    # finally call the shared library function
    mt.mtspec(*args)
    # Figure out what to return. See the docstring of this method for details.
    return_values = [spectrum, frequency_bins]
    if optional_output is True:
        return_values.extend([eigenspectra, eigencoefficients, weights])
    if statistics is True:
        return_values.extend([jackknife_interval, f_statistics,
                              degrees_of_freedom])
    return return_values
|
python
|
def _do_bulk_write_command(
        namespace, operation, command, docs, check_keys, opts, ctx):
    """Bulk write commands entry point.

    Dispatches to the OP_MSG path for servers with wire version > 5
    (MongoDB 3.6+), otherwise to the legacy batched write command path.
    """
    use_op_msg = ctx.sock_info.max_wire_version > 5
    handler = _do_batched_op_msg if use_op_msg else _do_batched_write_command
    return handler(namespace, operation, command, docs, check_keys, opts, ctx)
|
java
|
/**
 * Selects the image configurations whose name matches the given filter.
 *
 * @param nameFilter     filter expression applied to each configuration
 * @param imagesToFilter candidate configurations
 * @return the matching configurations, in input order
 */
private static List<ImageConfiguration> filterImages(String nameFilter, List<ImageConfiguration> imagesToFilter) {
    final List<ImageConfiguration> matches = new ArrayList<>();
    for (ImageConfiguration candidate : imagesToFilter) {
        if (!matchesConfiguredImages(nameFilter, candidate)) {
            continue;
        }
        matches.add(candidate);
    }
    return matches;
}
|
java
|
/**
 * Returns a log-safe copy of {@code originalUrl} with any embedded
 * user-info credentials replaced via {@code replaceOrDie}.
 * Because the raw URL may contain either the decoded or the %-encoded form
 * of each credential, every combination is tried and the first successful
 * replacement wins; when none succeeds the URL is returned unchanged.
 *
 * @param originalUrl the raw URL string as supplied by the user
 * @param parsedUrl   the parsed form used to extract the credentials
 * @return the obfuscated URL, or {@code originalUrl} when nothing matched
 */
static String obfuscateCredentials(String originalUrl, HttpUrl parsedUrl) {
    String username = parsedUrl.username();
    String password = parsedUrl.password();
    // No credentials embedded in the URL: nothing to obfuscate.
    if (username.isEmpty() && password.isEmpty()) {
        return originalUrl;
    }
    if (!username.isEmpty() && !password.isEmpty()) {
        String encodedUsername = parsedUrl.encodedUsername();
        String encodedPassword = parsedUrl.encodedPassword();
        // replaceOrDie appears to return null on failure (hence the
        // nonNull filter); the suppliers defer each attempt until needed.
        return Stream.<Supplier<String>>of(
            () -> replaceOrDie(originalUrl, username, password),
            () -> replaceOrDie(originalUrl, encodedUsername, encodedPassword),
            () -> replaceOrDie(originalUrl, encodedUsername, password),
            () -> replaceOrDie(originalUrl, username, encodedPassword))
            .map(Supplier::get)
            .filter(Objects::nonNull)
            .findFirst()
            .orElse(originalUrl);
    }
    if (!username.isEmpty()) {
        // Username-only URLs (e.g. token auth): mask just the username.
        return Stream.<Supplier<String>>of(
            () -> replaceOrDie(originalUrl, username, null),
            () -> replaceOrDie(originalUrl, parsedUrl.encodedUsername(), null))
            .map(Supplier::get)
            .filter(Objects::nonNull)
            .findFirst()
            .orElse(originalUrl);
    }
    checkState(password.isEmpty(), "having a password without a username should never occur");
    return originalUrl;
}
|
python
|
def types(self):
    '''Returns an iterator over the types of the neurites in the object.

    If the wrapped object has no ``neurites`` attribute it is treated as a
    single tree, so exactly one value is produced.
    '''
    if hasattr(self._obj, 'neurites'):
        neurites = self._obj.neurites
    else:
        neurites = (self._obj,)
    return (neurite.type for neurite in neurites)
|
python
|
def fitString(s, maxCol=79, newlineReplacement=None):
    r"""Truncate `s` if necessary to fit into a line of width `maxCol`
    (default: 79), also replacing newlines with `newlineReplacement` (default
    `None`: in which case everything after the first newline is simply
    discarded).

    Examples:

    >>> fitString('12345', maxCol=5)
    '12345'
    >>> fitString('123456', maxCol=5)
    '12...'
    >>> fitString('a line\na second line')
    'a line'
    >>> fitString('a line\na second line', newlineReplacement='\\n')
    'a line\\na second line'
    """
    assert isString(s)
    newline_pos = s.find('\n')
    if newline_pos != -1:
        if newlineReplacement is None:
            # Keep only the first line.
            s = s[:newline_pos]
        else:
            s = s.replace("\n", newlineReplacement)
    # Truncate with a trailing ellipsis; maxCol=None disables truncation.
    if maxCol is not None and len(s) > maxCol:
        s = "%s..." % s[:maxCol-3]
    return s
|
java
|
/**
 * Looks up a conversation by id.
 *
 * @param conversationId the conversation id; blank ids yield {@code null}
 * @return the conversation, or {@code null} for an empty id
 */
public AVIMConversation getConversation(String conversationId) {
    // Reject blank ids up front rather than forwarding them downstream.
    if (StringUtil.isEmpty(conversationId)) {
        return null;
    }
    // A temporary conversation is recognized purely by its id prefix.
    final boolean isTemporary = conversationId.startsWith(Conversation.TEMPCONV_ID_PREFIX);
    return this.getConversation(conversationId, false, isTemporary);
}
|
java
|
/**
 * Appends the given triggers to this configuration's trigger list,
 * lazily creating the backing list on first use.
 *
 * @param rollbackTriggers triggers to add
 * @return this configuration, for call chaining
 */
public RollbackConfiguration withRollbackTriggers(RollbackTrigger... rollbackTriggers) {
    if (this.rollbackTriggers == null) {
        // Presize the lazily created list to the incoming varargs length.
        setRollbackTriggers(new com.amazonaws.internal.SdkInternalList<RollbackTrigger>(rollbackTriggers.length));
    }
    java.util.Collections.addAll(this.rollbackTriggers, rollbackTriggers);
    return this;
}
|
python
|
def exclude_from_stock_ip_list(exclude_ip_list):
    """Remove every ip in ``exclude_ip_list`` from the module-level
    ``stock_ip_list`` and from ``future_ip_list`` (extended market).

    :param exclude_ip_list: list of ip entries to drop
    :return: None
    """
    for banned in exclude_ip_list:
        # Stock market servers.
        if banned in stock_ip_list:
            stock_ip_list.remove(banned)
        # Extended (futures) market servers.
        if banned in future_ip_list:
            future_ip_list.remove(banned)
|
python
|
def repr_part(self):
    """String usable in a space's ``__repr__`` method."""
    # Optional arguments with their defaults (1.0 / 2.0); only values that
    # differ from the defaults end up in the rendered signature.
    optional = [('weighting', self.const, 1.0),
                ('exponent', self.exponent, 2.0)]
    return signature_string([], optional, mod=':.4')
|
java
|
/**
 * Retrieves the value of the designated column as a {@link Timestamp},
 * using the given calendar for interpreting the stored value.
 *
 * @param columnIndex 1-based column index
 * @param cal calendar used when parsing the stored value
 * @return a defensive copy of the parsed timestamp, or {@code null} for SQL NULL
 * @throws SQLException if the column value cannot be parsed as a timestamp
 */
public Timestamp getTimestamp(final int columnIndex, final Calendar cal) throws SQLException {
    try {
        final Timestamp result = getValueObject(columnIndex).getTimestamp(cal);
        if (result == null) {
            return null;
        }
        // Return a copy so callers cannot mutate a cached instance.
        return new Timestamp(result.getTime());
    } catch (ParseException e) {
        // Fixed message: this accessor parses a timestamp, not a time.
        throw SQLExceptionMapper.getSQLException("Could not parse as timestamp");
    }
}
|
python
|
def bytescale(data, cmin=None, cmax=None, high=255, low=0):
    """
    Byte scales an array (image).

    Byte scaling means converting the input image to uint8 dtype and scaling
    the range to ``(low, high)`` (default 0-255). If the input image already
    has dtype uint8, no scaling is done.

    Parameters
    ----------
    data : ndarray
        PIL image data array.
    cmin : scalar, optional
        Bias scaling of small values. Default is ``data.min()``.
    cmax : scalar, optional
        Bias scaling of large values. Default is ``data.max()``.
    high : scalar, optional
        Scale max value to `high`. Default is 255.
    low : scalar, optional
        Scale min value to `low`. Default is 0.

    Returns
    -------
    img_array : uint8 ndarray
        The byte-scaled array.

    Examples
    --------
    >>> from scipy.misc import bytescale
    >>> img = np.array([[ 91.06794177,   3.39058326,  84.4221549 ],
    ...                 [ 73.88003259,  80.91433048,   4.88878881],
    ...                 [ 51.53875334,  34.45808177,  27.5873488 ]])
    >>> bytescale(img)
    array([[255,   0, 236],
           [205, 225,   4],
           [140,  90,  70]], dtype=uint8)
    >>> bytescale(img, high=200, low=100)
    array([[200, 100, 192],
           [180, 188, 102],
           [155, 135, 128]], dtype=uint8)
    >>> bytescale(img, cmin=0, cmax=255)
    array([[91,  3, 84],
           [74, 81,  5],
           [52, 34, 28]], dtype=uint8)
    """
    # Already byte data: nothing to do.
    if data.dtype == uint8:
        return data
    # Validate the requested output range.
    if high > 255:
        raise ValueError("`high` should be less than or equal to 255.")
    if low < 0:
        raise ValueError("`low` should be greater than or equal to 0.")
    if high < low:
        raise ValueError("`high` should be greater than or equal to `low`.")
    # Default the input range to the data's own extremes.
    cmin = data.min() if cmin is None else cmin
    cmax = data.max() if cmax is None else cmax
    cscale = cmax - cmin
    if cscale < 0:
        raise ValueError("`cmax` should be larger than `cmin`.")
    if cscale == 0:
        # Flat input: avoid division by zero; everything maps to `low`.
        cscale = 1
    scale = float(high - low) / cscale
    # Affine map into [low, high], then round half-up via +0.5 and truncate.
    bytedata = (data - cmin) * scale + low
    return (bytedata.clip(low, high) + 0.5).astype(uint8)
|
java
|
/**
 * Deletes a policy assignment asynchronously, reporting the result to the
 * given callback. Thin wrapper around
 * {@code deleteWithServiceResponseAsync(scope, policyAssignmentName)}.
 *
 * @param scope the scope of the policy assignment
 * @param policyAssignmentName the name of the policy assignment to delete
 * @param serviceCallback the callback to invoke on success or failure
 * @return a future for the asynchronous delete operation
 */
public ServiceFuture<PolicyAssignmentInner> deleteAsync(String scope, String policyAssignmentName, final ServiceCallback<PolicyAssignmentInner> serviceCallback) {
    return ServiceFuture.fromResponse(deleteWithServiceResponseAsync(scope, policyAssignmentName), serviceCallback);
}
|
python
|
def update_webhook(self, webhook, name=None, metadata=None):
    """Update the given webhook's name and/or metadata.

    Any subset of the optional parameters may be supplied; the work is
    delegated to the manager with this policy and its scaling group.
    """
    return self.manager.update_webhook(
        self.scaling_group,
        policy=self,
        webhook=webhook,
        name=name,
        metadata=metadata)
|
python
|
def get_page_tags(page):
    """Return all the tags attached to a Page instance.

    :param page: a Page instance
    :return: queryset of attached tags, or an empty list when the page has
        no related PageTags row
    :type: List
    """
    # Imported here to avoid a circular import at module load time.
    from .models import PageTags
    try:
        tags = page.pagetags.tags.all()
    except PageTags.DoesNotExist:
        # No PageTags relation exists for this page yet.
        return []
    return tags
|
java
|
/**
 * Indexes a CRL under its issuer's X.500 principal in {@code crlIssuers}.
 * The map value is either a single X509CRL (the common case) or a
 * List of X509CRLs once two or more distinct CRLs share an issuer.
 */
private void indexCRL(X509CRL crl) {
    X500Principal issuer = crl.getIssuerX500Principal();
    // put() returns any previous mapping; we may need to merge with it.
    Object oldEntry = crlIssuers.put(issuer, crl);
    if (oldEntry != null) { // assume this is unlikely
        if (oldEntry instanceof X509CRL) {
            if (crl.equals(oldEntry)) {
                // Same CRL re-indexed; the map already holds it.
                return;
            }
            // Two distinct CRLs for this issuer: promote the entry to a list.
            List<X509CRL> list = new ArrayList<>(2);
            list.add(crl);
            list.add((X509CRL)oldEntry);
            crlIssuers.put(issuer, list);
        } else {
            // See crlIssuers javadoc.
            @SuppressWarnings("unchecked")
            List<X509CRL> list = (List<X509CRL>)oldEntry;
            if (list.contains(crl) == false) {
                list.add(crl);
            }
            // Re-install the list, since the put() above replaced it with crl.
            crlIssuers.put(issuer, list);
        }
    }
}
|
python
|
def get_for_resource(resource):
    """ Get list of all price list items that should be used for resource.

    If price list item is defined for service - return it, otherwise -
    return default price list item.
    """
    # Default items are matched on the resource's concrete model type.
    resource_content_type = ContentType.objects.get_for_model(resource)
    default_items = set(DefaultPriceListItem.objects.filter(resource_content_type=resource_content_type))
    service = resource.service_project_link.service
    # Service-specific items that override one of the defaults above.
    items = set(PriceListItem.objects.filter(
        default_price_list_item__in=default_items, service=service).select_related('default_price_list_item'))
    # Defaults that have been overridden by a service-specific item.
    rewrited_defaults = set([i.default_price_list_item for i in items])
    # Service overrides win; un-overridden defaults fill in the rest.
    return items | (default_items - rewrited_defaults)
|
java
|
/**
 * Reads the next batch of entries from a remote directory.
 * <p>
 * The directory is opened on demand when the supplied file carries no
 * handle. A single SSH_FXP_READDIR round trip is performed; the entries
 * returned by the server are appended to {@code children}.
 *
 * @param file the directory to list; must be a directory
 * @param children receives the SftpFile entries read by this call
 * @return the number of entries added, or -1 once the server reports EOF
 * @throws SftpStatusException if the server reports an SFTP-level error
 * @throws SshException if the transport fails or the response is unexpected
 */
public int listChildren(SftpFile file, Vector<SftpFile> children)
        throws SftpStatusException, SshException {
    if (file.isDirectory()) {
        if (file.getHandle() == null) {
            // No handle yet: open the directory first.
            file = openDirectory(file.getAbsolutePath());
            if (file.getHandle() == null) {
                throw new SftpStatusException(
                        SftpStatusException.SSH_FX_FAILURE,
                        "Failed to open directory");
            }
        }
    } else {
        throw new SshException("Cannot list children for this file object",
                SshException.BAD_API_USAGE);
    }
    try {
        UnsignedInteger32 requestId = nextRequestId();
        Packet msg = createPacket();
        msg.write(SSH_FXP_READDIR);
        msg.writeInt(requestId.longValue());
        msg.writeBinaryString(file.getHandle());
        sendMessage(msg);
        // (Removed a stray empty statement that followed sendMessage.)
        SftpMessage bar = getResponse(requestId);
        if (bar.getType() == SSH_FXP_NAME) {
            SftpFile[] files = extractFiles(bar, file.getAbsolutePath());
            for (int i = 0; i < files.length; i++) {
                children.addElement(files[i]);
            }
            return files.length;
        } else if (bar.getType() == SSH_FXP_STATUS) {
            int status = (int) bar.readInt();
            if (status == SftpStatusException.SSH_FX_EOF) {
                // EOF signals the directory listing is complete.
                return -1;
            }
            if (version >= 3) {
                // SFTP v3+ status responses carry a human-readable message.
                String desc = bar.readString().trim();
                throw new SftpStatusException(status, desc);
            }
            throw new SftpStatusException(status);
        } else {
            // Protocol violation: tear the channel down.
            close();
            throw new SshException(
                    "The server responded with an unexpected message",
                    SshException.CHANNEL_FAILURE);
        }
    } catch (SshIOException ex) {
        throw ex.getRealException();
    } catch (IOException ex) {
        throw new SshException(ex);
    }
}
|
java
|
/**
 * Persists a new commerce notification attachment and schedules a search
 * reindex (via the {@code @Indexable} annotation).
 *
 * @param commerceNotificationAttachment the attachment to add
 * @return the persisted attachment
 */
@Indexable(type = IndexableType.REINDEX)
@Override
public CommerceNotificationAttachment addCommerceNotificationAttachment(
    CommerceNotificationAttachment commerceNotificationAttachment) {
    // Mark as new so the persistence layer performs an insert, not an update.
    commerceNotificationAttachment.setNew(true);
    return commerceNotificationAttachmentPersistence.update(commerceNotificationAttachment);
}
|
java
|
/**
 * Configures the data grid's pager from the tag attributes: default-pager
 * rendering, page sizes, the pager renderer (custom class or named
 * format), and the page link (HREF or action).
 *
 * @throws JspException if the surrounding DataGridTagModel is missing, a
 *         custom pager renderer cannot be instantiated, the pager format
 *         is unsupported, or both an HREF and an action are supplied
 */
public void doTag()
    throws JspException {
    DataGridTagModel dgm = DataGridUtil.getDataGridTagModel(getJspContext());
    if(dgm == null)
        throw new JspException(Bundle.getErrorString("DataGridTags_MissingDataGridModel"));
    PagerModel pm = dgm.getState().getPagerModel();
    assert pm != null;
    if(_disableDefaultPager != null)
        dgm.setDisableDefaultPagerRendering(_disableDefaultPager.booleanValue());
    if(_pageSize != null)
        pm.setPageSize(_pageSize.intValue());
    if(_defaultPageSize != null)
        pm.setDefaultPageSize(_defaultPageSize.intValue());
    PagerRenderer pagerRenderer = null;
    // A custom renderer class takes precedence over a named pager format.
    if(_pagerRendererClass != null) {
        try {
            pagerRenderer = (PagerRenderer)ExtensionUtil.instantiateClass(_pagerRendererClass, PagerRenderer.class);
            assert pagerRenderer != null : "Expected a non-null pager renderer of type \"" + _pagerRendererClass + "\"";
        }
        catch(DataGridExtensionException e) {
            String msg = Bundle.getErrorString("ConfigurePager_CantCreateCustomPagerRenderer", new Object[]{e});
            JspException jsp = new JspException(msg, e);
            // todo: future cleanup
            // The 2.5 Servlet api will set the initCause in the Throwable superclass during construction,
            // this will cause an IllegalStateException on the following call.
            if (jsp.getCause() == null) {
                jsp.initCause(e);
            }
            throw jsp;
        }
    }
    else if(_pagerFormat != null) {
        if(_pagerFormat.equals(PAGER_FORMAT_FIRST_LAST_PREV_NEXT))
            pagerRenderer = new FirstPreviousNextLastPagerRenderer();
        else if(_pagerFormat.equals(PAGER_FORMAT_PREV_NEXT))
            pagerRenderer = new PreviousNextPagerRenderer();
        else
            throw new JspException(Bundle.getErrorString("ConfigurePager_UnsupportedPagerFormat", new Object[]{_pagerFormat}));
    }
    if(pagerRenderer != null)
        dgm.setPagerRenderer(pagerRenderer);
    // HREF and action are mutually exclusive page-link mechanisms.
    if(_pageHref != null && _pageAction != null)
        throw new JspException(Bundle.getErrorString("ConfigurePager_CantSetHrefAndAction"));
    if(_pageHref == null && _pageAction == null) {
        LOGGER.info("The configurePager tag has no page action or HREF specified; using the current request URI instead.");
        _pageHref = JspUtil.getRequest(getJspContext()).getRequestURI();
    }
    if(_pageHref != null)
        pm.setPageHref(_pageHref);
    if(_pageAction != null)
        pm.setPageAction(_pageAction);
}
|
java
|
/**
 * Attempts to fold a "during" comparison into the retrieve as a date-range
 * filter. Succeeds only when the retrieve has no date filter yet, the left
 * operand resolves to a property path on {@code alias}, and the right
 * operand is eligible as a date range.
 *
 * @return true when the retrieve was updated, false otherwise
 */
private boolean attemptDateRangeOptimization(BinaryExpression during, Retrieve retrieve, String alias) {
    // Bail out if the retrieve already carries a date filter.
    if (retrieve.getDateProperty() != null || retrieve.getDateRange() != null) {
        return false;
    }
    final Expression lhs = during.getOperand().get(0);
    final Expression rhs = during.getOperand().get(1);
    final String propertyPath = getPropertyPath(lhs, alias);
    if (propertyPath == null || !isRHSEligibleForDateRangeOptimization(rhs)) {
        return false;
    }
    retrieve.setDateProperty(propertyPath);
    retrieve.setDateRange(rhs);
    return true;
}
|
python
|
def profile_view(request, user_id=None):
    """Displays a view of a user's profile.

    Args:
        user_id
            The ID of the user whose profile is being viewed. If not
            specified, show the user's own profile.
    """
    # Eighth-office users are redirected to the stripped-down eighth profile
    # unless "full" is explicitly requested in the query string.
    if request.user.is_eighthoffice and "full" not in request.GET and user_id is not None:
        return redirect("eighth_profile", user_id=user_id)
    if user_id is not None:
        try:
            profile_user = User.objects.get(id=user_id)
            if profile_user is None:
                raise Http404
        except User.DoesNotExist:
            raise Http404
    else:
        profile_user = request.user
    # Build the next `num_blocks` eighth-period blocks together with this
    # user's signup (if any) for each of them.
    num_blocks = 6
    eighth_schedule = []
    start_block = EighthBlock.objects.get_first_upcoming_block()
    blocks = []
    if start_block:
        blocks = [start_block] + list(start_block.next_blocks(num_blocks - 1))
    for block in blocks:
        sch = {"block": block}
        try:
            sch["signup"] = EighthSignup.objects.get(scheduled_activity__block=block, user=profile_user)
        except EighthSignup.DoesNotExist:
            sch["signup"] = None
        except MultipleObjectsReturned:
            # Data inconsistency: report it but keep the page rendering.
            client.captureException()
            sch["signup"] = None
        eighth_schedule.append(sch)
    # Sponsors additionally see the next ten activities they sponsor.
    if profile_user.is_eighth_sponsor:
        sponsor = EighthSponsor.objects.get(user=profile_user)
        start_date = get_start_date(request)
        eighth_sponsor_schedule = (EighthScheduledActivity.objects.for_sponsor(sponsor).filter(block__date__gte=start_date).order_by(
            "block__date", "block__block_letter"))
        eighth_sponsor_schedule = eighth_sponsor_schedule[:10]
    else:
        eighth_sponsor_schedule = None
    admin_or_teacher = (request.user.is_eighth_admin or request.user.is_teacher)
    can_view_eighth = (profile_user.can_view_eighth or request.user == profile_user)
    eighth_restricted_msg = (not can_view_eighth and admin_or_teacher)
    # Hide the schedule entirely from viewers without sufficient privileges.
    if not can_view_eighth and not request.user.is_eighth_admin and not request.user.is_teacher:
        eighth_schedule = []
    has_been_nominated = profile_user.username in [
        u.nominee.username for u in request.user.nomination_votes.filter(position__position_name=settings.NOMINATION_POSITION)
    ]
    context = {
        "profile_user": profile_user,
        "eighth_schedule": eighth_schedule,
        "can_view_eighth": can_view_eighth,
        "eighth_restricted_msg": eighth_restricted_msg,
        "eighth_sponsor_schedule": eighth_sponsor_schedule,
        "nominations_active": settings.NOMINATIONS_ACTIVE,
        "nomination_position": settings.NOMINATION_POSITION,
        "has_been_nominated": has_been_nominated
    }
    return render(request, "users/profile.html", context)
|
python
|
def assert_allclose(actual, desired, rtol=1.e-5, atol=1.e-8,
                    err_msg='', verbose=True):
    r"""wrapper for numpy.testing.allclose with default tolerances of
    numpy.allclose. Needed since testing method has different values."""
    # Delegates to the numpy.testing implementation (aliased elsewhere as
    # assert_allclose_np), but with numpy.allclose's defaults
    # (rtol=1e-5, atol=1e-8) instead of numpy.testing's stricter ones.
    return assert_allclose_np(actual, desired, rtol=rtol, atol=atol,
                              err_msg=err_msg, verbose=verbose)
|
python
|
def GetHandlers(self):
    """Build the urllib2 handler chain implied by this configuration.

    Returns:
      A list of urllib2.BaseHandler subclasses: an HTTPS handler when an
      SSL context is configured, plus a proxy handler when proxies are set.
    """
    chain = []
    if self.ssl_context:
        chain.append(urllib2.HTTPSHandler(context=self.ssl_context))
    if self.proxies:
        chain.append(urllib2.ProxyHandler(self.proxies))
    return chain
|
python
|
def append(self, item):
    """Append item to end of model"""
    # Qt requires the begin/endInsertRows bracketing so that any attached
    # views are notified and update themselves.
    self.beginInsertRows(QtCore.QModelIndex(),
                         self.rowCount(),
                         self.rowCount())
    self.items.append(item)
    self.endInsertRows()
|
java
|
/**
 * Caches each given CPDefinitionInventory in the entity cache; instances
 * that are already cached have their original values reset instead.
 *
 * @param cpDefinitionInventories the entities to cache
 */
@Override
public void cacheResult(List<CPDefinitionInventory> cpDefinitionInventories) {
    for (CPDefinitionInventory cpDefinitionInventory : cpDefinitionInventories) {
        // Only insert into the cache when no entry exists for this key.
        if (entityCache.getResult(
                CPDefinitionInventoryModelImpl.ENTITY_CACHE_ENABLED,
                CPDefinitionInventoryImpl.class,
                cpDefinitionInventory.getPrimaryKey()) == null) {
            cacheResult(cpDefinitionInventory);
        }
        else {
            cpDefinitionInventory.resetOriginalValues();
        }
    }
}
|
python
|
def env(*_vars, **kwargs):
    """Return the value of the first defined environment variable.

    Each name in ``_vars`` is probed in order; the first one whose value is
    truthy (set and non-empty) wins. When none match, the ``default``
    keyword argument (or ``''``) is returned.
    """
    for name in _vars:
        found = os.environ.get(name, None)
        if found:
            return found
    return kwargs.get('default', '')
|
python
|
def modify_account(self, account, attrs):
    """Apply attribute changes to an account.

    :param account: a zobjects.Account
    :param attrs: a dictionary of attributes to set ({key: value, ...})
    """
    # The API expects attributes as a list of {'n': name, '_content': value}.
    attr_list = [{'n': name, '_content': value}
                 for name, value in attrs.items()]
    self.request('ModifyAccount', {
        'id': self._get_or_fetch_id(account, self.get_account),
        'a': attr_list
    })
|
java
|
/**
 * Runs the Jxnet vs Pcap4j benchmark {@code maxIteration} times in three
 * configurations (plain, with thread pool, packet decoder with thread
 * pool), logs per-iteration timings, and prints win totals at the end.
 *
 * @param args unused command line arguments
 */
public void run(String... args) {
    int maxIteration = 10;
    int totalMoreFast = 0;
    int totalMoreFastWithThreadPool = 0;
    int totalMoreFastPacketThreadPool = 0;
    for (int i = 0; i < maxIteration; i++) {
        LOGGER.info("**********************************");
        // One timed run per competitor and configuration.
        long jxnetRunnerRes = jxnetRunner.run();
        long jxnetWithThreadPoolRunnerRes = jxnetWithThreadPoolRunner.run();
        long jxnetPacketThreadPoolRunnerRes = springJxnetWithThreadPoolRunner.run();
        long pcap4jRunnerRes = pcap4jRunner.run();
        long pcap4jWithThreadPoolRunnerRes = pcap4jWithThreadPoolRunner.run();
        long pcap4jPacketThreadPoolRunnerRes = pcap4jPacketWithThreadPoolRunner.run();
        LOGGER.info("Jxnet x Pcap4j");
        // Lower elapsed time wins.
        boolean moreFast = jxnetRunnerRes < pcap4jRunnerRes;
        boolean moreFastWithThreadPool = jxnetWithThreadPoolRunnerRes < pcap4jWithThreadPoolRunnerRes;
        boolean moreFastPacketThreadPool = jxnetPacketThreadPoolRunnerRes < pcap4jPacketThreadPoolRunnerRes;
        if (moreFast) {
            totalMoreFast++;
        }
        if (moreFastWithThreadPool) {
            totalMoreFastWithThreadPool++;
        }
        if (moreFastPacketThreadPool) {
            totalMoreFastPacketThreadPool++;
        }
        LOGGER.info("Is Jxnet runner more fast? {} : {}",
            moreFast ? "YES" : "NO",
            jxnetRunnerRes + " and " + pcap4jRunnerRes);
        LOGGER.info("Is Jxnet with thread pool runner more fast? {} : {}",
            moreFastWithThreadPool ? "YES" : "NO",
            jxnetWithThreadPoolRunnerRes + " and " + pcap4jWithThreadPoolRunnerRes);
        LOGGER.info("IS Jxnet packet with thread pool runner more fast? {} : {}",
            moreFastPacketThreadPool ? "YES" : "NO",
            jxnetPacketThreadPoolRunnerRes + " and " + pcap4jPacketThreadPoolRunnerRes);
        LOGGER.info("**********************************\n");
    }
    LOGGER.info("Total jxnet more fast : {}/{}", totalMoreFast, maxIteration);
    LOGGER.info("Total jxnet more fast with thread pool : {}/{}", totalMoreFastWithThreadPool, maxIteration);
    LOGGER.info("Total jxnet more fast packet decoder with thread pool : {}/{}", totalMoreFastPacketThreadPool, maxIteration);
    // Benchmarks done: stop the shared executor immediately.
    executorService.shutdownNow();
}
|
python
|
def _read_private_key_file(self, tag, filename, password=None):
"""
Read an SSH2-format private key file, looking for a string of the type
``"BEGIN xxx PRIVATE KEY"`` for some ``xxx``, base64-decode the text we
find, and return it as a string. If the private key is encrypted and
``password`` is not ``None``, the given password will be used to decrypt
the key (otherwise `.PasswordRequiredException` is thrown).
:param str tag: ``"RSA"`` or ``"DSA"``, the tag used to mark the data block.
:param str filename: name of the file to read.
:param str password:
an optional password to use to decrypt the key file, if it's
encrypted.
:return: data blob (`str`) that makes up the private key.
:raises IOError: if there was an error reading the file.
:raises PasswordRequiredException: if the private key file is
encrypted, and ``password`` is ``None``.
:raises SSHException: if the key file is invalid.
"""
with open(filename, 'r') as f:
data = self._read_private_key(tag, f, password)
return data
|
python
|
def get_cohp(self, spin=None, integrated=False):
    """
    Returns the COHP or ICOHP for a particular spin.

    Args:
        spin: Spin. Can be parsed as spin object, integer (-1/1)
            or str ("up"/"down")
        integrated: Return COHP (False) or ICOHP (True)

    Returns:
        Returns the CHOP or ICOHP for the input spin. If Spin is
        None and both spins are present, both spins will be returned
        as a dictionary.
    """
    # Select the plain COHP or its energy-integrated variant.
    if not integrated:
        populations = self.cohp
    else:
        populations = self.icohp
    if populations is None:
        return None
    elif spin is None:
        # No spin requested: hand back whatever is stored (a dict).
        return populations
    else:
        # Normalize int (-1/1) or str ("up"/"down") input to a Spin member.
        if isinstance(spin, int):
            spin = Spin(spin)
        elif isinstance(spin, str):
            s = {"up": 1, "down": -1}[spin.lower()]
            spin = Spin(s)
        return {spin: populations[spin]}
|
java
|
/**
 * Returns a comma-separated list of column names for a query object.
 * In the CFML dialect the names are upper-cased; other dialects keep the
 * original casing. Non-query objects are delegated to the generic
 * collection accessor under the COLUMNLIST key.
 *
 * @param pc current page context
 * @param obj a Query, or any collection-like object
 * @return the comma-separated column list
 * @throws PageException if the collection access fails
 */
public static Object columnlist(PageContext pc, Object obj) throws PageException {
    if (obj instanceof Query) {
        Key[] columnNames = ((Query) obj).getColumnNames();
        // CFML historically reports column names in upper case.
        boolean upperCase = pc.getCurrentTemplateDialect() == CFMLEngine.DIALECT_CFML;
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < columnNames.length; i++) {
            if (i > 0) sb.append(',');
            sb.append(upperCase ? columnNames[i].getUpperString() : columnNames[i].getString());
        }
        return sb.toString();
    }
    return pc.getCollection(obj, KeyConstants._COLUMNLIST);
}
|
java
|
/**
 * Serialises a {@code Reporting} model section into the given XML element,
 * creating or removing the corresponding child element as needed.
 *
 * @param value the reporting section, or null to remove the element
 * @param xmlTag the tag name to write (normally "reporting")
 * @param counter position counter used for element ordering
 * @param element parent element to update
 */
protected void updateReporting(Reporting value, String xmlTag, Counter counter, Element element)
{
    boolean shouldExist = value != null;
    Element root = updateElement(counter, element, xmlTag, shouldExist);
    if (shouldExist)
    {
        Counter innerCount = new Counter(counter.getDepth() + 1);
        // Only write excludeDefaults when it differs from the default "false".
        findAndReplaceSimpleElement(innerCount, root, "excludeDefaults",
            (value.isExcludeDefaults() == false) ? null : String.valueOf(value.isExcludeDefaults()), "false");
        findAndReplaceSimpleElement(innerCount, root, "outputDirectory", value.getOutputDirectory(), null);
        iterateReportPlugin(innerCount, root, value.getPlugins(), "plugins", "plugin");
    }
}
|
python
|
def fcoe_get_login_output_fcoe_login_list_fcoe_login_interface_name(self, **kwargs):
    """Auto Generated Code

    Builds the <fcoe_get_login> request tree keyed by the login session MAC
    and carrying the login interface name, then hands it to the callback.
    """
    config = ET.Element("config")
    fcoe_get_login = ET.Element("fcoe_get_login")
    # The request root replaces the initial config element (generated quirk).
    config = fcoe_get_login
    output = ET.SubElement(fcoe_get_login, "output")
    login_list = ET.SubElement(output, "fcoe-login-list")
    session_mac = ET.SubElement(login_list, "fcoe-login-session-mac")
    session_mac.text = kwargs.pop('fcoe_login_session_mac')
    interface_name = ET.SubElement(login_list, "fcoe-login-interface-name")
    interface_name.text = kwargs.pop('fcoe_login_interface_name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
java
|
/**
 * Writes the given image to the output stream in the named format, using
 * the default alpha-write setting.
 *
 * @param image the image to write
 * @param format the image format name (e.g. "png")
 * @param out the destination stream
 * @throws SlickException if the image cannot be written
 */
public static void write(Image image, String format, OutputStream out) throws SlickException {
    write(image, format, out, DEFAULT_ALPHA_WRITE);
}
|
python
|
def register_handler(self, name, metadata, callback):
    """Register subscriptions and their event handlers.

    :param str name: the subscription name as used by watchman
    :param dict metadata: a dictionary of metadata to be serialized and passed to the watchman
                          subscribe command. this should include the match expression as well
                          as any required callback fields.
    :param func callback: the callback to execute on each matching filesystem event
    """
    # Handler names must be unique per watcher instance.
    assert name not in self._handlers, 'duplicate handler name: {}'.format(name)
    # Watchman requires both 'fields' and 'expression' keys in the metadata.
    assert (
        isinstance(metadata, dict) and 'fields' in metadata and 'expression' in metadata
    ), 'invalid handler metadata!'
    self._handlers[name] = Watchman.EventHandler(name=name, metadata=metadata, callback=callback)
|
python
|
def textContent(self):
    '''
    textContent - property, gets the text of this node and all inner nodes.

    Use .innerText for just this node's text
    @return <str> - The text of all nodes at this level or lower
    '''
    def _collateText(curNode):
        '''
        _collateText - Recursive function to gather the "text" of all blocks
        in the order that they appear

        @param curNode <AdvancedTag> - The current AdvancedTag to process
        @return list<str> - A list of strings in order. Join using '' to obtain text
        as it would appear
        '''
        curStrLst = []
        # Bypass any overridden attribute access on the tag object.
        blocks = object.__getattribute__(curNode, 'blocks')
        for block in blocks:
            if isTagNode(block):
                # Nested tag: recurse and splice its text in order.
                curStrLst += _collateText(block)
            else:
                # Plain text block: keep as-is.
                curStrLst.append(block)
        return curStrLst
    return ''.join(_collateText(self))
|
java
|
/**
 * Retrieves the data of a previously created historics preview.
 *
 * @param preview a preview previously returned by the create call; must
 *                carry a non-null id
 * @return a future that completes with the preview data
 * @throws IllegalArgumentException if preview or its id is null
 */
public FutureData<HistoricsPreviewData> get(HistoricsPreview preview) {
    if (preview == null || preview.id() == null) {
        // Fixed typo in the original message ("isntance" -> "instance").
        throw new IllegalArgumentException("A valid preview instance is required");
    }
    FutureData<HistoricsPreviewData> future = new FutureData<HistoricsPreviewData>();
    URI uri = newParams().forURL(config.newAPIEndpointURI(GET));
    POST request = config.http()
            .POST(uri, new PageReader(newRequestCallback(future, new HistoricsPreviewData(), config)))
            .form("id", preview.id());
    performRequest(future, request);
    return future;
}
|
python
|
def fetch(self):
    """ Fetch more results from Dynamo """
    # Apply the limit bookkeeping to the request arguments before calling.
    self.limit.set_request_args(self.kwargs)
    data = self.connection.call(*self.args, **self.kwargs)
    self.limit.post_fetch(data)
    self.last_evaluated_key = data.get('LastEvaluatedKey')
    if self.last_evaluated_key is None:
        # No more pages: make sure we don't resume from a stale key.
        self.kwargs.pop('ExclusiveStartKey', None)
    else:
        # Resume the next page where this one stopped.
        self.kwargs['ExclusiveStartKey'] = self.last_evaluated_key
    self._update_capacity(data)
    if 'consumed_capacity' in data:
        self.consumed_capacity += data['consumed_capacity']
    # Generator: decode and yield only items accepted by the limit filter.
    for raw_item in data['Items']:
        item = self.connection.dynamizer.decode_keys(raw_item)
        if self.limit.accept(item):
            yield item
|
java
|
/**
 * Simplified variant of {@code describeAlarmsAsync} that issues a default,
 * empty {@link DescribeAlarmsRequest}.
 *
 * @param asyncHandler callback invoked on completion or failure
 * @return a future for the asynchronous operation
 */
@Override
public java.util.concurrent.Future<DescribeAlarmsResult> describeAlarmsAsync(
        com.amazonaws.handlers.AsyncHandler<DescribeAlarmsRequest, DescribeAlarmsResult> asyncHandler) {
    return describeAlarmsAsync(new DescribeAlarmsRequest(), asyncHandler);
}
|
python
|
def _mcon(self):
    """ Buffer IRC data and handle PING/PONG. """
    with self.lock:
        # Seed with a sentinel space so the loop condition below can index
        # sdata[-1] on the first pass; it is cleared on the first read.
        sdata = ' '
        # Keep reading until the buffered data ends with the final CRLF
        # byte, i.e. we hold a complete run of lines.
        while sdata[-1] != self._crlf[-1]:
            if sdata == ' ':
                sdata = ''
            try:
                sdata = sdata + \
                    self._socket.recv(4096).decode(self.encoding)
            except UnicodeDecodeError:
                # Retry with the fallback encoding.
                # NOTE(review): the bytes from the failed decv/decode attempt
                # are discarded and recv() is called again — confirm intended.
                sdata = sdata + \
                    self._socket.recv(4096).decode(self.fallback_encoding)
        lines = sdata.split(self._crlf)
        for line in lines:
            # Answer server keepalive PINGs immediately.
            if line.find('PING :') == 0:
                self.send(line.replace('PING', 'PONG'))
            if line != '':
                self._buffer.append(line)
|
python
|
def interconnect_all(self):
    """Propagate dependencies for provided instances"""
    # Visit providers in dependency order so each instance is injected only
    # after the things it depends on.
    for dep in topologically_sorted(self._provides):
        # Inject only objects that declare injections and were not already
        # wired (__injections_source__ marks an already-injected object).
        if hasattr(dep, '__injections__') and not hasattr(dep, '__injections_source__'):
            self.inject(dep)
|
java
|
/**
 * Distils the potential tags down to those that appeared in every add:
 * a tag survives only when its occurrence count equals {@code totalAdds}.
 *
 * @return a new map containing only the universally present tags
 */
public Map<String, String> getTags() {
    final Map<String, String> distilled = new HashMap<String, String>();
    for (Map.Entry<String, String> candidate : potentialTags.entrySet()) {
        final String tagKey = candidate.getKey();
        // Keep a tag only if it was seen on every add.
        if (tagCounts.get(tagKey).equals(totalAdds)) {
            distilled.put(tagKey, candidate.getValue());
        }
    }
    return distilled;
}
|
python
|
def balance_scores_and_dont_scale(cat_scores, not_cat_scores):
    '''Combine two score arrays into one signed array without rescaling.

    Positions where the category score dominates keep that (positive)
    score; positions where the not-category score dominates get the
    negated not-category score; ties stay 0.

    :param cat_scores: np.array of in-category scores
    :param not_cat_scores: np.array of out-of-category scores, same length
    :return: np.array of signed, unscaled scores
    '''
    # np.float was removed in NumPy 1.24; the builtin float (== float64)
    # is the documented replacement and preserves the original dtype.
    scores = np.zeros(len(cat_scores)).astype(float)
    cat_wins = cat_scores > not_cat_scores
    not_cat_wins = cat_scores < not_cat_scores
    scores[cat_wins] = cat_scores[cat_wins]
    scores[not_cat_wins] = -not_cat_scores[not_cat_wins]
    return scores
|
java
|
/**
 * Fetches all message templates registered with the account.
 *
 * @return the templates parsed from the "url.template.list" endpoint response
 */
public List<Template> list(){
    String url = WxEndpoint.get("url.template.list");
    logger.debug("template message, list templates.");
    String response = wxClient.get(url);
    // The endpoint wraps the list in a container object; unwrap it.
    TemplateListWrapper templateListWrapper = JsonMapper.defaultMapper().fromJson(response, TemplateListWrapper.class);
    return templateListWrapper.getTemplateList();
}
|
python
|
def transcribe(rune_sentence: str, runic_alphabet: list):
    """
    From a runic inscription, the transcribe method gives a conventional transcription.

    :param rune_sentence: str, elements of this are from runic_alphabet or are punctuations
    :param runic_alphabet: list
    :return: str, the conventional transcription of the inscription
    """
    res = []
    # Build the rune-form -> transcription lookup once, up front.
    d_form_transcription = Transcriber.from_form_to_transcription(runic_alphabet)
    for c in rune_sentence:
        if c in runic_alphabet:
            res.append(d_form_transcription[c])
        elif c in "()":
            # Parentheses (editorial marks) are kept verbatim.
            res.append(c)
        else:
            # Anything else (separators, punctuation) becomes POINT.
            res.append(POINT)
    return "".join(res)
|
java
|
/**
 * Collects the indices of all visited registry slots.
 *
 * @return indices whose registry entry equals ONE; empty when none visited
 */
public ArrayList<Integer> getVisited() {
    final int visitedCount = this.registry.length - this.unvisitedCount;
    if (visitedCount == 0) {
        // Nothing visited yet: avoid scanning the registry.
        return new ArrayList<Integer>();
    }
    // Presize to the known number of visited entries.
    final ArrayList<Integer> visited = new ArrayList<Integer>(visitedCount);
    for (int idx = 0; idx < this.registry.length; idx++) {
        if (ONE == this.registry[idx]) {
            visited.add(idx);
        }
    }
    return visited;
}
|
java
|
/**
 * Reports whether the given feature deviates from its default value.
 * Generated EMF accessor; unknown features defer to the superclass.
 *
 * @param featureID the feature to test
 * @return true when the feature holds a non-default value
 */
@Override
public boolean eIsSet(int featureID) {
    switch (featureID) {
    case AfplibPackage.RPS__RLENGTH:
        return RLENGTH_EDEFAULT == null ? rlength != null : !RLENGTH_EDEFAULT.equals(rlength);
    case AfplibPackage.RPS__RPTDATA:
        return RPTDATA_EDEFAULT == null ? rptdata != null : !RPTDATA_EDEFAULT.equals(rptdata);
    }
    return super.eIsSet(featureID);
}
|
java
|
/**
 * Prepares (compiles) a SQL statement and optionally returns metadata
 * about it, without retaining the prepared statement.
 *
 * @param sql the SQL to validate/prepare; must not be null
 * @param connectionFlags flags used to select a suitable connection
 * @param cancellationSignal optional signal to cancel the operation
 * @param outStatementInfo receives statement metadata; may be null
 * @throws IllegalArgumentException if sql is null
 */
public void prepare(String sql, int connectionFlags, CancellationSignal cancellationSignal,
        SQLiteStatementInfo outStatementInfo) {
    if (sql == null) {
        throw new IllegalArgumentException("sql must not be null.");
    }
    if (cancellationSignal != null) {
        cancellationSignal.throwIfCanceled();
    }
    acquireConnection(sql, connectionFlags, cancellationSignal); // might throw
    try {
        mConnection.prepare(sql, outStatementInfo); // might throw
    } finally {
        // Always return the connection, even when prepare() fails.
        releaseConnection(); // might throw
    }
}
|
java
|
/**
 * Configures an exponential back off policy from the given options.
 * Mutually exclusive with supplying a custom RetryOperations or a custom
 * back off policy.
 *
 * @param initialInterval initial sleep interval, in milliseconds
 * @param multiplier factor applied to the interval on each retry
 * @param maxInterval upper bound for the interval, in milliseconds
 * @return this builder, for chaining
 */
public RetryInterceptorBuilder<T> backOffOptions(long initialInterval,
        double multiplier, long maxInterval) {
    Assert.isNull(this.retryOperations,
            "cannot set the back off policy when a custom retryOperations has been set");
    Assert.isTrue(!this.backOffPolicySet,
            "cannot set the back off options when a back off policy has been set");
    ExponentialBackOffPolicy policy = new ExponentialBackOffPolicy();
    policy.setInitialInterval(initialInterval);
    policy.setMultiplier(multiplier);
    policy.setMaxInterval(maxInterval);
    this.retryTemplate.setBackOffPolicy(policy);
    // Record that the template has been customised by this builder.
    this.backOffOptionsSet = true;
    this.templateAltered = true;
    return this;
}
|
java
|
/**
 * Marshalls the given request into the protocol representation.
 *
 * @param listTagsForResourceRequest the request to marshall; must not be null
 * @param protocolMarshaller destination marshaller
 * @throws SdkClientException if the request is null or marshalling fails
 */
public void marshall(ListTagsForResourceRequest listTagsForResourceRequest, ProtocolMarshaller protocolMarshaller) {
    if (listTagsForResourceRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        // The request carries a single bindable field: the resource ARN.
        protocolMarshaller.marshall(listTagsForResourceRequest.getArn(), ARN_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
java
|
/**
 * Loads a user by e-mail address.
 *
 * @param conn open JDBC connection (not closed by this method)
 * @param email the e-mail address to look up
 * @return the matching User, or null when no row matches
 * @throws SQLException on database access errors
 */
public User getByEmail( Connection conn, String email) throws SQLException {
    // Maps the single result row onto a User bean by column name.
    ResultSetHandler<User> h = new BeanHandler<User>(User.class);
    String sql =
        "SELECT id,username,fullname,type,emailaddress,registered_ts,invited_by,enabled \n"+
        "FROM userbase \n"+
        "WHERE emailaddress=?";
    return run.query(conn, sql, h, email);
}
|
java
|
/**
 * Fetches a live-streaming session by its id.
 *
 * @param request carries the session id; must be non-null with a
 *                non-empty session id
 * @return the session details
 */
public GetSessionResponse getSession(GetSessionRequest request) {
    checkNotNull(request, "The parameter request should NOT be null.");
    checkStringNotEmpty(request.getSessionId(), "The parameter sessionId should NOT be null or empty string.");
    // GET /live/session/{sessionId}
    InternalRequest internalRequest = createRequest(HttpMethodName.GET, request, LIVE_SESSION,
            request.getSessionId());
    return invokeHttpClient(internalRequest, GetSessionResponse.class);
}
|
python
|
def read_varint64(self):
    """Reads a varint from the stream, interprets this varint
    as a signed, 64-bit integer, and returns the integer.
    """
    i = self.read_var_uint64()
    # Values above INT64_MAX encode negatives: undo the two's-complement
    # wrap-around by subtracting 2**64.
    if i > wire_format.INT64_MAX:
        i -= (1 << 64)
    return i
|
java
|
/**
 * Removes the given member from the formation and refreshes the remaining
 * slot assignments. A member without a valid slot is ignored.
 *
 * @param member the formation member to remove
 */
public void removeMember (FormationMember<T> member) {
    // Find the member's slot
    int slot = findMemberSlot(member);
    // Make sure we've found a valid result
    if (slot >= 0) {
        // Remove the slot
        // slotAssignments.removeIndex(slot);
        slotAssignmentStrategy.removeSlotAssignment(slotAssignments, slot);
        // Update the assignments
        updateSlotAssignments();
    }
}
|
java
|
/**
 * Serialises {@code iValue} into {@code iOutput} as
 * {@code <className><SEPARATOR><encoded payload>}; appends nothing when
 * the value is null.
 *
 * @param iOutput destination builder
 * @param iValue value to serialise; must implement OSerializableStream
 * @return the same builder, for chaining
 * @throws OSerializationException if the value does not implement
 *         OSerializableStream
 */
public StringBuilder toStream(final StringBuilder iOutput, Object iValue) {
    if (iValue != null) {
        if (!(iValue instanceof OSerializableStream))
            // Message grammar fixed ("it's not implements" -> "it does not implement").
            throw new OSerializationException("Cannot serialize the object since it does not implement the OSerializableStream interface");
        OSerializableStream stream = (OSerializableStream) iValue;
        iOutput.append(iValue.getClass().getName());
        iOutput.append(OStreamSerializerHelper.SEPARATOR);
        iOutput.append(OBinaryProtocol.bytes2string(stream.toStream()));
    }
    return iOutput;
}
|
java
|
/**
 * Returns the bean registered under {@code name} in the current thread's
 * scope context, creating and caching it via the factory on first use.
 *
 * @param name the bean name
 * @param factory used to create the bean when absent from the context
 * @return the scoped bean instance
 */
public Object get(String name, ObjectFactory<?> factory) {
    final ThreadScopeContext context = ThreadScopeContextHolder.getContext();
    Object bean = context.getBean(name);
    if (bean == null) {
        // First request on this thread: create and cache the bean.
        bean = factory.getObject();
        context.setBean(name, bean);
    }
    return bean;
}
|
java
|
/**
 * Loads the named security property, parses it into a trimmed array of
 * algorithm names, and populates both the disabled-algorithms map and the
 * derived key-size constraints map for that property.
 *
 * @param propertyName the security property to read
 */
private static void loadDisabledAlgorithmsMap(
        final String propertyName) {
    // Security.getProperty may be restricted; read it with full privileges.
    String property = AccessController.doPrivileged(
        new PrivilegedAction<String>() {
            public String run() {
                return Security.getProperty(propertyName);
            }
        });
    String[] algorithmsInProperty = null;
    if (property != null && !property.isEmpty()) {
        // remove double quote marks from beginning/end of the property
        if (property.charAt(0) == '"' &&
                property.charAt(property.length() - 1) == '"') {
            property = property.substring(1, property.length() - 1);
        }
        algorithmsInProperty = property.split(",");
        for (int i = 0; i < algorithmsInProperty.length; i++) {
            algorithmsInProperty[i] = algorithmsInProperty[i].trim();
        }
    }
    // map the disabled algorithms
    if (algorithmsInProperty == null) {
        // Unset/empty property: store an empty array, never null.
        algorithmsInProperty = new String[0];
    }
    disabledAlgorithmsMap.put(propertyName, algorithmsInProperty);
    // map the key constraints
    KeySizeConstraints keySizeConstraints =
        new KeySizeConstraints(algorithmsInProperty);
    keySizeConstraintsMap.put(propertyName, keySizeConstraints);
}
|
java
|
/**
 * Parses the ANTLR rule {@code variableDeclarators}:
 * {@code variableDeclarator ( ',' variableDeclarator )*}.
 * Generated parser code: supports backtracking (early return on prior
 * parse) and memoizes the rule's start index on exit.
 *
 * @throws RecognitionException on a syntax error (reported and recovered)
 */
public final void variableDeclarators() throws RecognitionException {
    int variableDeclarators_StartIndex = input.index();
    try {
        if ( state.backtracking>0 && alreadyParsedRule(input, 38) ) { return; }
        // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:451:5: ( variableDeclarator ( ',' variableDeclarator )* )
        // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:451:7: variableDeclarator ( ',' variableDeclarator )*
        {
            pushFollow(FOLLOW_variableDeclarator_in_variableDeclarators1303);
            variableDeclarator();
            state._fsp--;
            if (state.failed) return;
            // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:451:26: ( ',' variableDeclarator )*
            loop53:
            while (true) {
                int alt53=2;
                int LA53_0 = input.LA(1);
                if ( (LA53_0==43) ) {
                    alt53=1;
                }
                switch (alt53) {
                    case 1 :
                        // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:451:27: ',' variableDeclarator
                        {
                            match(input,43,FOLLOW_43_in_variableDeclarators1306); if (state.failed) return;
                            pushFollow(FOLLOW_variableDeclarator_in_variableDeclarators1308);
                            variableDeclarator();
                            state._fsp--;
                            if (state.failed) return;
                        }
                        break;
                    default :
                        break loop53;
                }
            }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        // do for sure before leaving
        if ( state.backtracking>0 ) { memoize(input, 38, variableDeclarators_StartIndex); }
    }
}
java
|
/**
 * Returns the attribute names of this model.
 *
 * If an explicit list was configured (via {@code _getAttrNames()}), that list
 * is returned directly. Otherwise the names are derived by reflection: every
 * public method whose name starts with "set" contributes the remainder of its
 * name, lower-cased, as an attribute name.
 */
public List<String> attrNames() {
    // Prefer the explicitly configured attribute names, if any.
    String[] configured = this._getAttrNames();
    if (configured != null && configured.length > 0) {
        return Arrays.asList(configured);
    }
    // Fall back to deriving names from setter methods.
    List<String> attrs = new ArrayList<String>();
    for (Method method : this._getUsefulClass().getMethods()) {
        String methodName = method.getName();
        if (!methodName.startsWith("set")) {
            continue;
        }
        // "setUserName" -> "username" (whole remainder lower-cased).
        String attr = methodName.substring(3).toLowerCase();
        if (StrKit.notBlank(attr)) {
            attrs.add(attr);
        }
    }
    return attrs;
}
|
python
|
def validate_arg(f,
                 arg_name,
                 *validation_func,  # type: ValidationFuncs
                 **kwargs
                 ):
    # type: (...) -> Callable
    """
    Decorator attaching input validation to a single argument of ``f``.

    Several such decorators may be stacked on one function as long as they sit
    directly on top of each other (no unrelated decorator in between). This is
    a thin convenience wrapper: all work is delegated to
    ``decorate_with_validation``.

    :param arg_name: name of the argument of ``f`` to validate.
    :param validation_func: the base validation function or list of base validation
        functions to use. A callable, a tuple(callable, help_msg_str), a
        tuple(callable, failure_type), or a list of several such elements. Nested
        lists are supported and indicate an implicit `and_` (such as the main
        list). Tuples indicate an implicit `_failure_raiser`.
        [mini_lambda](https://smarie.github.io/python-mini-lambda/) expressions can
        be used instead of callables, they will be transformed to functions
        automatically.
    :param error_type: a subclass of ValidationError to raise in case of validation
        failure. By default a ValidationError will be raised with the provided
        help_msg.
    :param help_msg: an optional help message to be used in the raised error in
        case of validation failure.
    :param none_policy: describes how None values should be handled. See
        `NoneArgPolicy` for the various possibilities. Default is
        `NoneArgPolicy.ACCEPT_IF_OPTIONAl_ELSE_VALIDATE`.
    :param kw_context_args: optional contextual information to store in the
        exception, and that may also be used to format the help message.
    :return: a function decorator that performs input validation before every
        execution of the decorated function.
    """
    return decorate_with_validation(f, arg_name, *validation_func, **kwargs)
|
python
|
def uninstalled(name):
    '''
    Ensure an update is uninstalled from the minion

    Args:
        name (str):
            Name of the Windows KB ("KB123456")

    Returns:
        dict: A standard state return dictionary with ``name``, ``changes``,
        ``result`` and ``comment`` keys. ``result`` is ``True`` when the KB
        is (or already was) uninstalled, ``None`` in test mode, ``False`` on
        failure.

    Example:

    .. code-block:: yaml

        KB123456:
          wusa.uninstalled
    '''
    ret = {'name': name,
           'changes': {},
           'result': False,
           'comment': ''}

    # Nothing to do if the KB is already absent
    if not __salt__['wusa.is_installed'](name):
        ret['result'] = True
        ret['comment'] = '{0} already uninstalled'.format(name)
        return ret

    # Check for test=True: report what would happen without doing it
    # (fix: the original set ret['result'] = None twice in this branch)
    if __opts__['test'] is True:
        ret['result'] = None
        ret['comment'] = '{0} would be uninstalled'.format(name)
        return ret

    # Uninstall the KB
    __salt__['wusa.uninstall'](name)

    # Verify successful uninstall
    if not __salt__['wusa.is_installed'](name):
        ret['comment'] = '{0} was uninstalled'.format(name)
        ret['changes'] = {'old': True, 'new': False}
        ret['result'] = True
    else:
        # ret['result'] stays False from the initial dict
        ret['comment'] = '{0} failed to uninstall'.format(name)
    return ret
|
python
|
def connect_sqs(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
    """
    Create a connection to Amazon SQS.

    :type aws_access_key_id: string
    :param aws_access_key_id: Your AWS Access Key ID

    :type aws_secret_access_key: string
    :param aws_secret_access_key: Your AWS Secret Access Key

    :rtype: :class:`boto.sqs.connection.SQSConnection`
    :return: A connection to Amazon's SQS
    """
    # Imported lazily so importing this module does not pull in the SQS
    # machinery until a connection is actually requested.
    from boto.sqs.connection import SQSConnection

    return SQSConnection(aws_access_key_id, aws_secret_access_key, **kwargs)
|
python
|
def set_cli_options(config, arguments=None):
    """Set any configuration options which have a CLI value set.

    Args:
        config (confpy.core.config.Configuration): A configuration object which
            has been initialized with options.
        arguments (iter of str): An iterable of strings which contains the CLI
            arguments passed. If nothing is given then sys.argv is used.

    Returns:
        confpy.core.config.Configuration: A configuration object with CLI
        values set.

    The pattern to follow when setting CLI values is:

        <section>_<option>

    Each value should be lower case and separated by underscores.
    """
    def _flag_name(section_name, option_name):
        # CLI flags follow the "<section>_<option>" convention, lower case.
        # (fix: this expression was previously duplicated in both loops)
        return '{0}_{1}'.format(section_name.lower(), option_name.lower())

    arguments = arguments or sys.argv[1:]
    parser = argparse.ArgumentParser()

    # First pass: declare one optional "--<section>_<option>" flag per option.
    for section_name, section in config:
        for option_name, _ in section:
            parser.add_argument('--{0}'.format(_flag_name(section_name,
                                                          option_name)))

    # parse_known_args so that unrelated flags from wrapping tools are ignored.
    args = vars(parser.parse_known_args(arguments)[0])

    # Second pass: copy every provided (truthy) CLI value onto its option.
    for section_name, section in config:
        for option_name, _ in section:
            value = args.get(_flag_name(section_name, option_name))
            if value:
                setattr(section, option_name, value)

    return config
|
python
|
def acquire_filename(self, name):
    """Register a file name; return its id together with its signature.

    A name seen before reuses its existing entry and bumps the entry's
    reference count; a new name is assigned the next free id and a fresh
    entry.
    """
    entry_id = self.__id_lut.get(name)
    if entry_id is None:
        # First time we see this name: allocate a fresh id and entry.
        entry_id = self.__next_id
        self.__next_id += 1
        self.__id_lut[name] = entry_id
        entry = filename_entry_t(name)
        self.__entries[entry_id] = entry
    else:
        # Known name: reuse the stored entry and count the extra reference.
        entry = self.__entries[entry_id]
        entry.inc_ref_count()
    return entry_id, self._get_signature(entry)
|
python
|
def unhexlify(blob):
    """
    Takes a hexlified script and turns it back into a string of Python code.

    The blob is expected to hold Intel-HEX-like records separated by newlines;
    the first line is skipped and, for each remaining record, the 9-character
    address/length prefix and 2-character trailing checksum are discarded
    before reversing the hexlification. The decoded payload must start with
    an "MP<size>" header (4 bytes), which is stripped.

    Returns the decoded script, or '' when no valid embedded script is
    present (empty input, malformed hex, wrong header, or undecodable bytes).
    """
    lines = blob.split('\n')[1:]
    # Fix: an empty / record-less blob previously raised IndexError below.
    if not lines:
        return ''
    output = []
    try:
        for line in lines:
            # Discard the address, length etc. and reverse the hexlification
            output.append(binascii.unhexlify(line[9:-2]))
    except binascii.Error:
        # Fix: malformed hex previously escaped as an uncaught exception;
        # treat it as "no embedded script".
        return ''
    # Check the header is correct ("MP<size>"). Compare raw bytes so that
    # arbitrary binary junk cannot raise UnicodeDecodeError here.
    if output[0][0:2] != b'MP':
        return ''
    # Strip off header
    output[0] = output[0][4:]
    # and strip any null bytes from the end
    output[-1] = output[-1].strip(b'\x00')
    script = b''.join(output)
    try:
        return script.decode('utf-8')
    except UnicodeDecodeError:
        # Return an empty string because in certain rare circumstances (where
        # the source hex doesn't include any embedded Python code) this
        # function may be passed in "raw" bytes from MicroPython.
        return ''
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.