language | func_code_string |
---|---|
java | @JRubyMethod
public static IRubyObject initialize(IRubyObject self) {
((RubyObject)self).fastSetInstanceVariable("@tags", RubyHash.newHash(self.getRuntime()));
return self;
} |
python | def integrate(self, tmax, exact_finish_time=1):
"""
Main integration function. Call this function when you have set up your simulation and want to integrate it forward (or backward) in time. The function might be called many times to integrate the simulation in steps and create outputs in-between steps.
Parameters
----------
tmax : float
The final time of your simulation. If the current time is 100, and tmax=200, then after calling the integrate routine, the time has advanced to t=200. If tmax is equal to the current time, no integration will be performed.
exact_finish_time: int, optional
This argument determines whether REBOUND should try to finish at the exact time (tmax) you give it or if it is allowed to overshoot. Overshooting could happen if one starts at t=0, has a timestep of dt=10 and wants to integrate to tmax=25. With ``exact_finish_time=1``, the integrator will choose the last timestep such that t is exactly 25 after the integration, otherwise t=30. Note that changing the timestep negatively affects the accuracy of symplectic integrators.
Exceptions
----------
Exceptions are thrown when no more particles are left in the simulation or when a generic integration error occurred.
If you specified exit_min_distance or exit_max_distance, then additional exceptions might be thrown for escaping particles or particles that undergo a close encounter.
Examples
--------
The typical usage is as follows. Note the use of ``np.linspace`` to create equally spaced outputs; ``np.logspace`` can be used to produce logarithmically spaced outputs instead.
>>> import numpy as np
>>> for time in np.linspace(0,100.,10):
...     sim.integrate(time)
...     perform_output(sim)
"""
if debug.integrator_package == "REBOUND":
self.exact_finish_time = c_int(exact_finish_time)
ret_value = clibrebound.reb_integrate(byref(self), c_double(tmax))
if ret_value == 1:
self.process_messages()
raise SimulationError("An error occurred during the integration.")
if ret_value == 2:
raise NoParticles("No more particles left in simulation.")
if ret_value == 3:
raise Encounter("Two particles had a close encounter (d<exit_min_distance).")
if ret_value == 4:
raise Escape("A particle escaped (r>exit_max_distance).")
if ret_value == 5:
raise Escape("User caused exit. Simulation did not finish.") # should not occur in python
if ret_value == 6:
raise KeyboardInterrupt
if ret_value == 7:
raise Collision("Two particles collided (d < r1+r2)")
else:
debug.integrate_other_package(tmax,exact_finish_time)
self.process_messages() |
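Because the wrapper above maps integrator return codes onto Python exceptions, callers can branch on them. A minimal sketch, assuming the `rebound` package with the `Escape` and `Encounter` exception classes shown in the snippet:

```python
import numpy as np
import rebound  # assumed to export Simulation, Escape, Encounter

sim = rebound.Simulation()
sim.add(m=1.0)                # central star
sim.add(m=1e-3, a=1.0)        # planet
sim.exit_max_distance = 50.0  # enables the Escape code path

try:
    for time in np.linspace(0., 100., 10):
        sim.integrate(time)
except rebound.Escape:
    print("A particle escaped (r > exit_max_distance)")
except rebound.Encounter:
    print("Two particles had a close encounter")
```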
java | public static String getLibraryHeader(boolean verbose)
{
if (!verbose)
return calimero + " version " + version;
final StringBuffer buf = new StringBuffer();
buf.append(calimero).append(sep);
buf.append("version ").append(version).append(sep);
buf.append(tuwien).append(sep);
buf.append(group).append(sep);
buf.append(copyright);
return buf.toString();
} |
python | def get_vendor_extension_fields(mapping):
"""
Identify vendor extension fields and extract them into a new dictionary.
Examples:
>>> get_vendor_extension_fields({'test': 1})
{}
>>> get_vendor_extension_fields({'test': 1, 'x-test': 2})
{'x-test': 2}
"""
return {k: v for k, v in mapping.items() if k.startswith('x-')} |
python | def handle_exception(self, exc_info=None, rendered=False, source_hint=None):
"""Exception handling helper. This is used internally to either raise
rewritten exceptions or return a rendered traceback for the template.
"""
global _make_traceback
if exc_info is None:
exc_info = sys.exc_info()
# the debugging module is imported when it's used for the first time.
# we're doing a lot of stuff there and for applications that do not
# get any exceptions in template rendering there is no need to load
# all of that.
if _make_traceback is None:
from jinja2.debug import make_traceback as _make_traceback
traceback = _make_traceback(exc_info, source_hint)
if rendered and self.exception_formatter is not None:
return self.exception_formatter(traceback)
if self.exception_handler is not None:
self.exception_handler(traceback)
exc_type, exc_value, tb = traceback.standard_exc_info
raise exc_type, exc_value, tb |
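The final `raise exc_type, exc_value, tb` is Python 2 three-argument raise syntax. A version-agnostic sketch of the same re-raise, assuming `six` is available (equivalent behavior, not what Jinja2 itself ships):

```python
import six

def reraise(exc_info):
    # exc_info is a (type, value, traceback) triple as returned
    # by sys.exc_info(); six.reraise works on Python 2 and 3.
    exc_type, exc_value, tb = exc_info
    six.reraise(exc_type, exc_value, tb)
    # Python 3 only equivalent: raise exc_value.with_traceback(tb)
```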
java | private void handleInProvisionalState(final SipPacket msg) throws SipPacketParseException {
if (msg.isRequest() && msg.isCancel()) {
transition(CallState.CANCELLING, msg);
return;
} else if (msg.isRequest()) {
// assuming this is either a re-transmission or
// a proxy case where the same request is captured
// multiple times so therefore just stay in the same
// state
transition(this.currentState, msg);
return;
}
final boolean isInvite = msg.isInvite();
final SipResponsePacket response = (SipResponsePacket) msg;
if (response.is100Trying()) {
transition(CallState.TRYING, msg);
} else if (response.isRinging()) {
if (this.ringingResponse == null) {
this.ringingResponse = msg.toResponse();
}
transition(CallState.RINGING, msg);
} else if (response.isSuccess() && isInvite) {
if (this.successResponse == null) {
this.successResponse = msg.toResponse();
}
transition(CallState.IN_CALL, msg);
} else if (response.isRedirect()) {
transition(CallState.REDIRECT, msg);
} else if (isRejected(response.getStatus())) {
transition(CallState.REJECTED, msg);
} else if (response.isClientError()) {
transition(CallState.FAILED, msg);
} else if (response.isServerError()) {
transition(CallState.FAILED, msg);
} else if (response.isGlobalError()) {
transition(CallState.FAILED, msg);
}
} |
java | public static Map<String, Map<String, String>> addContext(final String key, final Map<String, String> data,
final Map<String, Map<String, String>> context) {
final Map<String, Map<String, String>> newdata = new HashMap<>();
if (null != context) {
newdata.putAll(context);
}
newdata.put(key, data);
return newdata;
} |
python | def on_shutdown(self, broker):
"""Called during :meth:`Broker.shutdown`, informs callbacks registered
with :meth:`add_handle_cb` the connection is dead."""
_v and LOG.debug('%r.on_shutdown(%r)', self, broker)
fire(self, 'shutdown')
for handle, (persist, fn) in self._handle_map.iteritems():
_v and LOG.debug('%r.on_shutdown(): killing %r: %r', self, handle, fn)
fn(Message.dead(self.broker_shutdown_msg)) |
java | public static float max(final float a, final float b) {
if (a > b) {
return a;
}
if (a < b) {
return b;
}
/* if either arg is NaN, return NaN */
if (a != b) {
return Float.NaN;
}
/* max(+0.0,-0.0) == +0.0 */
/* 0x80000000 == Float.floatToRawIntBits(-0.0f) */
int bits = Float.floatToRawIntBits(a);
if (bits == 0x80000000) {
return b;
}
return a;
} |
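The two special cases rest on IEEE-754 facts: `a != b` holds only when an argument is NaN, and `-0.0` compares equal to `0.0` even though its sign bit is set. A small Python check of the same facts, using `struct` to mimic `Float.floatToRawIntBits`:

```python
import math
import struct

def float_to_raw_int_bits(f):
    # Reinterpret a 32-bit float's bytes as an unsigned int,
    # like Java's Float.floatToRawIntBits.
    return struct.unpack('>I', struct.pack('>f', f))[0]

nan = float('nan')
assert nan != nan                                 # only NaN is unequal to itself
assert -0.0 == 0.0                                # compares equal...
assert float_to_raw_int_bits(-0.0) == 0x80000000  # ...but the sign bit differs
assert float_to_raw_int_bits(0.0) == 0x00000000
print(math.copysign(1.0, -0.0))                   # -1.0: the sign is preserved
```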
java | public <A> A createAliasForProperty(Class<A> cl, Expression<?> path) {
return createProxy(cl, path);
} |
java | public boolean matches(Property property) {
return property.getName().equals(key) && (value == WILDCARD_VALUE || property.getValue().asString().equals(value));
} |
java | public ListRecoveryPointsByResourceResult withRecoveryPoints(RecoveryPointByResource... recoveryPoints) {
if (this.recoveryPoints == null) {
setRecoveryPoints(new java.util.ArrayList<RecoveryPointByResource>(recoveryPoints.length));
}
for (RecoveryPointByResource ele : recoveryPoints) {
this.recoveryPoints.add(ele);
}
return this;
} |
python | def _validate_publish_parameters(body, exchange, immediate, mandatory,
properties, routing_key):
"""Validate Publish Parameters.
:param bytes|str|unicode body: Message payload
:param str routing_key: Message routing key
:param str exchange: The exchange to publish the message to
:param dict properties: Message properties
:param bool mandatory: Require the message to be routed to at least one queue
:param bool immediate: Request immediate delivery
:raises AMQPInvalidArgument: Invalid Parameters
:return:
"""
if not compatibility.is_string(body):
raise AMQPInvalidArgument('body should be a string')
elif not compatibility.is_string(routing_key):
raise AMQPInvalidArgument('routing_key should be a string')
elif not compatibility.is_string(exchange):
raise AMQPInvalidArgument('exchange should be a string')
elif properties is not None and not isinstance(properties, dict):
raise AMQPInvalidArgument('properties should be a dict or None')
elif not isinstance(mandatory, bool):
raise AMQPInvalidArgument('mandatory should be a boolean')
elif not isinstance(immediate, bool):
raise AMQPInvalidArgument('immediate should be a boolean') |
java | public InterfaceType getSuperType()
{
if ( _intfDecl.getSuperinterfaces() == null )
return null;
for (InterfaceType intfType : _intfDecl.getSuperinterfaces())
{
InterfaceDeclaration superDecl = intfType.getDeclaration();
if ( superDecl != null )
{
if (superDecl.getAnnotation(ControlExtension.class) != null ||
superDecl.getAnnotation(ControlInterface.class) != null)
{
_superDecl = superDecl;
return intfType;
}
}
}
return null;
} |
python | def _parse_transaction_entry(entry):
""" Validate & parse a transaction into (date, action, value) tuple. """
parts = entry.split()
date_string = parts[0]
try:
date = datetime.datetime.strptime(date_string[:-1], '%Y-%m-%d').date()
except ValueError:
raise ValueError('Invalid date in vacationrc for entry: {}'.format(entry))
if len(parts) < 2:
raise ValueError('.vacationrc missing an action for entry: {}'.format(entry))
action = parts[1].lower()
if action not in ('days', 'rate', 'off', 'adjust', 'show'):
raise ValueError('Invalid action in vacationrc for entry: {}'.format(entry))
try:
value = float(parts[2])
except IndexError:
value = None
except (ValueError, TypeError):
raise ValueError('Invalid value in vacationrc for entry: {}'.format(entry))
return (date, action, value) |
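For illustration, here is how the parser behaves on a few hypothetical `.vacationrc` entries; the trailing colon on the date field matches the `date_string[:-1]` slice above, and the function is assumed to be in scope:

```python
print(_parse_transaction_entry('2019-01-01: rate 1.25'))
# (datetime.date(2019, 1, 1), 'rate', 1.25)
print(_parse_transaction_entry('2019-02-14: off'))
# (datetime.date(2019, 2, 14), 'off', None)  # missing value -> IndexError -> None
```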
java | @Override
public List<String> getFormats(final String aBaseName) {
Objects.requireNonNull(aBaseName);
return Arrays.asList(FORMAT);
} |
python | def reconfig(main_parser, args=sys.argv[1:]):
"""Parse any config paths and reconfigure defaults with them
http://docs.python.org/library/argparse.html#partial-parsing
Return parsed remaining arguments"""
parsed, remaining_args = parser().parse_known_args(args)
configure(parsed.config_paths, os.getcwd())
return main_parser().parse_args(remaining_args) |
java | public final void addAllHelperTexts(@NonNull final CharSequence... helperTexts) {
Condition.INSTANCE.ensureNotNull(helperTexts, "The array may not be null");
addAllHelperTexts(Arrays.asList(helperTexts));
} |
java | public ListenableFuture<KeyValue> compareAndSet(String key, SetValue setValue) {
checkThatDistributedStoreIsActive();
KayVeeCommand.CASCommand casCommand = new KayVeeCommand.CASCommand(getCommandId(), key, setValue.getExpectedValue(), setValue.getNewValue());
return issueCommandToCluster(casCommand);
} |
java | public static spilloverpolicy[] get_filtered(nitro_service service, String filter) throws Exception{
spilloverpolicy obj = new spilloverpolicy();
options option = new options();
option.set_filter(filter);
spilloverpolicy[] response = (spilloverpolicy[]) obj.getfiltered(service, option);
return response;
} |
java | public <T> CallOptions withOption(Key<T> key, T value) {
Preconditions.checkNotNull(key, "key");
Preconditions.checkNotNull(value, "value");
CallOptions newOptions = new CallOptions(this);
int existingIdx = -1;
for (int i = 0; i < customOptions.length; i++) {
if (key.equals(customOptions[i][0])) {
existingIdx = i;
break;
}
}
newOptions.customOptions = new Object[customOptions.length + (existingIdx == -1 ? 1 : 0)][2];
System.arraycopy(customOptions, 0, newOptions.customOptions, 0, customOptions.length);
if (existingIdx == -1) {
// Add a new option
newOptions.customOptions[customOptions.length] = new Object[] {key, value};
} else {
// Replace an existing option
newOptions.customOptions[existingIdx] = new Object[] {key, value};
}
return newOptions;
} |
python | def by_name(cls, session, name):
"""
Get a group by a given name.
:param session: SQLAlchemy session
:type session: :class:`sqlalchemy.Session`
:param name: name of the group
:type name: `unicode`
:return: group instance
:rtype: :class:`pyshop.models.Group`
"""
return cls.first(session, where=(cls.name == name,)) |
java | public void setStart(int x1, int y1) {
start.x = x1;
start.y = y1;
needsRefresh = true;
} |
python | def trusted(self, scope=None):
"""Return list of [(scope, trusted key), ...] for given scope."""
trust = [(x['scope'], x['vk']) for x in self.data['verifiers']
if x['scope'] in (scope, '+')]
trust.sort(key=lambda x: x[0])
trust.reverse()
return trust |
python | def from_dict(cls, d, encoding='base64'):
'''
Construct a ``Report`` object from dictionary.
:type d: dictionary
:param d: dictionary representing the report
:param encoding: encoding of strings in the dictionary (default: 'base64')
:return: Report object
'''
report = Report(Report._decode(d['name'], encoding))
report.set_status(Report._decode(d['status'], encoding))
sub_reports = Report._decode(d['sub_reports'], encoding)
del d['sub_reports']
for k, v in d.items():
if k in sub_reports:
report.add(k, Report.from_dict(v))
else:
if k.lower() == 'status':
report.set_status(Report._decode(v, encoding))
else:
report.add(k, Report._decode(v, encoding))
return report |
java | @Deprecated
public List<Index> listIndices() {
InputStream response = null;
try {
URI uri = new DatabaseURIHelper(db.getDBUri()).path("_index").build();
response = client.couchDbClient.get(uri);
return getResponseList(response, client.getGson(), DeserializationTypes.INDICES);
} finally {
close(response);
}
} |
python | def find_available_port():
"""Find an available port.
Simple trick: open a socket to localhost, see what port was allocated.
Could fail in highly concurrent setups, though.
"""
s = socket.socket()
s.bind(('localhost', 0))
_address, port = s.getsockname()
s.close()
return port |
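The bind-to-port-0 trick asks the kernel for any free port; the race noted in the docstring is that another process may claim the port between `close()` and a later `bind()`. A minimal usage sketch, assuming the function above is in scope (it needs `import socket`):

```python
import socket

port = find_available_port()
server = socket.socket()
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server.bind(('localhost', port))  # may still fail if another process won the race
server.listen(1)
print('listening on port', port)
server.close()
```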
java | protected void createTraceArrayForParameters() {
// Static methods don't have an implicit "this" argument so start
// working with local var 0 instead of 1 for the parm list.
int localVarOffset = isStatic ? 0 : 1;
int syntheticArgs = 0;
// Use a heuristic to guess when we're in a nested class constructor.
// Nested classes that are not static get a synthetic reference to their
// parent as the first argument.
if (isConstructor() && getClassAdapter().isInnerClass() && argTypes.length > 1) {
String className = getClassAdapter().getClassInternalName();
String ownerName = className.substring(0, className.lastIndexOf("$"));
if (Type.getObjectType(ownerName).equals(argTypes[0])) {
syntheticArgs = 1;
}
}
// Build the object array that will hold the input args to the method.
visitLdcInsn(new Integer(argTypes.length - syntheticArgs));
visitTypeInsn(ANEWARRAY, "java/lang/Object");
for (int i = syntheticArgs; i < argTypes.length; i++) {
int j = i + localVarOffset;
visitInsn(DUP);
visitLdcInsn(new Integer(i - syntheticArgs));
boxLocalVar(argTypes[i], j, isArgumentSensitive(i));
visitInsn(AASTORE);
// Local variables can use more than one slot. (DJ)
// Account for those here by adding them to the local
// var offset.
localVarOffset += argTypes[i].getSize() - 1;
}
} |
python | def get_model_choices():
"""
Get the select options for the model selector
:return:
"""
result = []
for ct in ContentType.objects.order_by('app_label', 'model'):
try:
if issubclass(ct.model_class(), TranslatableModel):
result.append(
('{} - {}'.format(ct.app_label, ct.model.lower()),
'{} - {}'.format(ct.app_label.capitalize(), ct.model_class()._meta.verbose_name_plural))
)
except TypeError:
continue
return result |
python | def token_perplexity_micro(eval_data, predictions, scores, learner='ignored'):
'''
Return the micro-averaged per-token perplexity `exp(-score / num_tokens)`
computed over the entire corpus, as a length-1 list of floats.
The log scores in `scores` should be base e (`exp`, `log`).
>>> refs = [Instance(None, ''),
... Instance(None, ''),
... Instance(None, '2')]
>>> scores = [np.log(1.0), np.log(0.25), np.log(1 / 64.)]
>>> perplexity = token_perplexity_micro(refs, None, scores)
>>> [round(p) for p in perplexity]
... # sequence perplexities: [1, 4, 64]
... # per-token perplexities: [1, 4, 8]
... # micro-average: gmean([1, 4, 8, 8])
[4.0]
'''
lens = np.array([len(_maybe_tokenize(inst.output)) + 1 for inst in eval_data])
return [np.exp(np.average(-np.array(scores) / lens, weights=lens))] |
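The doctest arithmetic is a count-weighted geometric mean: per-token perplexities are [1, 4, 8] with token counts [1, 1, 2], so the 8 is weighted twice. A standalone numpy check of the same computation, with the token counts inlined rather than derived via `_maybe_tokenize`:

```python
import numpy as np

scores = np.log([1.0, 0.25, 1 / 64.])  # log-probabilities of 3 sequences
lens = np.array([1, 1, 2])             # tokens per sequence (incl. end token)
perplexity = np.exp(np.average(-scores / lens, weights=lens))
print(round(perplexity))               # 4
```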
java | public void compose(List<URI> sources, URI destination, String contentType) throws IOException {
StorageResourceId destResource = StorageResourceId.fromObjectName(destination.toString());
List<String> sourceObjects =
Lists.transform(
sources, uri -> StorageResourceId.fromObjectName(uri.toString()).getObjectName());
gcs.compose(
destResource.getBucketName(), sourceObjects, destResource.getObjectName(), contentType);
} |
java | public Pattern<T, F> times(int from, int to) {
checkIfNoNotPattern();
checkIfQuantifierApplied();
this.quantifier = Quantifier.times(quantifier.getConsumingStrategy());
if (from == 0) {
this.quantifier.optional();
from = 1;
}
this.times = Times.of(from, to);
return this;
} |
python | def parse_duplicate_stats(self, stats_file):
"""
Parses sambamba markdup output, returns series with values.
:param str stats_file: sambamba output file with duplicate statistics.
"""
import pandas as pd
series = pd.Series()
try:
with open(stats_file) as handle:
content = handle.readlines() # list of strings per line
except (IOError, OSError):
return series
try:
line = [i for i in range(len(content)) if "single ends (among them " in content[i]][0]
series["single-ends"] = re.sub(r"\D", "", re.sub(r"\(.*", "", content[line]))
line = [i for i in range(len(content)) if " end pairs... done in " in content[i]][0]
series["paired-ends"] = re.sub(r"\D", "", re.sub(r"\.\.\..*", "", content[line]))
line = [i for i in range(len(content)) if " duplicates, sorting the list... done in " in content[i]][0]
series["duplicates"] = re.sub(r"\D", "", re.sub(r"\.\.\..*", "", content[line]))
except IndexError:
pass
return series |
java | public void setComplex_atIndex(int index, double real, double imag){
this.real[index] = real;
this.imag[index] = imag;
if(synchronizePowerSpectrum){
computePower(index);
}
} |
python | def ignore_broken_pipe():
""" If a shellish program has redirected stdio it is subject to erroneous
"ignored" exceptions during the interpretor shutdown. This essentially
beats the interpretor to the punch by closing them early and ignoring any
broken pipe exceptions. """
for f in sys.stdin, sys.stdout, sys.stderr:
try:
f.close()
except BrokenPipeError:
pass |
java | public BaseField setupField(int iFieldSeq)
{
BaseField field = null;
//if (iFieldSeq == 0)
//{
// field = new CounterField(this, ID, Constants.DEFAULT_FIELD_LENGTH, null, null);
// field.setHidden(true);
//}
//if (iFieldSeq == 1)
//{
// field = new RecordChangedField(this, LAST_CHANGED, Constants.DEFAULT_FIELD_LENGTH, null, null);
// field.setHidden(true);
//}
//if (iFieldSeq == 2)
//{
// field = new BooleanField(this, DELETED, Constants.DEFAULT_FIELD_LENGTH, null, new Boolean(false));
// field.setHidden(true);
//}
if (iFieldSeq == 3)
field = new StringField(this, KEY_FILENAME, 40, null, null);
if (iFieldSeq == 4)
field = new ShortField(this, KEY_NUMBER, 2, null, null);
if (iFieldSeq == 5)
field = new StringField(this, KEY_FIELD_1, 40, null, null);
if (iFieldSeq == 6)
field = new StringField(this, KEY_FIELD_2, 40, null, null);
if (iFieldSeq == 7)
field = new StringField(this, KEY_FIELD_3, 40, null, null);
if (iFieldSeq == 8)
field = new StringField(this, KEY_FIELD_4, 40, null, null);
if (iFieldSeq == 9)
field = new StringField(this, KEY_FIELD_5, 40, null, null);
if (iFieldSeq == 10)
field = new StringField(this, KEY_FIELD_6, 40, null, null);
if (iFieldSeq == 11)
field = new StringField(this, KEY_FIELD_7, 40, null, null);
if (iFieldSeq == 12)
field = new StringField(this, KEY_FIELD_8, 40, null, null);
if (iFieldSeq == 13)
field = new StringField(this, KEY_FIELD_9, 40, null, null);
if (iFieldSeq == 14)
field = new StringField(this, KEY_NAME, 40, null, null);
if (iFieldSeq == 15)
field = new KeyTypeField(this, KEY_TYPE, 1, null, "KeyTypeField.UNIQUE");
if (iFieldSeq == 16)
{
field = new IncludeScopeField(this, INCLUDE_SCOPE, Constants.DEFAULT_FIELD_LENGTH, null, new Integer(0x004));
field.addListener(new InitOnceFieldHandler(null));
}
if (field == null)
field = super.setupField(iFieldSeq);
return field;
} |
python | def write_size (self, url_data):
"""Write url_data.size."""
self.writeln(u"<tr><td>"+self.part("dlsize")+u"</td><td>"+
strformat.strsize(url_data.size)+
u"</td></tr>") |
java | public void marshall(DescribeSubscriptionFiltersRequest describeSubscriptionFiltersRequest, ProtocolMarshaller protocolMarshaller) {
if (describeSubscriptionFiltersRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(describeSubscriptionFiltersRequest.getLogGroupName(), LOGGROUPNAME_BINDING);
protocolMarshaller.marshall(describeSubscriptionFiltersRequest.getFilterNamePrefix(), FILTERNAMEPREFIX_BINDING);
protocolMarshaller.marshall(describeSubscriptionFiltersRequest.getNextToken(), NEXTTOKEN_BINDING);
protocolMarshaller.marshall(describeSubscriptionFiltersRequest.getLimit(), LIMIT_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} |
python | def factory(
cls, file_id=None, path=None, url=None, blob=None, mime=None,
prefer_local_download=True, prefer_str=False, create_instance=True
):
"""
Creates a new InputFile subclass instance fitting the given parameters.
:param prefer_local_download: If `True`, we download the file and send it to telegram. This is the default.
If `False`, we send Telegram just the URL, and they'll try to download it.
:type prefer_local_download: bool
:param prefer_str: Return just the `str` instead of an `InputFileUseFileID` or `InputFileUseUrl` object.
:type prefer_str: bool
:param create_instance: If we should return an instance ready to use (default),
or the building parts being a tuple of `(class, args_tuple, kwargs_dict)`.
Setting this to `False` is probably only ever required for internal usage
by the :class:`InputFile` constructor which uses this very factory.
:type create_instance: bool
:returns: if `create_instance=True` it returns an instance of some InputFile subclass or a string,
if `create_instance=False` it returns a tuple of the needed class, args and kwargs needed
to create an instance.
:rtype: InputFile|InputFileFromBlob|InputFileFromDisk|InputFileFromURL|str|tuple
"""
if create_instance:
clazz, args, kwargs = cls.factory(
file_id=file_id,
path=path,
url=url,
blob=blob,
mime=mime,
create_instance=False,
)
return clazz(*args, **kwargs)
if file_id:
if prefer_str:
assert_type_or_raise(file_id, str, parameter_name='file_id')
return str, (file_id,), dict()
# end if
return InputFileUseFileID, (file_id,), dict()
if blob:
name = "file"
suffix = ".blob"
if path:
name = os_path.basename(os_path.normpath(path)) # http://stackoverflow.com/a/3925147/3423324#last-part
name, suffix = os_path.splitext(name) # http://stackoverflow.com/a/541394/3423324#extension
elif url:
# http://stackoverflow.com/a/18727481/3423324#how-to-extract-a-filename-from-a-url
url = urlparse(url)
name = os_path.basename(url.path)
name, suffix = os_path.splitext(name)
# end if
if mime:
import mimetypes
suffix = mimetypes.guess_extension(mime)
suffix = '.jpg' if suffix == '.jpe' else suffix # .jpe -> .jpg
# end if
if not suffix or not suffix.strip().lstrip("."):
logger.debug("suffix was empty. Using '.blob'")
suffix = ".blob"
# end if
name = "{filename}{suffix}".format(filename=name, suffix=suffix)
return InputFileFromBlob, (blob,), dict(name=name, mime=mime)
if path:
return InputFileFromDisk, (path,), dict(mime=mime)
if url:
if prefer_local_download:
return InputFileFromURL, (url,), dict(mime=mime)
# end if
# else -> so we wanna let telegram handle it
if prefer_str:
assert_type_or_raise(url, str, parameter_name='url')
return str, (url,), dict()
# end if
return InputFileUseUrl, (url,), dict()
# end if
raise ValueError('Could not find a matching subclass. You might need to do it manually instead.') |
python | def get_widget(self, page, language, fallback=Textarea):
"""Given the name of a placeholder return a `Widget` subclass
like Textarea or TextInput."""
if isinstance(self.widget, str):
widget = get_widget(self.widget)
else:
widget = self.widget
try:
return widget(page=page, language=language)
except:
pass
return widget() |
python | def get_commands(self):
"""Gets command that have been run and have not been redacted.
"""
shutit_global.shutit_global_object.yield_to_draw()
s = ''
for c in self.build['shutit_command_history']:
if isinstance(c, str):
#Ignore commands with leading spaces
if c and c[0] != ' ':
s += c + '\n'
return s |
python | def rankdata(inlist):
"""
Ranks the data in inlist, dealing with ties appropriately. Assumes
a 1D inlist. Adapted from Gary Perlman's |Stat ranksort.
Usage: rankdata(inlist)
Returns: a list of length equal to inlist, containing rank scores
"""
n = len(inlist)
svec, ivec = shellsort(inlist)
sumranks = 0
dupcount = 0
newlist = [0] * n
for i in range(n):
sumranks = sumranks + i
dupcount = dupcount + 1
if i == n - 1 or svec[i] != svec[i + 1]:
averank = sumranks / float(dupcount) + 1
for j in range(i - dupcount + 1, i + 1):
newlist[ivec[j]] = averank
sumranks = 0
dupcount = 0
return newlist |
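Tied values receive the average of the ranks they span. For example, assuming the module's `shellsort` helper is available alongside `rankdata`:

```python
print(rankdata([20, 10, 10, 30]))
# [3.0, 1.5, 1.5, 4.0] -- the two 10s share ranks 1 and 2, so each gets 1.5
```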
java | public static FsInfoSector create(Fat32BootSector bs) throws IOException {
final int offset = offset(bs);
if (offset == 0) throw new IOException(
"creating a FS info sector at offset 0 is strange");
final FsInfoSector result =
new FsInfoSector(bs.getDevice(), offset(bs));
result.init();
result.write();
return result;
} |
python | def GetLocations():
"""Return all cloud locations available to the calling alias."""
r = clc.v1.API.Call('post','Account/GetLocations',{})
if r['Success'] != True:
if clc.args: clc.v1.output.Status('ERROR',3,'Error calling %s. Status code %s. %s' % ('Account/GetLocations',r['StatusCode'],r['Message']))
raise Exception('Error calling %s. Status code %s. %s' % ('Account/GetLocations',r['StatusCode'],r['Message']))
elif int(r['StatusCode']) == 0:
clc.LOCATIONS = [x['Alias'] for x in r['Locations']]
return(r['Locations']) |
java | public static Ref callRef(Callable function, Scriptable thisObj,
Object[] args, Context cx)
{
if (function instanceof RefCallable) {
RefCallable rfunction = (RefCallable)function;
Ref ref = rfunction.refCall(cx, thisObj, args);
if (ref == null) {
throw new IllegalStateException(rfunction.getClass().getName()+".refCall() returned null");
}
return ref;
}
// No runtime support for now
String msg = getMessage1("msg.no.ref.from.function",
toString(function));
throw constructError("ReferenceError", msg);
} |
python | def _orient3dfast(plane, pd):
"""
Performs a fast 3D orientation test.
Parameters
----------
plane: (3,3) float, three points in space that define a plane
pd: (3,) float, a single point
Returns
-------
result: float, if greater than zero then pd is above the plane through
the given three points, if less than zero then pd is below
the given plane, and if equal to zero then pd is on the
given plane.
"""
pa, pb, pc = plane
adx = pa[0] - pd[0]
bdx = pb[0] - pd[0]
cdx = pc[0] - pd[0]
ady = pa[1] - pd[1]
bdy = pb[1] - pd[1]
cdy = pc[1] - pd[1]
adz = pa[2] - pd[2]
bdz = pb[2] - pd[2]
cdz = pc[2] - pd[2]
return (adx * (bdy * cdz - bdz * cdy)
+ bdx * (cdy * adz - cdz * ady)
+ cdx * (ady * bdz - adz * bdy)) |
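A quick sanity check with a triangle in the z=0 plane, wound so that +z is the positive side (the sign convention follows the triangle's winding). This assumes `_orient3dfast` from the snippet above is in scope:

```python
import numpy as np

plane = np.array([[0., 0., 0.],
                  [0., 1., 0.],
                  [1., 0., 0.]])
print(_orient3dfast(plane, np.array([0., 0., 1.])))    #  1.0 -> above
print(_orient3dfast(plane, np.array([0., 0., -1.])))   # -1.0 -> below
print(_orient3dfast(plane, np.array([0.2, 0.2, 0.])))  #  0.0 -> on the plane
```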
python | def is_image(filename):
"""Determine if given filename is an image."""
# note: isfile() also accepts symlinks
return os.path.isfile(filename) and filename.lower().endswith(ImageExts) |
python | def log_level_from_vebosity(verbosity):
"""
Get the `logging` module log level from a verbosity.
:param verbosity: The number of times the `-v` option was specified.
:return: The corresponding log level.
"""
if verbosity == 0:
return logging.WARNING
if verbosity == 1:
return logging.INFO
return logging.DEBUG |
python | def update(self, match_set):
"""Update the classifier set from which the match set was drawn,
e.g. by applying a genetic algorithm. The match_set argument is the
MatchSet instance whose classifier set should be updated.
Usage:
match_set = model.match(situation)
match_set.select_action()
match_set.payoff = reward
model.algorithm.distribute_payoff(match_set)
model.algorithm.update(match_set)
Arguments:
match_set: A MatchSet instance for which the classifier set
from which it was drawn needs to be updated based on the
match set's payoff distribution.
Return: None
"""
assert isinstance(match_set, MatchSet)
assert match_set.model.algorithm is self
assert match_set.selected_action is not None
# Increment the iteration counter.
match_set.model.update_time_stamp()
action_set = match_set[match_set.selected_action]
# If the average number of iterations since the last update for
# each rule in the action set is too small, return early instead of
# applying the GA.
average_time_passed = (
match_set.model.time_stamp -
self._get_average_time_stamp(action_set)
)
if average_time_passed <= self.ga_threshold:
return
# Update the time step for each rule to indicate that they were
# updated by the GA.
self._set_timestamps(action_set)
# Select two parents from the action set, with probability
# proportionate to their fitness.
parent1 = self._select_parent(action_set)
parent2 = self._select_parent(action_set)
# With the probability specified in the parameters, apply the
# crossover operator to the parents. Otherwise, just take the
# parents unchanged.
if random.random() < self.crossover_probability:
condition1, condition2 = parent1.condition.crossover_with(
parent2.condition
)
else:
condition1, condition2 = parent1.condition, parent2.condition
# Apply the mutation operator to each child, randomly flipping
# their mask bits with a small probability.
condition1 = self._mutate(condition1, action_set.situation)
condition2 = self._mutate(condition2, action_set.situation)
# If the newly generated children are already present in the
# population (or if they should be subsumed due to GA subsumption)
# then simply increment the numerosities of the existing rules in
# the population.
new_children = []
for condition in condition1, condition2:
# If the parameters specify that GA subsumption should be
# performed, look for an accurate parent that can subsume the
# new child.
if self.do_ga_subsumption:
subsumed = False
for parent in parent1, parent2:
should_subsume = (
(parent.experience >
self.subsumption_threshold) and
parent.error < self.error_threshold and
parent.condition(condition)
)
if should_subsume:
if parent in action_set.model:
parent.numerosity += 1
self.prune(action_set.model)
else:
# Sometimes the parent is removed from a
# previous subsumption
parent.numerosity = 1
action_set.model.add(parent)
subsumed = True
break
if subsumed:
continue
# Provided the child has not already been subsumed and it is
# present in the population, just increment its numerosity.
# Otherwise, if the child has neither been subsumed nor does it
# already exist, remember it so we can add it to the classifier
# set in just a moment.
child = XCSClassifierRule(
condition,
action_set.action,
self,
action_set.model.time_stamp
)
if child in action_set.model:
action_set.model.add(child)
else:
new_children.append(child)
# If there were any children which weren't subsumed and weren't
# already present in the classifier set, add them.
if new_children:
average_reward = .5 * (
parent1.average_reward +
parent2.average_reward
)
error = .5 * (parent1.error + parent2.error)
# .1 * (average fitness of parents)
fitness = .05 * (
parent1.fitness +
parent2.fitness
)
for child in new_children:
child.average_reward = average_reward
child.error = error
child.fitness = fitness
action_set.model.add(child) |
python | def _varargs_checks_gen(self, decorated_function, function_spec, arg_specs):
""" Generate checks for positional variable argument (varargs) testing
:param decorated_function: function decorator
:param function_spec: function inspect information
:param arg_specs: argument specification (same as arg_specs in :meth:`.Verifier.decorate`)
:return: internal structure, that is used by :meth:`.Verifier._varargs_checks_test`
"""
inspected_varargs = function_spec.varargs
if inspected_varargs is not None and inspected_varargs in arg_specs.keys():
return self.check(
arg_specs[inspected_varargs], inspected_varargs, decorated_function
) |
python | def compute_verdict(self, results):
"""
Match results to the configured reject, quarantine and accept classes,
and return a verdict based on that.
The verdict classes are matched in the order: reject_classes,
quarantine_classes, accept_classes. This means that you can configure
different verdicts for different confidence results, for instance:
reject_classes= Spam:0.99 # Reject obvious spam
quarantine_classes = Spam:0.7 # Quarantine spam with confidence
# between 0.7 and 0.99
accept_classes = Spam # Accept low confidence spam (good
# for FP and retraining)
Args:
results -- A results dictionary from DspamClient.
"""
if results['class'] in self.reject_classes:
threshold = self.reject_classes[results['class']]
if float(results['confidence']) >= threshold:
logger.debug(
'<{0}> Suggesting to reject the message based on DSPAM '
'results: user={1[user]}, class={1[class]}, '
'confidence={1[confidence]}'.format(self.id, results))
return self.VERDICT_REJECT
if results['class'] in self.quarantine_classes:
threshold = self.quarantine_classes[results['class']]
if float(results['confidence']) >= threshold:
logger.debug(
'<{0}> Suggesting to quarantine the message based on '
'DSPAM results: user={1[user]}, class={1[class]}, '
'confidence={1[confidence]}'.format(self.id, results))
return self.VERDICT_QUARANTINE
if results['class'] in self.accept_classes:
threshold = self.accept_classes[results['class']]
if float(results['confidence']) >= threshold:
logger.debug(
'<{0}> Suggesting to accept the message based on DSPAM '
'results: user={1[user]}, class={1[class]}, '
'confidence={1[confidence]}'.format(self.id, results))
return self.VERDICT_ACCEPT
logger.debug(
'<{0}> Suggesting to accept the message, no verdict class matched '
'DSPAM results: user={1[user]}, class={1[class]}, '
'confidence={1[confidence]}'.format(self.id, results))
return self.VERDICT_ACCEPT |
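The `Spam:0.99` syntax in the docstring implies the verdict classes are dicts mapping a class name to a confidence threshold. A hypothetical parser for that config format (the function name and the default-to-zero rule are illustrative, not from the source):

```python
def parse_verdict_classes(raw):
    # Parse 'Spam:0.99, Ham' into {'Spam': 0.99, 'Ham': 0.0}.
    # A missing threshold means "match at any confidence" -> 0.0.
    classes = {}
    for item in raw.split(','):
        item = item.strip()
        if not item:
            continue
        name, _, threshold = item.partition(':')
        classes[name.strip()] = float(threshold) if threshold else 0.0
    return classes

print(parse_verdict_classes('Spam:0.99'))  # {'Spam': 0.99}
print(parse_verdict_classes('Spam'))       # {'Spam': 0.0}
```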
java | public static boolean isSuperuser(String username)
{
UntypedResultSet result = selectUser(username);
return !result.isEmpty() && result.one().getBoolean("super");
} |
python | def aes_decrypt(key: bytes, cipher_text: bytes) -> bytes:
"""
AES-GCM decryption
Parameters
----------
key: bytes
AES session key, which derived from two secp256k1 keys
cipher_text: bytes
Encrypted text:
nonce(16 bytes) + tag(16 bytes) + encrypted data
Returns
-------
bytes
Plain text
>>> data = b'this is test data'
>>> key = get_valid_secret()
>>> aes_decrypt(key, aes_encrypt(key, data)) == data
True
>>> import os
>>> key = os.urandom(32)
>>> aes_decrypt(key, aes_encrypt(key, data)) == data
True
"""
nonce = cipher_text[:16]
tag = cipher_text[16:32]
ciphered_data = cipher_text[32:]
aes_cipher = AES.new(key, AES_CIPHER_MODE, nonce=nonce)
return aes_cipher.decrypt_and_verify(ciphered_data, tag) |
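The decryptor expects the layout nonce(16) + tag(16) + ciphertext. A sketch of the matching encryptor under that assumption, using PyCryptodome and assuming the snippet's `AES_CIPHER_MODE` is `AES.MODE_GCM`:

```python
import os

from Crypto.Cipher import AES  # PyCryptodome

def aes_encrypt(key: bytes, plain_text: bytes) -> bytes:
    # Layout must mirror aes_decrypt: nonce(16) + tag(16) + ciphertext
    nonce = os.urandom(16)
    aes_cipher = AES.new(key, AES.MODE_GCM, nonce=nonce)
    ciphered_data, tag = aes_cipher.encrypt_and_digest(plain_text)
    return nonce + tag + ciphered_data
```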
python | def make_sshable(c):
"""
Set up passwordless SSH keypair & authorized_hosts access to localhost.
"""
user = c.travis.sudo.user
home = "~{0}".format(user)
# Run sudo() as the new sudo user; means less chown'ing, etc.
c.config.sudo.user = user
ssh_dir = "{0}/.ssh".format(home)
# TODO: worth wrapping in 'sh -c' and using '&&' instead of doing this?
for cmd in ("mkdir {0}", "chmod 0700 {0}"):
c.sudo(cmd.format(ssh_dir, user))
c.sudo('ssh-keygen -f {0}/id_rsa -N ""'.format(ssh_dir))
c.sudo("cp {0}/{{id_rsa.pub,authorized_keys}}".format(ssh_dir)) |
java | protected void recordLocalNSDecl(Node node) {
NamedNodeMap atts = ((Element) node).getAttributes();
int length = atts.getLength();
for (int i = 0; i < length; i++) {
Node attr = atts.item(i);
String localName = attr.getLocalName();
String attrPrefix = attr.getPrefix();
String attrValue = attr.getNodeValue();
String attrNS = attr.getNamespaceURI();
localName =
localName == null
|| XMLNS_PREFIX.equals(localName) ? "" : localName;
attrPrefix = attrPrefix == null ? "" : attrPrefix;
attrValue = attrValue == null ? "" : attrValue;
attrNS = attrNS == null ? "" : attrNS;
// check if attribute is a namespace decl
if (XMLNS_URI.equals(attrNS)) {
// No prefix may be bound to http://www.w3.org/2000/xmlns/.
if (XMLNS_URI.equals(attrValue)) {
String msg =
Utils.messages.createMessage(
MsgKey.ER_NS_PREFIX_CANNOT_BE_BOUND,
new Object[] { attrPrefix, XMLNS_URI });
if (fErrorHandler != null) {
fErrorHandler.handleError(
new DOMErrorImpl(
DOMError.SEVERITY_ERROR,
msg,
MsgKey.ER_NS_PREFIX_CANNOT_BE_BOUND,
null,
null,
null));
}
} else {
// store the namespace-declaration
if (XMLNS_PREFIX.equals(attrPrefix) ) {
// record valid decl
if (attrValue.length() != 0) {
fNSBinder.declarePrefix(localName, attrValue);
} else {
// Error; xmlns:prefix=""
}
} else { // xmlns
// empty prefix is always bound ("" or some string)
fNSBinder.declarePrefix("", attrValue);
}
}
}
}
} |
python | def _strip_ctype(name, ctype, protocol=2):
"""Strip the ctype from a channel name for the given nds server version
This is needed because NDS1 servers store trend channels _including_
the suffix, but not raw channels, and NDS2 doesn't do this.
"""
# parse channel type from name (e.g. 'L1:GDS-CALIB_STRAIN,reduced')
try:
name, ctypestr = name.rsplit(',', 1)
except ValueError:
pass
else:
ctype = Nds2ChannelType.find(ctypestr).value
# NDS1 stores channels with trend suffix, so we put it back:
if protocol == 1 and ctype in (
Nds2ChannelType.STREND.value,
Nds2ChannelType.MTREND.value
):
name += ',{0}'.format(ctypestr)
return name, ctype |
python | def path(filename):
"""Return full filename path for filename"""
filename = unmap_file(filename)
if filename not in file_cache:
return None
return file_cache[filename].path |
java | public String splice(DNASequence sequence) {
StringBuilder subData = new StringBuilder();
Location last = null;
for (FeatureI f : this) {
Location loc = f.location();
if (last == null || loc.startsAfter(last)) {
subData.append(sequence.getSubSequence(loc.start(), loc.end()).toString());
last = loc;
} else {
throw new IllegalStateException("Splice: Feature locations should not overlap.");
}
}
return subData.toString();
} |
java | public static Polygon makePolygon(Geometry shell, Geometry... holes) throws IllegalArgumentException {
if(shell == null) {
return null;
}
LinearRing outerLine = checkLineString(shell);
LinearRing[] interiorlinestrings = new LinearRing[holes.length];
for (int i = 0; i < holes.length; i++) {
interiorlinestrings[i] = checkLineString(holes[i]);
}
return shell.getFactory().createPolygon(outerLine, interiorlinestrings);
} |
java | protected void fireAnnounceResponseEvent(int complete, int incomplete, int interval, String hexInfoHash) {
for (AnnounceResponseListener listener : this.listeners) {
listener.handleAnnounceResponse(interval, complete, incomplete, hexInfoHash);
}
} |
python | def pmap(func, args, processes=None, callback=lambda *_, **__: None, **kwargs):
"""pmap(func, args, processes=None, callback=do_nothing, **kwargs)
Parallel equivalent of ``map(func, args)``, with the additional ability of
providing keyword arguments to func, and a callback function which is
applied to each element in the returned list. Unlike map, the output is a
non-lazy list. If *processes* is 1, no thread pool is used.
**Parameters**
func : function
The function to map.
args : iterable
The arguments to map *func* over.
processes : int or None, optional
The number of processes in the thread pool. If only 1, no thread pool
is used to avoid useless overhead. If None, the number is chosen based
on your system by :class:`multiprocessing.Pool` (default None).
callback : function, optional
Function to call on the return value of ``func(arg)`` for each *arg*
in *args* (default do_nothing).
kwargs : dict
Extra keyword arguments are unpacked in each call of *func*.
**Returns**
results : list
A list equivalent to ``[func(x, **kwargs) for x in args]``.
"""
if processes == 1:
results = []
for arg in args:
result = func(arg, **kwargs)
results.append(result)
callback(result)
return results
else:
with Pool() if processes is None else Pool(processes) as p:
results = [p.apply_async(func, (arg,), kwargs, callback)
for arg in args]
return [result.get() for result in results] |
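Usage mirrors the built-in `map`, with keyword arguments forwarded to every call; this assumes `pmap` from the snippet above is importable:

```python
def scale(x, factor=1):
    return x * factor

results = pmap(scale, range(5), processes=2, factor=10)
print(results)  # [0, 10, 20, 30, 40]
```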
python | def component_activated(self, component):
"""Initialize additional member variables for components.
Every component activated through the `Environment` object
gets an additional member variable: `env` (the environment object)
"""
component.env = self
super(Environment, self).component_activated(component) |
python | def sizeOfOverlap(self, e):
"""
Get the size of the overlap between self and e.
:return: the number of bases that are shared in common between self and e.
"""
# no overlap
if not self.intersects(e):
return 0
# complete inclusion..
if e.start >= self.start and e.end <= self.end:
return len(e)
if self.start >= e.start and self.end <= e.end:
return len(self)
# partial overlap
if e.start > self.start:
return (self.end - e.start)
if self.start > e.start:
return (e.end - self.start) |
python | def estimate(self):
""" Returns the estimate of the cardinality """
E = self.alpha * float(self.m ** 2) / np.power(2.0, - self.M).sum()
if E <= 2.5 * self.m: # Small range correction
V = self.m - np.count_nonzero(self.M)
return int(self.m * np.log(self.m / float(V))) if V > 0 else int(E)
# intermediate range correction -> no correction
elif E <= float(1 << self.precision) / 30.0:
return int(E)
else:
return int(-(1 << self.precision) *
np.log(1.0 - E / (1 << self.precision))) |
python | def fs_decode(path):
"""
Decode a filesystem path using the proper filesystem encoding
:param path: The filesystem path to decode from bytes or string
:return: The filesystem path, decoded with the determined encoding
:rtype: Text
"""
path = _get_path(path)
if path is None:
raise TypeError("expected a valid path to decode")
if isinstance(path, six.binary_type):
if six.PY2:
from array import array
indexes = _invalid_utf8_indexes(array(str("B"), path))
return "".join(
chunk.decode(_fs_encoding, _fs_decode_errors)
for chunk in _chunks(path, indexes)
)
return path.decode(_fs_encoding, _fs_decode_errors)
return path |
java | private int allocateNode(int d) {
int id = 1;
int initial = - (1 << d); // has last d bits = 0 and rest all = 1
byte val = value(id);
if (val > d) { // unusable
return -1;
}
while (val < d || (id & initial) == 0) { // id & initial == 1 << d for all ids at depth d, for < d it is 0
id <<= 1;
val = value(id);
if (val > d) {
id ^= 1;
val = value(id);
}
}
byte value = value(id);
assert value == d && (id & initial) == 1 << d : String.format("val = %d, id & initial = %d, d = %d",
value, id & initial, d);
setValue(id, unusable); // mark as unusable
updateParentsAlloc(id);
return id;
} |
python | def write_ch (self, ch):
'''This puts a character at the current cursor position. The cursor
position is moved forward with wrap-around, but no scrolling is done if
the cursor hits the lower-right corner of the screen. '''
if isinstance(ch, bytes):
ch = self._decode(ch)
#\r and \n both produce a call to cr() and lf(), respectively.
ch = ch[0]
if ch == u'\r':
self.cr()
return
if ch == u'\n':
self.crlf()
return
if ch == chr(screen.BS):
self.cursor_back()
return
self.put_abs(self.cur_r, self.cur_c, ch)
old_r = self.cur_r
old_c = self.cur_c
self.cursor_forward()
if old_c == self.cur_c:
self.cursor_down()
if old_r != self.cur_r:
self.cursor_home (self.cur_r, 1)
else:
self.scroll_up ()
self.cursor_home (self.cur_r, 1)
self.erase_line() |
java | public final void ruleOpMultiAssign() throws RecognitionException {
int stackSize = keepStackSize();
try {
// InternalXbase.g:146:2: ( ( ( rule__OpMultiAssign__Alternatives ) ) )
// InternalXbase.g:147:2: ( ( rule__OpMultiAssign__Alternatives ) )
{
// InternalXbase.g:147:2: ( ( rule__OpMultiAssign__Alternatives ) )
// InternalXbase.g:148:3: ( rule__OpMultiAssign__Alternatives )
{
if ( state.backtracking==0 ) {
before(grammarAccess.getOpMultiAssignAccess().getAlternatives());
}
// InternalXbase.g:149:3: ( rule__OpMultiAssign__Alternatives )
// InternalXbase.g:149:4: rule__OpMultiAssign__Alternatives
{
pushFollow(FOLLOW_2);
rule__OpMultiAssign__Alternatives();
state._fsp--;
if (state.failed) return ;
}
if ( state.backtracking==0 ) {
after(grammarAccess.getOpMultiAssignAccess().getAlternatives());
}
}
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
restoreStackSize(stackSize);
}
return ;
} |
python | def plot(self, figure_list):
'''
When each subscript is called, uses its standard plotting
Args:
figure_list: list of figures passed from the GUI
'''
#TODO: be smarter about how we plot ScriptIterator
if self._current_subscript_stage is not None:
if self._current_subscript_stage['current_subscript'] is not None:
self._current_subscript_stage['current_subscript'].plot(figure_list)
if (self.is_running is False) and not (self.data == {} or self.data is None):
script_names = list(self.settings['script_order'].keys())
script_indices = [self.settings['script_order'][name] for name in script_names]
_, sorted_script_names = list(zip(*sorted(zip(script_indices, script_names))))
last_script = self.scripts[sorted_script_names[-1]]
last_script.force_update() # since we use the last script plot function we force it to refresh
axes_list = last_script.get_axes_layout(figure_list)
# catch error if the _plot function doesn't take an optional data argument
try:
last_script._plot(axes_list, self.data)
except TypeError as err:
print(warnings.warn('can\'t plot average script data because script.plot function doesn\'t take data as an optional argument. Plotting last data set instead'))
print(err)
last_script.plot(figure_list) |
python | def center_widget_on_screen(widget, screen=None):
"""
Centers given Widget on the screen.
:param widget: Current Widget.
:type widget: QWidget
:param screen: Screen used for centering.
:type screen: int
:return: Definition success.
:rtype: bool
"""
screen = screen if screen is not None else QApplication.desktop().primaryScreen()  # screen 0 is a valid index, so test against None
desktop_width = QApplication.desktop().screenGeometry(screen).width()
desktop_height = QApplication.desktop().screenGeometry(screen).height()
widget.move(desktop_width // 2 - widget.sizeHint().width() // 2, desktop_height // 2 - widget.sizeHint().height() // 2)
return True |
java | @Override
public void start() {
super.start();
// load the resource index for phase II use.
String resIdxURL = syscfg.getResourceIndexURL();
try {
final ResolvedResource resolvedResource = cache.resolveResource(
ResourceType.RESOURCE_INDEX, resIdxURL);
ResourceIndex.INSTANCE.loadIndex(resolvedResource
.getCacheResourceCopy());
} catch (ResourceDownloadError e) {
stageError(e.getUserFacingMessage());
ResourceIndex.INSTANCE.loadIndex();
} catch (FileNotFoundException e) {
defaultIndex();
} catch (XMLStreamException e) {
defaultIndex();
}
processOutputDirectory();
} |
python | def makePalette(color1, color2, N, hsv=True):
"""
Generate N colors starting from `color1` to `color2`
by linear interpolation in HSV or RGB space.
:param int N: number of output colors.
:param color1: first rgb color.
:param color2: second rgb color.
:param bool hsv: if `False`, interpolation is calculated in RGB space.
.. hint:: Example: |colorpalette.py|_
"""
if hsv:
color1 = rgb2hsv(color1)
color2 = rgb2hsv(color2)
c1 = np.array(getColor(color1))
c2 = np.array(getColor(color2))
cols = []
for f in np.linspace(0, 1, N, endpoint=True):  # N samples -> N output colors
c = c1 * (1 - f) + c2 * f
if hsv:
c = np.array(hsv2rgb(c))
cols.append(c)
return cols |
java | @SuppressWarnings({"WeakerAccess"})
public static void createNotification(final Context context, final Bundle extras) {
createNotification(context, extras, Constants.EMPTY_NOTIFICATION_ID);
} |
python | def flush(self):
"""Empty the local queue and send its elements to be executed remotely.
"""
for elem in self:
if elem.id[0] != scoop.worker:
elem._delete()
self.socket.sendFuture(elem)
self.ready.clear()
self.movable.clear() |
java | public boolean inRanges(Instance instance, double[][] ranges) {
boolean isIn = true;
// updateRangesFirst must have been called on ranges
for (int j = 0; isIn && (j < ranges.length); j++) {
if (!instance.isMissing(j)) {
double value = instance.value(j);
isIn = value <= ranges[j][R_MAX];
if (isIn) isIn = value >= ranges[j][R_MIN];
}
}
return isIn;
} |
python | def plotCastro(castroData, ylims, nstep=25, zlims=None):
""" Make a color plot (castro plot) of the
delta log-likelihood as a function of
energy and flux normalization
castroData : A CastroData object, with the
log-likelihood v. normalization for each energy bin
ylims : y-axis limits
nstep : Number of y-axis steps to plot for each energy bin
zlims : z-axis limits
returns fig,ax,im,ztmp which are matplotlib figure, axes and image objects
"""
xlabel = "Energy [MeV]"
ylabel = NORM_LABEL[castroData.norm_type]
return plotCastro_base(castroData, ylims,
xlabel, ylabel, nstep, zlims) |
java | @XmlElementDecl(namespace = "http://www.ibm.com/websphere/wim", name = "carLicense")
public JAXBElement<String> createCarLicense(String value) {
return new JAXBElement<String>(_CarLicense_QNAME, String.class, null, value);
} |
java | public AtomixConfig setPartitionGroups(Map<String, PartitionGroupConfig<?>> partitionGroups) {
partitionGroups.forEach((name, group) -> group.setName(name));
this.partitionGroups = partitionGroups;
return this;
} |
python | def btc_is_multisig_segwit(privkey_info):
"""
Does the given private key info represent
a multisig bundle?
For Bitcoin, this is true for multisig p2sh (not p2sh-p2wsh)
"""
try:
jsonschema.validate(privkey_info, PRIVKEY_MULTISIG_SCHEMA)
if len(privkey_info['private_keys']) == 1:
return False
return privkey_info.get('segwit', False)
except ValidationError as e:
return False |
python | def _relative_position_to_absolute_position_unmasked(x):
"""Converts tensor from relative to aboslute indexing for local attention.
Args:
x: a Tensor of shape [batch (or batch*num_blocks), heads,
length, 2 * length - 1]
Returns:
A Tensor of shape [batch (or batch*num_blocks), heads, length, length]
"""
x_shape = common_layers.shape_list(x)
batch = x_shape[0]
heads = x_shape[1]
length = x_shape[2]
# Concat columns of pad to shift from relative to absolute indexing.
col_pad = tf.zeros((batch, heads, length, 1))
x = tf.concat([x, col_pad], axis=3)
# Concat extra elements so to add up to shape (len+1, 2*len-1).
flat_x = tf.reshape(x, [batch, heads, length * 2 * length])
flat_pad = tf.zeros((batch, heads, length-1))
flat_x_padded = tf.concat([flat_x, flat_pad], axis=2)
# Reshape and slice out the padded elements.
final_x = tf.reshape(flat_x_padded, [batch, heads, length+1, 2*length-1])
final_x = final_x[:, :, :, length-1:]
final_x = final_x[:, :, :length, :]
return final_x |
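The pad-reshape-slice trick converts relative-position logits (last dimension `2*length - 1`) into absolute positions without a gather. A numpy re-creation of the same steps just to verify the shapes, with `length = 3`:

```python
import numpy as np

batch, heads, length = 1, 1, 3
x = np.ones((batch, heads, length, 2 * length - 1))         # relative logits
col_pad = np.zeros((batch, heads, length, 1))
x = np.concatenate([x, col_pad], axis=3)                    # (1, 1, 3, 6)
flat_x = x.reshape(batch, heads, length * 2 * length)       # (1, 1, 18)
flat_pad = np.zeros((batch, heads, length - 1))
flat_x_padded = np.concatenate([flat_x, flat_pad], axis=2)  # (1, 1, 20)
final_x = flat_x_padded.reshape(batch, heads, length + 1, 2 * length - 1)
final_x = final_x[:, :, :, length - 1:]                     # (1, 1, 4, 3)
final_x = final_x[:, :, :length, :]
print(final_x.shape)                                        # (1, 1, 3, 3)
```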
java | protected RpcInvocation createRpcInvocationMessage(
final String methodName,
final Class<?>[] parameterTypes,
final Object[] args) throws IOException {
final RpcInvocation rpcInvocation;
if (isLocal) {
rpcInvocation = new LocalRpcInvocation(
methodName,
parameterTypes,
args);
} else {
try {
RemoteRpcInvocation remoteRpcInvocation = new RemoteRpcInvocation(
methodName,
parameterTypes,
args);
if (remoteRpcInvocation.getSize() > maximumFramesize) {
throw new IOException("The rpc invocation size exceeds the maximum akka framesize.");
} else {
rpcInvocation = remoteRpcInvocation;
}
} catch (IOException e) {
LOG.warn("Could not create remote rpc invocation message. Failing rpc invocation because...", e);
throw e;
}
}
return rpcInvocation;
} |
python | def agent_show(self, agent_id, **kwargs):
"https://developer.zendesk.com/rest_api/docs/chat/agents#get-agent-by-id"
api_path = "/api/v2/agents/{agent_id}"
api_path = api_path.format(agent_id=agent_id)
return self.call(api_path, **kwargs) |
python | def parse(readDataInstance):
"""
Returns a L{Directory}-like object.
@type readDataInstance: L{ReadData}
@param readDataInstance: L{ReadData} object to read from.
@rtype: L{Directory}
@return: L{Directory} object.
"""
d = Directory()
d.rva.value = readDataInstance.readDword()
d.size.value = readDataInstance.readDword()
return d |
java | void visitProperty(
@Nonnull TypedElement type,
@Nonnull String name,
@Nullable MethodElement readMethod,
@Nullable MethodElement writeMethod,
boolean isReadOnly,
@Nullable AnnotationMetadata annotationMetadata,
@Nullable Map<String, ClassElement> typeArguments) {
final Type propertyType = getTypeForElement(type);
propertyDefinitions.add(
new BeanPropertyWriter(
this,
propertyType,
name,
readMethod,
writeMethod,
isReadOnly,
propertyIndex++,
annotationMetadata,
typeArguments
));
} |
python | def wrap(self, availWidth, availHeight):
" This can be called more than once! Do not overwrite important data like drawWidth "
availHeight = self.setMaxHeight(availHeight)
# print "image wrap", id(self), availWidth, availHeight, self.drawWidth, self.drawHeight
width = min(self.drawWidth, availWidth)
wfactor = float(width) / self.drawWidth
height = min(self.drawHeight, availHeight * MAX_IMAGE_RATIO)
hfactor = float(height) / self.drawHeight
factor = min(wfactor, hfactor)
self.dWidth = self.drawWidth * factor
self.dHeight = self.drawHeight * factor
# print "imgage result", factor, self.dWidth, self.dHeight
return self.dWidth, self.dHeight |
java | public static int cs_tdfs(int j, int k, int[] head, int head_offset, int[] next, int next_offset, int[] post,
int post_offset, int[] stack, int stack_offset) {
int i, p, top = 0;
if (head == null || next == null || post == null || stack == null)
return (-1); /* check inputs */
stack[stack_offset + 0] = j; /* place j on the stack */
while (top >= 0) /* while (stack is not empty) */
{
p = stack[stack_offset + top]; /* p = top of stack */
i = head[head_offset + p]; /* i = youngest child of p */
if (i == -1) {
top--; /* p has no unordered children left */
post[post_offset + (k++)] = p; /* node p is the kth postordered node */
} else {
head[head_offset + p] = next[next_offset + i]; /* remove i from children of p */
stack[stack_offset + (++top)] = i; /* start dfs on child node i */
}
}
return (k);
} |
java | private void addDCTriples(Datastream ds,
URIReference objURI,
Set<Triple> set) throws Exception {
DCFields dc = new DCFields(ds.getContentStream());
Map<RDFName, List<DCField>> map = dc.getMap();
for (RDFName predicate : map.keySet()) {
for (DCField dcField : map.get(predicate)) {
String lang = dcField.getLang();
if (lang == null) {
add(objURI, predicate, dcField.getValue(), set);
} else {
add(objURI, predicate, dcField.getValue(), lang, set);
}
}
}
} |
python | def get(self, id_vlan):
"""Get a VLAN by your primary key.
Network IPv4/IPv6 related will also be fetched.
:param id_vlan: ID for VLAN.
:return: Following dictionary:
::
{'vlan': {'id': < id_vlan >,
'nome': < nome_vlan >,
'num_vlan': < num_vlan >,
'id_ambiente': < id_ambiente >,
'descricao': < descricao >,
'acl_file_name': < acl_file_name >,
'acl_valida': < acl_valida >,
'acl_file_name_v6': < acl_file_name_v6 >,
'acl_valida_v6': < acl_valida_v6 >,
'ativada': < ativada >,
'redeipv4': [ { all networkipv4 related } ],
'redeipv6': [ { all networkipv6 related } ] } }
:raise InvalidParameterError: Invalid ID for VLAN.
:raise VlanNaoExisteError: VLAN not found.
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLError: Networkapi failed to generate the XML response.
"""
if not is_valid_int_param(id_vlan):
raise InvalidParameterError(
u'Parameter id_vlan is invalid. Value: ' +
id_vlan)
url = 'vlan/' + str(id_vlan) + '/network/'
code, xml = self.submit(None, 'GET', url)
return get_list_map(
self.response(
code, xml, [
'redeipv4', 'redeipv6']), 'vlan') |
java | static final public DateFormat getDateInstance(Calendar cal, int dateStyle) {
return getDateInstance(cal, dateStyle, ULocale.getDefault(Category.FORMAT));
} |
java | private void cleanupSession(){
eventThread.queueClientEvent(new ClientSessionEvent(SessionEvent.CLOSED));
watcherManager.cleanup();
session.id = "";
session.password = null;
session.serverId = -1;
onSessionClose();
} |
python | def predictions(self, stpid="", rt="", vid="", maxpredictions=""):
"""
Retrieve predictions for 1+ stops or 1+ vehicles.
Arguments:
`stpid`: unique ID number for bus stop (single or comma-seperated list or iterable)
or
`vid`: vehicle ID number (single or comma-seperated list or iterable)
or
`stpid` and `rt`
`maxpredictions` (optional): limit number of predictions returned
Response:
`prd`: (prediction container) contains list of
`tmstp`: when prediction was generated
`typ`: prediction type ('A' = arrival, 'D' = departure)
`stpid`: stop ID for prediction
`stpnm`: stop name for prediction
`vid`: vehicle ID for prediction
`dstp`: vehicle distance to stop (feet)
`rt`: bus route
`des`: bus destination
`prdtm`: ETA/ETD
`dly`: True if bus delayed
`tablockid`, `tatripid`, `zone`: internal, see `self.vehicles`
http://realtime.portauthority.org/bustime/apidoc/v1/main.jsp?section=predictions.jsp
"""
if (stpid and vid) or (rt and vid):
raise ValueError("These parameters cannot be specified simultaneously.")
elif not (stpid or rt or vid):
raise ValueError("You must specify a parameter.")
if listlike(stpid): stpid = ",".join(stpid)
if listlike(rt): rt = ",".join(rt)
if listlike(vid): vid = ",".join(vid)
if stpid or (rt and stpid) or vid:
url = self.endpoint('PREDICTION', dict(rt=rt, stpid=stpid, vid=vid, top=maxpredictions))
return self.response(url) |
python | def _dump_spec(spec):
"""Dump bel specification dictionary using YAML
Formats this with an extra indentation for lists to make it easier to
use cold folding on the YAML version of the spec dictionary.
"""
with open("spec.yaml", "w") as f:
yaml.dump(spec, f, Dumper=MyDumper, default_flow_style=False) |
java | protected <T> Optional<T> getTarget(Event event, Class<T> type) {
return getValue(event, event::getTarget, type);
} |
java | private void checkPolicy(X509Certificate currCert)
throws CertPathValidatorException
{
String msg = "certificate policies";
if (debug != null) {
debug.println("PolicyChecker.checkPolicy() ---checking " + msg
+ "...");
debug.println("PolicyChecker.checkPolicy() certIndex = "
+ certIndex);
debug.println("PolicyChecker.checkPolicy() BEFORE PROCESSING: "
+ "explicitPolicy = " + explicitPolicy);
debug.println("PolicyChecker.checkPolicy() BEFORE PROCESSING: "
+ "policyMapping = " + policyMapping);
debug.println("PolicyChecker.checkPolicy() BEFORE PROCESSING: "
+ "inhibitAnyPolicy = " + inhibitAnyPolicy);
debug.println("PolicyChecker.checkPolicy() BEFORE PROCESSING: "
+ "policyTree = " + rootNode);
}
X509CertImpl currCertImpl = null;
try {
currCertImpl = X509CertImpl.toImpl(currCert);
} catch (CertificateException ce) {
throw new CertPathValidatorException(ce);
}
boolean finalCert = (certIndex == certPathLen);
rootNode = processPolicies(certIndex, initPolicies, explicitPolicy,
policyMapping, inhibitAnyPolicy, rejectPolicyQualifiers, rootNode,
currCertImpl, finalCert);
if (!finalCert) {
explicitPolicy = mergeExplicitPolicy(explicitPolicy, currCertImpl,
finalCert);
policyMapping = mergePolicyMapping(policyMapping, currCertImpl);
inhibitAnyPolicy = mergeInhibitAnyPolicy(inhibitAnyPolicy,
currCertImpl);
}
certIndex++;
if (debug != null) {
debug.println("PolicyChecker.checkPolicy() AFTER PROCESSING: "
+ "explicitPolicy = " + explicitPolicy);
debug.println("PolicyChecker.checkPolicy() AFTER PROCESSING: "
+ "policyMapping = " + policyMapping);
debug.println("PolicyChecker.checkPolicy() AFTER PROCESSING: "
+ "inhibitAnyPolicy = " + inhibitAnyPolicy);
debug.println("PolicyChecker.checkPolicy() AFTER PROCESSING: "
+ "policyTree = " + rootNode);
debug.println("PolicyChecker.checkPolicy() " + msg + " verified");
}
} |
python | def main():
"""Main function for SPEAD sender module."""
# Check command line arguments.
if len(sys.argv) != 2:
raise RuntimeError('Usage: python3 async_send.py <json config>')
# Set up logging.
sip_logging.init_logger(show_thread=False)
# Load SPEAD configuration from JSON file.
# _path = os.path.dirname(os.path.abspath(__file__))
# with open(os.path.join(_path, 'spead_send.json')) as file_handle:
# spead_config = json.load(file_handle)
spead_config = json.loads(sys.argv[1])
try:
_path = os.path.dirname(os.path.abspath(__file__))
schema_path = os.path.join(_path, 'config_schema.json')
with open(schema_path) as schema_file:
schema = json.load(schema_file)
validate(spead_config, schema)
except ValidationError as error:
print(error.cause)
raise
# Set up the SPEAD sender and run it (see method, above).
sender = SpeadSender(spead_config)
sender.run() |
java | public static <S> ServiceLoader<S> load(Class<S> service, ClassLoader loader) {
if (loader == null) {
loader = service.getClassLoader();
}
return new ServiceLoader<S>(service, new ClassLoaderResourceLoader(loader));
} |
java | private boolean startsWith(ArchivePath fullPath, ArchivePath startingPath) {
final String context = fullPath.get();
final String startingContext = startingPath.get();
return context.startsWith(startingContext);
} |
java | public static String getStringForSign(ProductPayRequest request) {
Map<String, Object> params = new HashMap<String, Object>();
// 必选参数
params.put(HwPayConstant.KEY_MERCHANTID, request.getMerchantId());
params.put(HwPayConstant.KEY_APPLICATIONID, request.getApplicationID());
params.put(HwPayConstant.KEY_PRODUCT_NO, request.getProductNo());
params.put(HwPayConstant.KEY_REQUESTID, request.getRequestId());
params.put(HwPayConstant.KEY_SDKCHANNEL, request.getSdkChannel());
// 可选参数
params.put(HwPayConstant.KEY_URL, request.getUrl());
params.put(HwPayConstant.KEY_URLVER, request.getUrlVer());
return getNoSign(params, false);
} |
java | private void readIdToPendingRequests(CoronaSerializer coronaSerializer)
throws IOException {
coronaSerializer.readField("idToPendingRequests");
// Expecting the START_ARRAY token for idToPendingRequests
coronaSerializer.readStartArrayToken("idToPendingRequests");
JsonToken current = coronaSerializer.nextToken();
while (current != JsonToken.END_ARRAY) {
pendingRequestsList.add(coronaSerializer.jsonParser.getIntValue());
current = coronaSerializer.nextToken();
}
// Done with reading the END_ARRAY token for idToPendingRequests
} |
java | public final void replace(final List<Resource> r1, final List<Resource> r2) {
this.resources.removeAll(r1);
this.resources.addAll(r2);
} |