language | func_code_string |
---|---|
java | protected GenericDataSourceBase<OUT, ?> translateToDataFlow() {
String name = this.name != null ? this.name : this.inputFormat.toString();
if (name.length() > 100) {
name = name.substring(0, 100);
}
@SuppressWarnings({ "unchecked", "rawtypes" })
GenericDataSourceBase<OUT, ?> source = new GenericDataSourceBase(this.inputFormat,
new OperatorInformation<OUT>(getType()), name);
source.setDegreeOfParallelism(dop);
return source;
} |
java | public static String paceFormat(final Number value, final TimeMillis interval)
{
return paceFormat(value, interval.millis());
} |
java | public Observable<EntityRole> getEntityRoleAsync(UUID appId, String versionId, UUID entityId, UUID roleId) {
return getEntityRoleWithServiceResponseAsync(appId, versionId, entityId, roleId).map(new Func1<ServiceResponse<EntityRole>, EntityRole>() {
@Override
public EntityRole call(ServiceResponse<EntityRole> response) {
return response.body();
}
});
} |
java | private Section parseSection(boolean mediaQuery) {
Section section = new Section();
parseSectionSelector(mediaQuery, section);
tokenizer.consumeExpectedSymbol("{");
while (tokenizer.more()) {
if (tokenizer.current().isSymbol("}")) {
tokenizer.consumeExpectedSymbol("}");
return section;
}
// Parse "normal" attributes like "font-weight: bold;"
if (isAtAttribute()) {
Attribute attr = parseAttribute();
section.addAttribute(attr);
} else if (tokenizer.current().isKeyword(KEYWORD_MEDIA)) {
// Take care of @media sub sections
section.addSubSection(parseSection(true));
} else if (tokenizer.current().isKeyword(KEYWORD_INCLUDE)) {
parseInclude(section);
} else if (tokenizer.current().isKeyword(KEYWORD_EXTEND)) {
parseExtend(section);
} else {
// If it is neither an attribute, nor a media query or instruction - it is probably a sub section...
section.addSubSection(parseSection(false));
}
}
tokenizer.consumeExpectedSymbol("}");
return section;
} |
java | protected com.sun.jersey.api.client.Client getJerseyClient() {
final ClientConfig clientConfig = new DefaultClientConfig();
clientConfig.getFeatures().put(JSONConfiguration.FEATURE_POJO_MAPPING, Boolean.TRUE);
com.sun.jersey.api.client.Client client = com.sun.jersey.api.client.Client.create(clientConfig);
if(this.debug) {
client.addFilter(new LoggingFilter(System.out));
}
return client;
} |
python | def set_default_reference(self, method, reference):
"""
Set the default reference for a method.
:arg method: name of a method
:type method: :class:`str`
{reference}
"""
if method not in self._available_methods:
raise ValueError('Unknown method: {0}'.format(method))
self._default_references[method] = reference |
python | def process_common(self):
'''
Some data processing common for all services.
No need to override this.
'''
data = self.data
data_content = data['content'][0]
## Paste the output of a command
# This is deprecated after piping support
if data['command']:
try:
call = subprocess.Popen(data_content.split(),
stderr=subprocess.PIPE,
stdout = subprocess.PIPE)
out, err = call.communicate()
content = out
except OSError:
logging.exception('Cannot execute the command')
content = ''
if not data['title']:
data['title'] = 'Output of command: `%s`' %(data_content)
## Paste the output of a file
# This is deprecated after piping support
elif data['file']:
try:
            f = open(data_content)
content = f.read()
f.close()
except IOError:
logging.exception('File not present or unreadable')
content = ''
if not data['title']:
data['title'] = 'File: `%s`' %(data_content)
else:
content = data_content
self.data['content'] = content
self.data['syntax'] = self.SYNTAX_DICT.get(self.data['syntax'], '')
# Excluded data not useful in paste information
for key in ['func', 'verbose', 'service', 'extra', 'command', 'file']:
del self.data[key] |
java | public void setDesiredWeightsAndCapacities(java.util.Collection<DesiredWeightAndCapacity> desiredWeightsAndCapacities) {
if (desiredWeightsAndCapacities == null) {
this.desiredWeightsAndCapacities = null;
return;
}
this.desiredWeightsAndCapacities = new java.util.ArrayList<DesiredWeightAndCapacity>(desiredWeightsAndCapacities);
} |
python | def _link_variables_on_expr(self, variable_manager, block, stmt_idx, stmt, expr):
"""
Link atoms (AIL expressions) in the given expression to corresponding variables identified previously.
:param variable_manager: Variable manager of the function.
:param ailment.Block block: AIL block.
:param int stmt_idx: ID of the statement.
:param stmt: The AIL statement that `expr` belongs to.
        :param expr: The AIL expression to work on.
:return: None
"""
if type(expr) is ailment.Expr.Register:
# find a register variable
reg_vars = variable_manager.find_variables_by_atom(block.addr, stmt_idx, expr)
# TODO: make sure it is the correct register we are looking for
if len(reg_vars) == 1:
reg_var, offset = next(iter(reg_vars))
expr.variable = reg_var
expr.offset = offset
elif type(expr) is ailment.Expr.Load:
variables = variable_manager.find_variables_by_atom(block.addr, stmt_idx, expr)
if len(variables) == 0:
self._link_variables_on_expr(variable_manager, block, stmt_idx, stmt, expr.addr)
else:
if len(variables) > 1:
l.error("More than one variable are available for atom %s. Consider fixing it using phi nodes.",
expr
)
var, offset = next(iter(variables))
expr.variable = var
expr.offset = offset
elif type(expr) is ailment.Expr.BinaryOp:
variables = variable_manager.find_variables_by_atom(block.addr, stmt_idx, expr)
if len(variables) == 1:
var, offset = next(iter(variables))
expr.referenced_variable = var
expr.offset = offset
else:
self._link_variables_on_expr(variable_manager, block, stmt_idx, stmt, expr.operands[0])
self._link_variables_on_expr(variable_manager, block, stmt_idx, stmt, expr.operands[1])
elif type(expr) is ailment.Expr.UnaryOp:
variables = variable_manager.find_variables_by_atom(block.addr, stmt_idx, expr)
if len(variables) == 1:
var, offset = next(iter(variables))
expr.referenced_variable = var
expr.offset = offset
else:
self._link_variables_on_expr(variable_manager, block, stmt_idx, stmt, expr.operands)
elif type(expr) is ailment.Expr.Convert:
self._link_variables_on_expr(variable_manager, block, stmt_idx, stmt, expr.operand)
elif isinstance(expr, ailment.Expr.BasePointerOffset):
variables = variable_manager.find_variables_by_atom(block.addr, stmt_idx, expr)
if len(variables) == 1:
var, offset = next(iter(variables))
expr.referenced_variable = var
expr.offset = offset |
python | def query_instance(vm_=None, call=None):
'''
Query an instance upon creation from the Joyent API
'''
if isinstance(vm_, six.string_types) and call == 'action':
vm_ = {'name': vm_, 'provider': 'joyent'}
if call == 'function':
# Technically this function may be called other ways too, but it
# definitely cannot be called with --function.
raise SaltCloudSystemExit(
'The query_instance action must be called with -a or --action.'
)
__utils__['cloud.fire_event'](
'event',
'querying instance',
'salt/cloud/{0}/querying'.format(vm_['name']),
sock_dir=__opts__['sock_dir'],
transport=__opts__['transport']
)
def _query_ip_address():
data = show_instance(vm_['name'], call='action')
if not data:
log.error(
'There was an error while querying Joyent. Empty response'
)
# Trigger a failure in the wait for IP function
return False
if isinstance(data, dict) and 'error' in data:
log.warning('There was an error in the query %s', data.get('error'))
# Trigger a failure in the wait for IP function
return False
log.debug('Returned query data: %s', data)
if 'primaryIp' in data[1]:
# Wait for SSH to be fully configured on the remote side
if data[1]['state'] == 'running':
return data[1]['primaryIp']
return None
try:
data = salt.utils.cloud.wait_for_ip(
_query_ip_address,
timeout=config.get_cloud_config_value(
'wait_for_ip_timeout', vm_, __opts__, default=10 * 60),
interval=config.get_cloud_config_value(
'wait_for_ip_interval', vm_, __opts__, default=10),
interval_multiplier=config.get_cloud_config_value(
'wait_for_ip_interval_multiplier', vm_, __opts__, default=1),
)
except (SaltCloudExecutionTimeout, SaltCloudExecutionFailure) as exc:
try:
# destroy(vm_['name'])
pass
except SaltCloudSystemExit:
pass
finally:
raise SaltCloudSystemExit(six.text_type(exc))
return data |
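The `_query_ip_address` helper follows the `wait_for_ip` callback contract visible in the comments above: returning `False` triggers a failure, returning `None` keeps polling, and any truthy value ends the wait. A minimal illustration of that contract (hypothetical poller, not salt's implementation):

```python
import time

def wait_for_ip(poll, timeout=600, interval=10):
    # Hypothetical restatement of the contract used above:
    # False -> fail fast, None -> keep waiting, truthy -> done.
    deadline = time.time() + timeout
    while time.time() < deadline:
        result = poll()
        if result is False:
            raise RuntimeError("query failed")
        if result is not None:
            return result
        time.sleep(interval)
    raise TimeoutError("no IP address before timeout")
```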
java | protected WebElement getFirstElement(Elements elems) {
DocumentWebElement documentWebElement = Iterables.getFirst(elems.as(InternalWebElements.class).wrappedNativeElements(), null);
return documentWebElement == null ? null : documentWebElement.getWrappedWebElement();
} |
python | def toPlanarPotential(Pot):
"""
NAME:
toPlanarPotential
PURPOSE:
   convert a Potential to a planarPotential in the mid-plane (z=0)
INPUT:
Pot - Potential instance or list of such instances (existing planarPotential instances are just copied to the output)
OUTPUT:
planarPotential instance(s)
HISTORY:
2016-06-11 - Written - Bovy (UofT)
"""
Pot= flatten(Pot)
if isinstance(Pot,list):
out= []
for pot in Pot:
if isinstance(pot,planarPotential):
out.append(pot)
elif isinstance(pot,Potential) and pot.isNonAxi:
out.append(planarPotentialFromFullPotential(pot))
elif isinstance(pot,Potential):
out.append(planarPotentialFromRZPotential(pot))
else:
                raise PotentialError("Input to 'toPlanarPotential' is neither a Potential instance nor a list of such instances")
return out
elif isinstance(Pot,Potential) and Pot.isNonAxi:
return planarPotentialFromFullPotential(Pot)
elif isinstance(Pot,Potential):
return planarPotentialFromRZPotential(Pot)
elif isinstance(Pot,planarPotential):
return Pot
else:
        raise PotentialError("Input to 'toPlanarPotential' is neither a Potential instance nor a list of such instances") |
java | public void trace(Object message, Throwable t) {
differentiatedLog(null, LOGGER_FQCN, LocationAwareLogger.TRACE_INT, message, t);
} |
python | def _render_conditions(conditions):
"""Render the conditions part of a query.
Parameters
----------
conditions : list
A list of dictionary items to filter a table.
Returns
-------
str
A string that represents the "where" part of a query
See Also
--------
render_query : Further clarification of `conditions` formatting.
"""
if not conditions:
return ""
rendered_conditions = []
for condition in conditions:
field = condition.get('field')
field_type = condition.get('type')
comparators = condition.get('comparators')
if None in (field, field_type, comparators) or not comparators:
            logger.warning('Invalid condition passed in: %s', condition)
continue
rendered_conditions.append(
_render_condition(field, field_type, comparators))
if not rendered_conditions:
return ""
return "WHERE %s" % (" AND ".join(rendered_conditions)) |
java | public String createRunner(Vector<Object> runnerParams)
{
try
{
Runner runner = XmlRpcDataMarshaller.toRunner(runnerParams);
service.createRunner(runner);
log.debug( "Created Runner: " + runner.getName() );
return SUCCESS;
}
catch (Exception e)
{
return errorAsString( e, RUNNER_CREATE_FAILED );
}
} |
java | @Override
public Model read(Configuration config) {
String name = config.getName();
Descriptor desc = getDescriptor();
if (IMPLEMENTATION_RULES.equals(name)) {
return new V1RulesComponentImplementationModel(config, desc);
} else if (OPERATION.equals(name)) {
return new V1RulesOperationModel(config, desc);
}
return super.read(config);
} |
java | @Deprecated
public static final StyleSheet parse(String css) throws IOException,
CSSException {
URL base = new URL("file:///base/url/is/not/specified"); //Cannot determine the base URI in this method but we need some base URI for relative URLs
return getCSSParserFactory().parse(css, getNetworkProcessor(),
null, SourceType.EMBEDDED, base);
} |
python | def maybe_curry(maybe_fn, first_arg) -> 'Function | Any':
"""
If maybe_fn is a function, curries it and passes in first_arg. Otherwise
returns maybe_fn.
"""
if not callable(maybe_fn):
return maybe_fn
return tz.curry(maybe_fn)(first_arg) |
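A minimal usage sketch (assuming `tz` is the `toolz` library, which the `tz.curry` call suggests): non-callables pass through unchanged, while callables come back with `first_arg` pre-applied.

```python
import toolz as tz

def maybe_curry(maybe_fn, first_arg):
    if not callable(maybe_fn):
        return maybe_fn
    return tz.curry(maybe_fn)(first_arg)

add = lambda a, b: a + b
add_two = maybe_curry(add, 2)  # curried: waits for the remaining argument
print(add_two(3))              # 5
print(maybe_curry(7, 2))       # 7 -- not callable, passed through
```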
java | private List<CollectionJsonData> determineQueryProperties() {
if (!getHttpMethod().equals(HttpMethod.GET)) {
return Collections.emptyList();
}
return getQueryMethodParameters().stream() //
.map(queryProperty -> new CollectionJsonData() //
.withName(queryProperty.getName()) //
.withValue("")) //
.collect(Collectors.toList());
} |
java | public static <T extends Number & Comparable<?>> NumberExpression<T> asNumber(T value) {
return asNumber(constant(value));
} |
java | private Node createBootstrapField(String fieldName, String labelText,
Element input, boolean showError) {
Validate.notNull(input, "You must define an input-element.");
Validate.notNull(fieldName, "You must define a fieldName.");
LOG.trace("Creating Bootstrap field with fieldname = '{}'", fieldName);
// Create the new elements tags
final Element controlgroup = new Element("div");
controlgroup.setAttribute(ATTR_CLASS, "control-group");
if (showError) {
controlgroup.setAttribute("th:classappend", "${#fields.hasErrors('"
+ fieldName + "')}? 'error'");
}
final Element label = new Element("label");
label.setAttribute(ATTR_CLASS, "control-label");
label.setAttribute("th:for", "'" + fieldName + "'");
label.addChild(new Text(labelText));
final Element controls = new Element("div");
controls.setAttribute(ATTR_CLASS, "controls");
        // It is necessary to clone the element; otherwise it wouldn't be recomputed.
final Node newInput = input.cloneNode(null, false);
controls.addChild(newInput);
if (showError) {
Element help = new Element("span");
help.setAttribute(ATTR_CLASS, "help-inline");
help.setAttribute("id", "error_" + fieldName);
help.setAttribute("th:if", "${#fields.hasErrors('" + fieldName
+ "')}");
help.setAttribute("th:errors", "*{" + fieldName + "}");
controls.addChild(help);
}
controlgroup.addChild(label);
controlgroup.addChild(controls);
return controlgroup;
} |
python | def api_retrieve(self, api_key=None):
"""
Call the stripe API's retrieve operation for this model.
:param api_key: The api key to use for this request. Defaults to settings.STRIPE_SECRET_KEY.
:type api_key: string
"""
api_key = api_key or self.default_api_key
return self.stripe_class.retrieve(
id=self.id, api_key=api_key, expand=self.expand_fields
) |
java | @Override
public void schema(Class<?> type)
{
Objects.requireNonNull(type);
_context.schema(type);
} |
java | public static DescriptorExtensionList<CaptchaSupport, Descriptor<CaptchaSupport>> all() {
return Jenkins.getInstance().<CaptchaSupport, Descriptor<CaptchaSupport>>getDescriptorList(CaptchaSupport.class);
} |
java | protected static void destroyProcess(String pid, long sleeptimeBeforeSigkill,
boolean inBackground) {
terminateProcess(pid);
sigKill(pid, false, sleeptimeBeforeSigkill, inBackground);
} |
python | def create(provider, count=1, name=None, **kwargs):
r'''
Create one or more cloud servers
Args:
* provider (str): Cloud provider, e.g. ec2, digitalocean
* count (int) =1: Number of instances
* name (str) =None: Name of server(s)
* \**kwargs: Provider-specific flags
'''
count = int(count)
provider = provider_by_name(provider)
options = provider.create_server_defaults
options.update(kwargs)
names = [name] * count
provider.validate_create_options(**options)
return provider.create_servers(count, names, **options) |
java | public Object set(int index, Object element)
{
if ((index < m_iStartIndex) || (index >= m_iStartIndex + m_iMaxSize))
{ // Out of bounds, re-adjust bounds
int iNewStart = index - m_iMaxSize / 2; // index should be right in the middle
if (iNewStart < 0)
iNewStart = 0;
int iStart = 0;
int iEnd = this.size() - 1;
int iIncrement = +1;
if (iNewStart < m_iStartIndex)
{ // Go backwards to avoid overlaying values
iStart = iEnd;
iEnd = 0;
iIncrement = -1;
}
for (int i = iStart; i * iIncrement <= iEnd; i = i + iIncrement)
{
Object obj = super.set(i, null); // Get and clear value
int iShiftedIndex = i + m_iStartIndex - iNewStart;
if ((iShiftedIndex >= 0) && (iShiftedIndex < this.size()))
super.set(iShiftedIndex, obj); // Move/set it!
else
this.freeObject(m_iStartIndex + i, obj); // Notify obj
}
m_iStartIndex = iNewStart;
}
if (index >= m_iStartIndex + this.size())
        { // Need to add empty elements in between
for (int i = m_iStartIndex + this.size(); i <= index; i++)
this.add(null); // Add a placeholder
}
index = index - m_iStartIndex;
return super.set(index, element);
} |
java | public void actionDelete() throws JspException {
// save initialized instance of this class in request attribute for included sub-elements
getJsp().getRequest().setAttribute(SESSION_WORKPLACE_CLASS, this);
try {
getCms().deletePropertyDefinition(getParamPropertyName());
// close the dialog
actionCloseDialog();
} catch (Throwable e) {
// error while deleting property definition, show error dialog
includeErrorpage(this, e);
}
} |
python | def insert_right(self, item):
'Insert a new item. If equal keys are found, add to the right'
k = self._key(item)
i = bisect_right(self._keys, k)
self._keys.insert(i, k)
self._items.insert(i, item) |
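A runnable sketch of the surrounding container (a hypothetical minimal host class, inferred from the parallel `_keys`/`_items` lists and `_key` extractor the method uses): `bisect_right` places equal keys after existing ones, keeping insertion order stable.

```python
from bisect import bisect_right

class SortedCollection:
    # Hypothetical minimal host class implied by the method body.
    def __init__(self, key=lambda x: x):
        self._key = key
        self._keys = []
        self._items = []

    def insert_right(self, item):
        'Insert a new item. If equal keys are found, add to the right'
        k = self._key(item)
        i = bisect_right(self._keys, k)
        self._keys.insert(i, k)
        self._items.insert(i, item)

coll = SortedCollection(key=len)
for word in ["bb", "a", "cc"]:
    coll.insert_right(word)
print(coll._items)  # ['a', 'bb', 'cc'] -- 'cc' lands to the right of equal-length 'bb'
```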
java | public final List<DataSlice> flatten(Object store) throws MessageEncodeFailedException {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "flatten");
List<DataSlice> slices = null; // d395685
try { // d395685
/* The JMO does the real work */
slices = jmo.flatten(store, getFlattenedClassName()); // SIB0112b.mfp.2
} // d395685
// If the flatten fails, it may be an UnsupportedEncodingException which means
// we have an underlying MQ message with an unsupported CCSID for the data.
// If that is the case, the message may well be on the ExceptionDestination
// already so we have to be able to flatten it. d395685
catch (MessageEncodeFailedException e) {
      // Lower-levels have already FFDC'd everything that could possibly be useful.
// No FFDC code needed
// Is the exception caused by an unsupported CCSID?
Throwable e1 = e.getCause();
while ((e1 != null) && !(e1 instanceof UnsupportedEncodingException)) {
e1 = e1.getCause();
}
// If so, we therefore transcribe it to JMF and have another go.
if (e1 != null) {
try {
JsMessageImpl tempMsg = (JsMessageImpl)this.transcribeToJmf();
slices = tempMsg.jmo.flatten(store, getFlattenedClassName());
}
          // If we can't transcribe there must be something else wrong so give up
catch (Exception newe) {
          // Lower-levels have already FFDC'd everything that could possibly be useful.
// No FFDC code needed
if (newe instanceof MessageEncodeFailedException) {
throw (MessageEncodeFailedException)newe;
}
else {
throw new MessageEncodeFailedException(newe);
}
}
}
// If we got to the end of the exception chain without finding UnsupportedEncodingException
// we must have a disaster, so we just throw the original exception on.
else {
throw e;
}
}
/* Now we know the real size of the flattened message, store it */
int temp = 0;
for (int i=0; i < slices.size(); i++) {
temp = temp + slices.get(i).getLength();
}
// Setting approxLength isn't synchronized as a) it is atomic & b) the correctness isn't vital
approxLength = temp;
/* and return the result */
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "flatten");
return slices;
} |
python | def mission_request_send(self, target_system, target_component, seq, force_mavlink1=False):
'''
Request the information of the mission item with the sequence number
seq. The response of the system to this message should
be a MISSION_ITEM message.
http://qgroundcontrol.org/mavlink/waypoint_protocol
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
seq : Sequence (uint16_t)
'''
return self.send(self.mission_request_encode(target_system, target_component, seq), force_mavlink1=force_mavlink1) |
java | @SuppressWarnings("unchecked")
private static Deserializer<?> getDeserializer(Configuration conf) {
String deserializerClassName = conf.get(HBaseBackfillMerger.CONFKEY_DESERIALIZER);
if (deserializerClassName == null) {
throw new RuntimeException("Configuration didn't set " + deserializerClassName);
}
try {
Class<?> deserializerClass = Class.forName(deserializerClassName);
if (!Deserializer.class.isAssignableFrom(deserializerClass)) {
                final String errMsg = "The provided deserializer class " +
                        conf.get(HBaseBackfillMerger.CONFKEY_DESERIALIZER) + " doesn't implement " +
                        Deserializer.class.getName();
                log.error(errMsg);
                throw new RuntimeException(errMsg);
            }
return ((Class<? extends Deserializer<?>>) deserializerClass).newInstance();
} catch (Exception e) {
log.error("Couldn't instantiate deserializer", e);
throw new RuntimeException(e);
}
} |
java | private void fireNewCurrentSolution(SolutionType newCurrentSolution,
Evaluation newCurrentSolutionEvaluation,
Validation newCurrentSolutionValidation){
for(SearchListener<? super SolutionType> l : getSearchListeners()){
l.newCurrentSolution(this, newCurrentSolution,
newCurrentSolutionEvaluation,
newCurrentSolutionValidation);
}
} |
java | public static <T> List<T> take(List<T> self, int num) {
return (List<T>) take((Iterable<T>)self, num);
} |
python | def _load_prev(self):
"""Load the next days data (or file) without decrementing the date.
Repeated calls will not decrement date/file and will produce the same
data
Uses info stored in object to either decrement the date,
or the file. Looks for self._load_by_date flag.
"""
if self._load_by_date:
prev_date = self.date - pds.DateOffset(days=1)
return self._load_data(date=prev_date)
else:
return self._load_data(fid=self._fid-1) |
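The decrement itself is plain pandas date arithmetic (assuming `pds` is the `pandas` alias, as the `DateOffset` call suggests):

```python
import pandas as pds

date = pds.Timestamp("2023-05-10")
prev_date = date - pds.DateOffset(days=1)
print(prev_date)  # 2023-05-09 00:00:00
```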
java | @Override
public double getValue(double quantile) {
if (quantile < 0.0 || quantile > 1.0 || Double.isNaN( quantile )) {
throw new IllegalArgumentException(quantile + " is not in [0..1]");
}
if (values.length == 0) {
return 0.0;
}
int posx = Arrays.binarySearch(quantiles, quantile);
if (posx < 0)
posx = ((-posx) - 1) - 1;
if (posx < 1) {
return values[0];
}
if (posx >= values.length) {
return values[values.length - 1];
}
return values[posx];
} |
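The lookup maps a requested quantile to the largest stored quantile at or below it. A Python sketch of the same index arithmetic (an illustration, not the library's API): `bisect_right(...) - 1` reproduces both outcomes of `Arrays.binarySearch`, the exact-hit index and the negative-insertion-point case.

```python
import bisect

def get_value(quantiles, values, q):
    # Largest stored quantile <= q.
    if not 0.0 <= q <= 1.0:
        raise ValueError("%s is not in [0..1]" % q)
    if not values:
        return 0.0
    posx = bisect.bisect_right(quantiles, q) - 1
    if posx < 1:
        return values[0]
    if posx >= len(values):
        return values[-1]
    return values[posx]

print(get_value([0.5, 0.75, 0.99], [10.0, 20.0, 30.0], 0.9))  # 20.0
```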
python | def public_ip_addresses_list(resource_group, **kwargs):
'''
.. versionadded:: 2019.2.0
List all public IP addresses within a resource group.
:param resource_group: The resource group name to list public IP
addresses within.
CLI Example:
.. code-block:: bash
salt-call azurearm_network.public_ip_addresses_list testgroup
'''
result = {}
netconn = __utils__['azurearm.get_client']('network', **kwargs)
try:
pub_ips = __utils__['azurearm.paged_object_to_list'](
netconn.public_ip_addresses.list(
resource_group_name=resource_group
)
)
for ip in pub_ips:
result[ip['name']] = ip
except CloudError as exc:
__utils__['azurearm.log_cloud_error']('network', str(exc), **kwargs)
result = {'error': str(exc)}
return result |
python | def igmpize(self, ip=None, ether=None):
"""Called to explicitely fixup associated IP and Ethernet headers
Parameters:
self The instantiation of an IGMP class.
ip The instantiation of the associated IP class.
ether The instantiation of the associated Ethernet.
Returns:
            True    The tuple ether/ip/self passed all checks and represents
                    a proper IGMP packet.
            False   One or more validation checks failed and no fields
                    were adjusted.
The function will examine the IGMP message to assure proper format.
Corrections will be attempted if possible. The IP header is then properly
adjusted to ensure correct formatting and assignment. The Ethernet header
is then adjusted to the proper IGMP packet format.
"""
# The rules are:
# 1. the Max Response time is meaningful only in Membership Queries and should be zero
# otherwise (RFC 2236, section 2.2)
if (self.type != 0x11): #rule 1
self.mrtime = 0
if (self.adjust_ip(ip) == True):
if (self.adjust_ether(ip, ether) == True): return True
return False |
python | def apply(self, func, num_splits=None, other_axis_partition=None, **kwargs):
"""Applies func to the object in the plasma store.
See notes in Parent class about this method.
Args:
func: The function to apply.
num_splits: The number of times to split the result object.
other_axis_partition: Another `PyarrowOnRayFrameAxisPartition` object to apply to
func with this one.
Returns:
A list of `RayRemotePartition` objects.
"""
if num_splits is None:
num_splits = len(self.list_of_blocks)
if other_axis_partition is not None:
return [
PyarrowOnRayFramePartition(obj)
for obj in deploy_ray_func_between_two_axis_partitions._remote(
args=(self.axis, func, num_splits, len(self.list_of_blocks), kwargs)
+ tuple(self.list_of_blocks + other_axis_partition.list_of_blocks),
num_return_vals=num_splits,
)
]
args = [self.axis, func, num_splits, kwargs]
args.extend(self.list_of_blocks)
return [
PyarrowOnRayFramePartition(obj)
for obj in deploy_ray_axis_func._remote(args, num_return_vals=num_splits)
] |
python | def main():
"""
NAME
apwp.py
DESCRIPTION
returns predicted paleolatitudes, directions and pole latitude/longitude
from apparent polar wander paths of Besse and Courtillot (2002).
SYNTAX
apwp.py [command line options][< filename]
OPTIONS
-h prints help message and quits
-i allows interactive data entry
        -f file: read plate, lat, lon, age data from file
-F output_file: write output to output_file
-P [NA, SA, AF, IN, EU, AU, ANT, GL] plate
-lat LAT specify present latitude (positive = North; negative=South)
-lon LON specify present longitude (positive = East, negative=West)
-age AGE specify Age in Ma
Note: must have all -P, -lat, -lon, -age or none.
OUTPUT
Age Paleolat. Dec. Inc. Pole_lat. Pole_Long.
"""
infile,outfile,data,indata="","",[],[]
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-F' in sys.argv:
ind=sys.argv.index('-F')
outfile=sys.argv[ind+1]
out=open(outfile,'w')
if '-i' in sys.argv:
print("Welcome to paleolatitude calculator\n")
while 1:
data=[]
print("pick a plate: NA, SA, AF, IN, EU, AU, ANT, GL \n cntl-D to quit")
try:
plate=input("Plate\n").upper()
except:
print("Goodbye \n")
sys.exit()
lat=float(input( "Site latitude\n"))
lon=float(input(" Site longitude\n"))
age=float(input(" Age\n"))
data=[plate,lat,lon,age]
print("Age Paleolat. Dec. Inc. Pole_lat. Pole_Long.")
print(spitout(data))
elif '-f' in sys.argv:
ind=sys.argv.index('-f')
infile=sys.argv[ind+1]
f=open(infile,'r')
inp=f.readlines()
elif '-P' in sys.argv:
ind=sys.argv.index('-P')
plate=sys.argv[ind+1].upper()
if '-lat' in sys.argv:
ind=sys.argv.index('-lat')
lat=float(sys.argv[ind+1])
else:
print(main.__doc__)
sys.exit()
if '-lon' in sys.argv:
ind=sys.argv.index('-lon')
lon=float(sys.argv[ind+1])
else:
print(main.__doc__)
sys.exit()
if '-age' in sys.argv:
ind=sys.argv.index('-age')
age=float(sys.argv[ind+1])
else:
print(main.__doc__)
sys.exit()
data=[plate,lat,lon,age]
outstring=spitout(data)
if outfile=="":
print("Age Paleolat. Dec. Inc. Pole_lat. Pole_Long.")
print(outstring)
else:
out.write(outstring)
sys.exit()
else:
inp=sys.stdin.readlines() # read from standard input
if len(inp)>0:
for line in inp:
data=[]
rec=line.split()
data.append(rec[0])
for k in range(1,4): data.append(float(rec[k]))
indata.append(data)
if len(indata)>0:
for line in indata:
outstring=spitout(line)
if outfile=="":
print(outstring)
else:
out.write(outstring)
else:
print('no input data')
sys.exit() |
java | @SuppressWarnings("unchecked")
@Override
public T param(final String paramName, final Supplier<Object> supplier) {
context().param(paramName, supplier);
return (T) this;
} |
python | def zernike(zernike_indexes,labels,indexes):
"""Compute the Zernike features for the labels with the label #s in indexes
returns the score per labels and an array of one image per zernike feature
"""
#
# "Reverse_indexes" is -1 if a label # is not to be processed. Otherwise
# reverse_index[label] gives you the index into indexes of the label
# and other similarly shaped vectors (like the results)
#
indexes = np.array(indexes,dtype=np.int32)
nindexes = len(indexes)
reverse_indexes = np.empty((np.max(indexes)+1,),int)
reverse_indexes.fill(-1)
reverse_indexes[indexes] = np.arange(indexes.shape[0],dtype=int)
mask = reverse_indexes[labels] != -1
centers,radii = minimum_enclosing_circle(labels,indexes)
ny, nx = labels.shape[0:2]
y, x = np.asarray(np.mgrid[0:ny-1:complex(0,ny),0:nx-1:complex(0,nx)], dtype=float)
xm = x[mask]
ym = y[mask]
lm = labels[mask]
#
# The Zernikes are inscribed in circles with points labeled by
# their fractional distance (-1 <= x,y <= 1) from the center.
# So we transform x and y by subtracting the center and
# dividing by the radius
#
rev_ind = reverse_indexes[lm]
## ym = (ym-centers[reverse_indexes[lm],0]) / radii[reverse_indexes[lm]]
ym -= centers[rev_ind,0]
ym /= radii[rev_ind]
## xm = (xm-centers[reverse_indexes[lm],1]) / radii[reverse_indexes[lm]]
xm -= centers[rev_ind,1]
xm /= radii[rev_ind]
#
# Blow up ym and xm into new x and y vectors
#
x = np.zeros_like(x)
x[mask]=xm
y = np.zeros_like(y)
y[mask]=ym
#
# Pass the resulting x and y through the rest of Zernikeland
#
zf = construct_zernike_polynomials(x, y, zernike_indexes, mask)
score = score_zernike(zf, radii, labels, indexes)
return score |
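The coordinate transform above maps every labelled pixel into the unit disc of its object's minimum enclosing circle, which is the domain on which Zernike polynomials are defined. A standalone sketch of that normalisation step (illustrative values):

```python
import numpy as np

# One object: enclosing-circle center (row, col) = (10.0, 20.0), radius 5.0
ym = np.array([10.0, 15.0])       # pixel rows belonging to the object
xm = np.array([20.0, 20.0])       # pixel cols
center = np.array([10.0, 20.0])
radius = 5.0

ym_n = (ym - center[0]) / radius  # fractional distance from the center
xm_n = (xm - center[1]) / radius
print(ym_n, xm_n)  # [0. 1.] [0. 0.] -- coordinates now satisfy -1 <= x, y <= 1
```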
java | protected void pushToGrid(OpDescriptor descriptor, boolean flush) {
// we should just add op to queue here
//deviceQueues.get().add(descriptor);
// FIXME: following code should be removed, since it's just executing supers instead of batching
execCounter.incrementAndGet();
Op op = descriptor.getOp();
int[] dimensions = descriptor.getDimensions();
if (op instanceof TransformOp) {
TransformOp t = (TransformOp) op;
if (flush)
flushQueue();
//logger.info("Sending TransformOp to CudaExecutioner");
super.invoke(t);
} else if (op instanceof Variance) {
Variance acc = (Variance) op;
if (flush)
flushQueue();
super.naiveExec(acc, dimensions);
} else if (op instanceof ReduceOp) {
ReduceOp acc = (ReduceOp) op;
if (flush)
flushQueue();
//logger.info("Sending AccumulationOp to CudaExecutioner: {}", Arrays.toString(dimensions));
super.naiveExec(acc, dimensions);
} else if (op instanceof ScalarOp) {
ScalarOp sc = (ScalarOp) op;
if (flush)
flushQueue();
//logger.info("Sending ScalarOp to CudaExecutioner");
super.invoke(sc);
} else if (op instanceof BroadcastOp) {
BroadcastOp broadcastOp = (BroadcastOp) op;
if (flush)
flushQueue();
//logger.info("Sending BroadcastOp to CudaExecutioner");
if (dimensions != null) {
super.exec(broadcastOp);
} else {
super.invoke(broadcastOp);
}
} else if (op instanceof IndexAccumulation) {
IndexAccumulation indexAccumulation = (IndexAccumulation) op;
if (flush)
flushQueue();
//logger.info("Sending IndexAccumulationOp to CudaExecutioner");
//super.exec(indexAccumulation, dimensions);
} else if (op instanceof MetaOp) {
// logger.info("Executing MetaOp");
metaCounter.incrementAndGet();
exec((MetaOp) op);
} else if (op instanceof GridOp) {
// logger.info("Executing GridOp");
exec((GridOp) op);
}
} |
java | protected Parser parser(Key jobKey) {
ParserProvider pp = ParserService.INSTANCE.getByInfo(_parse_type);
if (pp != null) {
return pp.createParser(this, jobKey);
}
throw new H2OIllegalArgumentException("Unknown file type. Parse cannot be completed.",
"Attempted to invoke a parser for ParseType:" + _parse_type + ", which doesn't exist.");
} |
python | def includes(self):
"""Return all of the include directories for this chip as a list."""
incs = self.combined_properties('includes')
processed_incs = []
for prop in incs:
if isinstance(prop, str):
processed_incs.append(prop)
else:
processed_incs.append(os.path.join(*prop))
        # All include paths are relative to the base directory of the chip.
fullpaths = [os.path.normpath(os.path.join('.', x)) for x in processed_incs]
fullpaths.append(os.path.normpath(os.path.abspath(self.build_dirs()['build'])))
return fullpaths |
java | public void setDateAttribute(String name, Date value) {
Attribute attribute = getAttributes().get(name);
if (!(attribute instanceof DateAttribute)) {
throw new IllegalStateException("Cannot set date value on attribute with different type, " +
attribute.getClass().getName() + " setting value " + value);
}
((DateAttribute) attribute).setValue(value);
} |
java | public <T> T get(URI uri, Class<T> classType) {
HttpConnection connection = Http.GET(uri);
InputStream response = executeToInputStream(connection);
try {
return getResponse(response, classType, getGson());
} finally {
close(response);
}
} |
java | public static EndpointCaller initializeEndpointCaller(Properties properties) {
EndpointCaller ec;
try {
LOG.debug("Initializing endpoint caller. Checking whether '{}' is in classpath.",
TRACING_ENDPOINT_CALLER_CLASSNAME);
Class<?> tracingEndpointCallerClass = Class.forName(TRACING_ENDPOINT_CALLER_CLASSNAME);
Constructor<?> tracingEndpointCallerConstructor = tracingEndpointCallerClass
.getConstructor(Properties.class);
ec = (EndpointCaller) tracingEndpointCallerConstructor.newInstance(properties);
LOG.debug("Using '{}' instance as endpoint caller object.", TRACING_ENDPOINT_CALLER_CLASSNAME);
} catch (Exception e) {
ec = new BasicEndpointCaller(properties);
LOG.debug("Using '{}' instance as endpoint caller object.", BasicEndpointCaller.class.getName());
}
return ec;
} |
python | def log(ctx, archive_name):
'''
Get the version log for an archive
'''
_generate_api(ctx)
ctx.obj.api.get_archive(archive_name).log() |
python | def _get(self, pos):
"""loads widget at given position; handling invalid arguments"""
res = None, None
if pos is not None:
try:
res = self[pos], pos
except (IndexError, KeyError):
pass
return res |
java | public static boolean shouldRaid(Configuration conf, FileSystem srcFs,
FileStatus stat, Codec codec, List<FileStatus> lfs) throws IOException {
Path p = stat.getPath();
long blockNum = 0L;
if (stat.isDir() != codec.isDirRaid) {
return false;
}
if (tooNewForRaid(stat)) {
return false;
}
blockNum = codec.isDirRaid ?
DirectoryStripeReader.getBlockNum(lfs) : numBlocks(stat);
// if the file/directory has fewer than 2 blocks, then nothing to do
if (blockNum <= RaidState.TOO_SMALL_NOT_RAID_NUM_BLOCKS) {
return false;
}
return !raidedByOtherHighPriCodec(conf, stat, codec);
} |
python | def threshold(requestContext, value, label=None, color=None):
"""
Takes a float F, followed by a label (in double quotes) and a color.
(See ``bgcolor`` in the render\_api_ for valid color names & formats.)
Draws a horizontal line at value F across the graph.
Example::
&target=threshold(123.456, "omgwtfbbq", "red")
"""
[series] = constantLine(requestContext, value)
if label:
series.name = label
if color:
series.color = color
return [series] |
python | def create_textfile_with_contents(filename, contents, encoding='utf-8'):
"""
Creates a textual file with the provided contents in the workdir.
Overwrites an existing file.
"""
ensure_directory_exists(os.path.dirname(filename))
if os.path.exists(filename):
os.remove(filename)
outstream = codecs.open(filename, "w", encoding)
outstream.write(contents)
if contents and not contents.endswith("\n"):
outstream.write("\n")
outstream.flush()
outstream.close()
assert os.path.exists(filename), "ENSURE file exists: %s" % filename |
java | @Override
public EObject create(EClass eClass) {
switch (eClass.getClassifierID()) {
case BpsimPackage.BETA_DISTRIBUTION_TYPE: return createBetaDistributionType();
case BpsimPackage.BINOMIAL_DISTRIBUTION_TYPE: return createBinomialDistributionType();
case BpsimPackage.BOOLEAN_PARAMETER_TYPE: return createBooleanParameterType();
case BpsimPackage.BP_SIM_DATA_TYPE: return createBPSimDataType();
case BpsimPackage.CALENDAR: return createCalendar();
case BpsimPackage.CONSTANT_PARAMETER: return createConstantParameter();
case BpsimPackage.CONTROL_PARAMETERS: return createControlParameters();
case BpsimPackage.COST_PARAMETERS: return createCostParameters();
case BpsimPackage.DATE_TIME_PARAMETER_TYPE: return createDateTimeParameterType();
case BpsimPackage.DISTRIBUTION_PARAMETER: return createDistributionParameter();
case BpsimPackage.DOCUMENT_ROOT: return createDocumentRoot();
case BpsimPackage.DURATION_PARAMETER_TYPE: return createDurationParameterType();
case BpsimPackage.ELEMENT_PARAMETERS: return createElementParameters();
case BpsimPackage.ELEMENT_PARAMETERS_TYPE: return createElementParametersType();
case BpsimPackage.ENUM_PARAMETER_TYPE: return createEnumParameterType();
case BpsimPackage.ERLANG_DISTRIBUTION_TYPE: return createErlangDistributionType();
case BpsimPackage.EXPRESSION_PARAMETER_TYPE: return createExpressionParameterType();
case BpsimPackage.FLOATING_PARAMETER_TYPE: return createFloatingParameterType();
case BpsimPackage.GAMMA_DISTRIBUTION_TYPE: return createGammaDistributionType();
case BpsimPackage.LOG_NORMAL_DISTRIBUTION_TYPE: return createLogNormalDistributionType();
case BpsimPackage.NEGATIVE_EXPONENTIAL_DISTRIBUTION_TYPE: return createNegativeExponentialDistributionType();
case BpsimPackage.NORMAL_DISTRIBUTION_TYPE: return createNormalDistributionType();
case BpsimPackage.NUMERIC_PARAMETER_TYPE: return createNumericParameterType();
case BpsimPackage.PARAMETER: return createParameter();
case BpsimPackage.PARAMETER_VALUE: return createParameterValue();
case BpsimPackage.POISSON_DISTRIBUTION_TYPE: return createPoissonDistributionType();
case BpsimPackage.PRIORITY_PARAMETERS: return createPriorityParameters();
case BpsimPackage.PROPERTY_PARAMETERS: return createPropertyParameters();
case BpsimPackage.PROPERTY_TYPE: return createPropertyType();
case BpsimPackage.RESOURCE_PARAMETERS: return createResourceParameters();
case BpsimPackage.SCENARIO: return createScenario();
case BpsimPackage.SCENARIO_PARAMETERS: return createScenarioParameters();
case BpsimPackage.SCENARIO_PARAMETERS_TYPE: return createScenarioParametersType();
case BpsimPackage.STRING_PARAMETER_TYPE: return createStringParameterType();
case BpsimPackage.TIME_PARAMETERS: return createTimeParameters();
case BpsimPackage.TRIANGULAR_DISTRIBUTION_TYPE: return createTriangularDistributionType();
case BpsimPackage.TRUNCATED_NORMAL_DISTRIBUTION_TYPE: return createTruncatedNormalDistributionType();
case BpsimPackage.UNIFORM_DISTRIBUTION_TYPE: return createUniformDistributionType();
case BpsimPackage.USER_DISTRIBUTION_DATA_POINT_TYPE: return createUserDistributionDataPointType();
case BpsimPackage.USER_DISTRIBUTION_TYPE: return createUserDistributionType();
case BpsimPackage.VENDOR_EXTENSION: return createVendorExtension();
case BpsimPackage.WEIBULL_DISTRIBUTION_TYPE: return createWeibullDistributionType();
default:
throw new IllegalArgumentException("The class '" + eClass.getName() + "' is not a valid classifier");
}
} |
python | def match(self, category, pattern):
"""Match the category."""
return fnmatch.fnmatch(category, pattern, flags=self.FNMATCH_FLAGS) |
java | public DependencyCustomizer ifAnyMissingClasses(String... classNames) {
return new DependencyCustomizer(this) {
@Override
protected boolean canAdd() {
for (String className : classNames) {
try {
DependencyCustomizer.this.loader.loadClass(className);
}
catch (Exception ex) {
return true;
}
}
return false;
}
};
} |
java | protected Object doInvoke(Object... args) throws Exception {
ReflectionUtils.makeAccessible(getBridgedMethod());
try {
return getBridgedMethod().invoke(getBean(), args);
}
catch (IllegalArgumentException ex) {
assertTargetBean(getBridgedMethod(), getBean(), args);
throw new IllegalStateException(
getInvocationErrorMessage(ex.getMessage(), args), ex);
}
catch (InvocationTargetException ex) {
// Unwrap for HandlerExceptionResolvers ...
Throwable targetException = ex.getTargetException();
if (targetException instanceof RuntimeException) {
throw (RuntimeException) targetException;
}
else if (targetException instanceof Error) {
throw (Error) targetException;
}
else if (targetException instanceof Exception) {
throw (Exception) targetException;
}
else {
String msg = getInvocationErrorMessage(
"Failed to invoke controller method", args);
throw new IllegalStateException(msg, targetException);
}
}
} |
python | def view_dupl_sources(token, dstore):
"""
Show the sources with the same ID and the truly duplicated sources
"""
fields = ['source_id', 'code', 'gidx1', 'gidx2', 'num_ruptures']
dic = group_array(dstore['source_info'].value[fields], 'source_id')
sameid = []
dupl = []
for source_id, group in dic.items():
if len(group) > 1: # same ID sources
sources = []
for rec in group:
geom = dstore['source_geom'][rec['gidx1']:rec['gidx2']]
src = Source(source_id, rec['code'], geom, rec['num_ruptures'])
sources.append(src)
if all_equal(sources):
dupl.append(source_id)
sameid.append(source_id)
if not dupl:
return ''
msg = str(dupl) + '\n'
msg += ('Found %d source(s) with the same ID and %d true duplicate(s)'
% (len(sameid), len(dupl)))
fakedupl = set(sameid) - set(dupl)
if fakedupl:
msg += '\nHere is a fake duplicate: %s' % fakedupl.pop()
return msg |
python | def _missing_(cls, value):
"""Lookup function used when value is not found."""
if not (isinstance(value, int) and 0 <= value <= 1):
raise ValueError('%r is not a valid %s' % (value, cls.__name__))
extend_enum(cls, 'Unassigned [%d]' % value, value)
        return cls(value) |
python | def extend(self, other: Operation) -> None:
"""Append gates from circuit to the end of this circuit"""
if isinstance(other, Circuit):
self.elements.extend(other.elements)
else:
self.elements.extend([other]) |
java | public static void calculateCoverage(Path bamPath, Path sqlPath) throws IOException, AlignmentCoverageException {
BamManager bamManager = new BamManager(bamPath);
        // If the bam index (.bai) does not exist, create it
if (!bamPath.getParent().resolve(bamPath.getFileName().toString() + ".bai").toFile().exists()) {
bamManager.createIndex();
}
// Calculate coverage and store in SQLite
SAMFileHeader fileHeader = BamUtils.getFileHeader(bamPath);
// long start = System.currentTimeMillis();
initDatabase(fileHeader.getSequenceDictionary().getSequences(), sqlPath);
// System.out.println("SQLite database initialization, in " + ((System.currentTimeMillis() - start) / 1000.0f)
// + " s.");
Path coveragePath = sqlPath.toAbsolutePath().resolve(bamPath.getFileName() + COVERAGE_SUFFIX);
AlignmentOptions options = new AlignmentOptions();
options.setContained(false);
Iterator<SAMSequenceRecord> iterator = fileHeader.getSequenceDictionary().getSequences().iterator();
PrintWriter writer = new PrintWriter(coveragePath.toFile());
StringBuilder line;
// start = System.currentTimeMillis();
while (iterator.hasNext()) {
SAMSequenceRecord next = iterator.next();
for (int i = 0; i < next.getSequenceLength(); i += MINOR_CHUNK_SIZE) {
Region region = new Region(next.getSequenceName(), i + 1,
Math.min(i + MINOR_CHUNK_SIZE, next.getSequenceLength()));
RegionCoverage regionCoverage = bamManager.coverage(region, null, options);
int meanDepth = Math.min(regionCoverage.meanCoverage(), 255);
// File columns: chunk chromosome start end coverage
// chunk format: chrom_id_suffix, where:
// id: int value starting at 0
// suffix: chunkSize + k
// eg. 3_4_1k
line = new StringBuilder();
line.append(region.getChromosome()).append("_");
line.append(i / MINOR_CHUNK_SIZE).append("_").append(MINOR_CHUNK_SIZE / 1000).append("k");
line.append("\t").append(region.getChromosome());
line.append("\t").append(region.getStart());
line.append("\t").append(region.getEnd());
line.append("\t").append(meanDepth);
writer.println(line.toString());
}
}
writer.close();
// System.out.println("Mean coverage file creation, in " + ((System.currentTimeMillis() - start) / 1000.0f) + " s.");
// save file to db
// start = System.currentTimeMillis();
insertCoverageDB(bamPath, sqlPath);
// System.out.println("SQLite database population, in " + ((System.currentTimeMillis() - start) / 1000.0f) + " s.");
} |
python | def print_diskinfo(diskinfo, widelayout, incolor):
''' Disk information output function. '''
sep = ' '
if opts.relative:
import math
base = max([ disk.ocap for disk in diskinfo ])
for disk in diskinfo:
if disk.ismntd: ico = _diskico
else: ico = _unmnico
if disk.isrem: ico = _remvico
if disk.isopt: ico = _discico
if disk.isnet: ico = _netwico
if disk.isram: ico = _ramico
if disk.isimg: ico = _imgico
if disk.mntp == '/boot/efi':
ico = _gearico
if opts.relative and disk.ocap and disk.ocap != base:
# increase log size reduction by raising to 4th power:
gwidth = int((math.log(disk.ocap, base)**4) * opts.width)
else:
gwidth = opts.width
        # check color settings, ffg: free foreground, ufg: used foreground
if disk.rw:
ffg = ufg = None # auto colors
else:
# dim or dark grey
ffg = ufg = (ansi.dim8 if opts.hicolor else ansi.dim4)
cap = disk.cap
if cap and disk.rw:
lblcolor = ansi.get_label_tmpl(disk.pcnt, opts.width, opts.hicolor)
else:
lblcolor = None
# print stats
data = (
(_usedico, disk.pcnt, ufg, None, pform.boldbar), # Used
(_freeico, 100-disk.pcnt, ffg, None, False), # free
)
mntp = fmtstr(disk.mntp, align='<', trunc='left',
width=(opts.colwidth * 2) + 2)
mntp = mntp.rstrip() # prevent wrap
if disk.label is None:
label = fmtstr(_emptico, dim_templ, align='<')
else:
label = fmtstr(disk.label, align='<')
if widelayout:
out(
fmtstr(ico + sep + disk.dev, align='<') + label
)
if cap:
out(fmtval(cap))
if disk.rw:
out(
fmtval(disk.used, lblcolor) +
fmtval(disk.free, lblcolor)
)
else:
out(
fmtstr() +
fmtstr(_emptico, dim_templ)
)
else:
out(fmtstr(_emptico, dim_templ))
if cap:
if disk.rw: # factoring this caused colored brackets
ansi.rainbar(data, gwidth, incolor,
hicolor=opts.hicolor,
cbrackets=_brckico)
else:
ansi.bargraph(data, gwidth, incolor, cbrackets=_brckico)
if opts.relative and opts.width != gwidth:
out(sep * (opts.width - gwidth))
out(sep + mntp)
print()
else:
out(
fmtstr(ico + sep + disk.dev, align="<") + label
)
if cap:
out(
fmtval(cap) +
fmtval(disk.used, lblcolor) +
fmtval(disk.free, lblcolor)
)
else:
out(fmtstr(_emptico, dim_templ) + fmtstr() + fmtstr())
print(sep, mntp)
if cap:
out(fmtstr())
if disk.rw:
ansi.rainbar(data, gwidth, incolor, hicolor=opts.hicolor,
cbrackets=_brckico)
else:
ansi.bargraph(data, gwidth, incolor, cbrackets=_brckico)
print()
print()
print() |
java | private void generatePartialMatch() {
partialMatch = new int[pattern.length];
int j = 0;
for (int i = 1; i < pattern.length; i++) {
while (j > 0 && pattern[j] != pattern[i]) {
j = partialMatch[j - 1];
}
if (pattern[j] == pattern[i]) {
j++;
}
partialMatch[i] = j;
}
} |
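This is the standard KMP failure-function construction: `partialMatch[i]` is the length of the longest proper prefix of `pattern[0..i]` that is also its suffix. A short worked sketch of the same loop in Python:

```python
def partial_match(pattern):
    # pm[i] = length of the longest proper prefix of pattern[:i+1]
    # that is also a suffix of it (KMP failure function).
    pm = [0] * len(pattern)
    j = 0
    for i in range(1, len(pattern)):
        while j > 0 and pattern[j] != pattern[i]:
            j = pm[j - 1]          # fall back to the next shorter border
        if pattern[j] == pattern[i]:
            j += 1
        pm[i] = j
    return pm

print(partial_match("ababaca"))  # [0, 0, 1, 2, 3, 0, 1]
```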
java | private void encodeFooter(final FacesContext context, final ResponseWriter responseWriter, final Sheet sheet)
throws IOException {
// footer
final UIComponent footer = sheet.getFacet("footer");
if (footer != null) {
responseWriter.startElement("div", null);
responseWriter.writeAttribute("class", "ui-datatable-footer ui-widget-header ui-corner-bottom", null);
footer.encodeAll(context);
responseWriter.endElement("div");
}
} |
python | def coerce_quotes(quotes):
"""Coerce a quote type into an acceptable value, or raise an error."""
orig, quotes = quotes, str(quotes) if quotes else None
if quotes not in [None, '"', "'"]:
raise ValueError("{!r} is not a valid quote type".format(orig))
return quotes |
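Example behaviour, assuming `coerce_quotes` above is in scope: falsy inputs coerce to `None`, and anything other than a single or double quote raises.

```python
print(coerce_quotes("'"))   # '
print(coerce_quotes(None))  # None
try:
    coerce_quotes("`")
except ValueError as exc:
    print(exc)              # '`' is not a valid quote type
```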
java | private ClassDescriptor[] getMultiJoinedClassDescriptors(ClassDescriptor cld)
{
DescriptorRepository repository = cld.getRepository();
Class[] multiJoinedClasses = repository.getSubClassesMultipleJoinedTables(cld, true);
ClassDescriptor[] result = new ClassDescriptor[multiJoinedClasses.length];
for (int i = 0 ; i < multiJoinedClasses.length; i++)
{
result[i] = repository.getDescriptorFor(multiJoinedClasses[i]);
}
return result;
} |
python | def find_files_for_tar(self, context, silent_build):
"""
Return [(filename, arcname), ...] for all the files.
"""
if not context.enabled:
return
files = self.find_files(context, silent_build)
for path in files:
relname = os.path.relpath(path, context.parent_dir)
arcname = "./{0}".format(relname.encode('utf-8').decode('ascii', 'ignore'))
if os.path.exists(path):
yield path, arcname |
python | def get_book_ids_by_comment(self, comment_id):
"""Gets the list of ``Book`` ``Ids`` mapped to a ``Comment``.
arg: comment_id (osid.id.Id): ``Id`` of a ``Comment``
return: (osid.id.IdList) - list of book ``Ids``
raise: NotFound - ``comment_id`` is not found
raise: NullArgument - ``comment_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceBinSession.get_bin_ids_by_resource
mgr = self._get_provider_manager('COMMENTING', local=True)
lookup_session = mgr.get_comment_lookup_session(proxy=self._proxy)
lookup_session.use_federated_book_view()
comment = lookup_session.get_comment(comment_id)
id_list = []
for idstr in comment._my_map['assignedBookIds']:
id_list.append(Id(idstr))
return IdList(id_list) |
python | def get_config_from_env(cls):
"""
.. deprecated:: 2.5.3
Gets configuration out of environment.
Returns list of dicts - list of namenode representations
"""
core_path = os.path.join(os.environ['HADOOP_HOME'], 'conf', 'core-site.xml')
core_configs = cls.read_core_config(core_path)
hdfs_path = os.path.join(os.environ['HADOOP_HOME'], 'conf', 'hdfs-site.xml')
hdfs_configs = cls.read_hdfs_config(hdfs_path)
if (not core_configs) and (not hdfs_configs):
raise Exception("No config found in %s nor in %s" % (core_path, hdfs_path))
configs = {
'use_trash': hdfs_configs.get('use_trash', core_configs.get('use_trash', False)),
'use_sasl': core_configs.get('use_sasl', False),
'hdfs_namenode_principal': hdfs_configs.get('hdfs_namenode_principal', None),
'namenodes': hdfs_configs.get('namenodes', []) or core_configs.get('namenodes', [])
}
return configs |
java | private void isparentlogger(Logger logchannel) {
// get all log channels
List<Logger> referenz = getLoggers();
Iterator<Logger> it_logger = referenz.iterator();
while (it_logger.hasNext()) {
Logger child_test = it_logger.next();
            // if the log channel has the given log channel as parent, its log level is set to the parent one.
if (logchannel.getName().equals(child_test.getParent().getName())) {
isparentlogger(child_test);
child_test.setLevel(null);
}
}
} |
java | public static TimeOfDay fromMillisOfDay(long millisOfDay, Chronology chrono) {
chrono = DateTimeUtils.getChronology(chrono);
chrono = chrono.withUTC();
return new TimeOfDay(millisOfDay, chrono);
} |
java | protected MailtoModel newMailtoModel(final Object[] params)
{
final MailtoModel model = new MailtoModel(newMailToAddressModel(params),
newMailToViewModel(params));
return model;
} |
java | public FSArray getEntity_attributes() {
if (Entity_Type.featOkTst && ((Entity_Type)jcasType).casFeat_entity_attributes == null)
jcasType.jcas.throwFeatMissing("entity_attributes", "de.julielab.jules.types.ace.Entity");
return (FSArray)(jcasType.ll_cas.ll_getFSForRef(jcasType.ll_cas.ll_getRefValue(addr, ((Entity_Type)jcasType).casFeatCode_entity_attributes)));} |
python | def table_output(data):
'''Get a table representation of a dictionary.'''
    if isinstance(data, dict):
        data = list(data.items())
headings = [ item[0] for item in data ]
rows = [ item[1] for item in data ]
    columns = list(zip(*rows))
if len(columns):
widths = [ max([ len(str(y)) for y in row ]) for row in rows ]
else:
widths = [ 0 for c in headings ]
for c, heading in enumerate(headings):
widths[c] = max(widths[c], len(heading))
column_count = range(len(rows))
table = [ ' '.join([ headings[c].ljust(widths[c]) for c in column_count ]) ]
table.append(' '.join([ '=' * widths[c] for c in column_count ]))
for column in columns:
table.append(' '.join([ str(column[c]).ljust(widths[c]) for c in column_count ]))
return '\n'.join(table) |
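The expected input is a mapping (or item list) from column name to that column's values; a usage sketch against the Python 3 form above, assuming `table_output` is in scope:

```python
data = {"name": ("alice", "bob"), "age": (30, 25)}
print(table_output(data))
# name  age
# ===== ===
# alice 30
# bob   25
```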
java | public void appendXMLToAutomaticStyle(final XMLUtil util,
final Appendable appendable) throws IOException {
this.pageLayoutStyle.appendXMLToAutomaticStyle(util, appendable);
} |
python | def main(**kwargs):
"""
Draw a couple of simple graphs and optionally generate an HTML file to upload them
"""
draw_lines()
draw_histogram()
draw_bar_chart()
destination = "-r /report"
if use_html:
generate_html()
command = "dx-build-report-html {h} {d}".format(h=html_filename, d=destination)
else:
command = "dx-build-report-html {l} {b} {h} {d}".format(l=lines_filename, b=bars_filename, h=histogram_filename, d=destination)
sub_output = json.loads(subprocess.check_output(command, shell=True))
output = {}
output["report"] = dxpy.dxlink(sub_output["recordId"])
return output |
java | public ScheduledInstancesNetworkInterface withPrivateIpAddressConfigs(ScheduledInstancesPrivateIpAddressConfig... privateIpAddressConfigs) {
if (this.privateIpAddressConfigs == null) {
setPrivateIpAddressConfigs(new com.amazonaws.internal.SdkInternalList<ScheduledInstancesPrivateIpAddressConfig>(privateIpAddressConfigs.length));
}
for (ScheduledInstancesPrivateIpAddressConfig ele : privateIpAddressConfigs) {
this.privateIpAddressConfigs.add(ele);
}
return this;
} |
java | public Observable<Void> resubmitAsync(String resourceGroupName, String workflowName, String triggerName, String historyName) {
return resubmitWithServiceResponseAsync(resourceGroupName, workflowName, triggerName, historyName).map(new Func1<ServiceResponse<Void>, Void>() {
@Override
public Void call(ServiceResponse<Void> response) {
return response.body();
}
});
} |
java | private void performNextWrite() {
Append append = getNextAppend();
if (append == null) {
return;
}
long traceId = LoggerHelpers.traceEnter(log, "storeAppend", append);
Timer timer = new Timer();
storeAppend(append)
.whenComplete((v, e) -> {
handleAppendResult(append, e, timer);
LoggerHelpers.traceLeave(log, "storeAppend", traceId, v, e);
})
.whenComplete((v, e) -> append.getData().release());
} |
python | def pip(self, points, sorted_col=0, radius=0):
"""
Point-in-Polygon for the z=0 projection. This function enhances
the performance of ``Polygon.contains()`` by verifying only the
points which are inside the bounding box of the polygon. To do
it fast, it needs the points array to be already sorted by one
column.
:param points: list of *(x, y, z) or (x, y)* coordinates of the
points to check. (The z value will not be taken into
account).
:type points: ndarray (shape=(N, 2 or 3))
:param sorted_col: Index of the sorted column (0 or 1).
:type sorted_col: int
:param radius: Enlarge Polygons domain by a specified quantity.
:type radius: float
:returns: Which points are inside the polygon.
:rtype: ndarray (dtpye=bool)
.. warning:: By default pip considers that the set of points is
currently sorted by the first column.
.. warning:: This method only works if the polygon has been
locked (:func:`lock`).
"""
xy = points[:, :2]
n_points = xy.shape[0]
index = np.arange(n_points, dtype = int)
b = self.domain
b[0] = b[0] - radius
b[1] = b[1] + radius
# Slicing the sorted column
k = np.searchsorted(xy[:, sorted_col],
(b[0, sorted_col], b[1, sorted_col]+1e-10))
xy = xy[k[0]:k[1]]
index = index[k[0]:k[1]]
# solution
k = index[self.path.contains_points(xy, radius=radius)]
sol = np.zeros(n_points, dtype=bool)
sol[k] = True
return sol |
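The speed trick is the `searchsorted` pre-filter: because the points are pre-sorted by one coordinate, the polygon's bounding interval selects a contiguous slice of candidates before the exact `contains_points` test runs. A standalone sketch of that slicing step (illustrative only):

```python
import numpy as np

# Points sorted by column 0 (x); keep only candidates whose x lies
# inside the polygon's bounding interval [xmin, xmax].
pts = np.array([[0.1, 5.0], [0.4, 0.2], [0.6, 0.9], [2.0, 0.5]])
xmin, xmax = 0.3, 1.0
k = np.searchsorted(pts[:, 0], (xmin, xmax + 1e-10))
candidates = pts[k[0]:k[1]]
print(candidates)  # [[0.4 0.2] [0.6 0.9]] -- the exact point-in-polygon
                   # test now runs on this slice only
```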
java | private void writeObject(ObjectOutputStream outStream) throws IOException {
PutField fields = outStream.putFields();
fields.put(BEGIN_DEFAULT, beginDefaultContext);
outStream.writeFields();
} |
python | def palettize(arr, colors, values):
"""From start *values* apply *colors* to *data*.
"""
new_arr = np.digitize(arr.ravel(),
np.concatenate((values,
[max(np.nanmax(arr),
values.max()) + 1])))
new_arr -= 1
new_arr = new_arr.clip(min=0, max=len(values) - 1)
try:
new_arr = np.ma.array(new_arr.reshape(arr.shape), mask=arr.mask)
except AttributeError:
new_arr = new_arr.reshape(arr.shape)
return new_arr, tuple(colors) |
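A usage sketch (assuming `palettize` above is in scope): `np.digitize` assigns each pixel the index of the last start value it passed, so the result indexes directly into the colour tuple.

```python
import numpy as np

arr = np.array([[0.5, 2.5],
                [7.0, 9.0]])
values = np.array([0.0, 2.0, 5.0])      # bin start values
colors = ["blue", "green", "red"]

new_arr, cmap = palettize(arr, colors, values)
print(new_arr)  # [[0 1]
                #  [2 2]]  -- indices into cmap
print(cmap)     # ('blue', 'green', 'red')
```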
python | def map_data(self, map_name):
"""Return the map data for a map by name or path."""
with gfile.Open(os.path.join(self.data_dir, "Maps", map_name), "rb") as f:
return f.read() |
python | def settlement_method(self):
"""
[str] 交割方式,’CashSettlementRequired’ - 现金交割, ‘PhysicalSettlementRequired’ - 实物交割(期货专用)
"""
try:
return self.__dict__["settlement_method"]
except (KeyError, ValueError):
raise AttributeError(
"Instrument(order_book_id={}) has no attribute 'settlement_method' ".format(self.order_book_id)
) |
java | @Override
public Date getPreviousStart(long base, int prevRawOffset, int prevDSTSavings, boolean inclusive) {
int[] fields = Grego.timeToFields(base, null);
int year = fields[0];
if (year > endYear) {
return getFinalStart(prevRawOffset, prevDSTSavings);
}
Date d = getStartInYear(year, prevRawOffset, prevDSTSavings);
if (d != null && (d.getTime() > base || (!inclusive && (d.getTime() == base)))) {
d = getStartInYear(year - 1, prevRawOffset, prevDSTSavings);
}
return d;
} |
java | public DoubleMatrix2D solve(DoubleMatrix2D B) {
if (B.rows() != dim) {
log.error("wrong dimension of vector b: expected " + dim + ", actual " + B.rows());
throw new RuntimeException("wrong dimension of vector b: expected " + dim + ", actual " + B.rows());
}
// with scaling, we must solve U.Q.U.z = U.b, after that we have x = U.z
if (this.rescaler != null) {
// B = ALG.mult(this.U, B);
B = ColtUtils.diagonalMatrixMult(this.U, B);
}
int nOfColumns = B.columns();
// Solve LY = B (the same as L.Yc = Bc for each column Yc e Bc)
final double[][] Y = new double[dim][nOfColumns];
for (int i = 0; i < diagonalLength; i++) {
double LII = LData[0][i];
double[] YI = Y[i];
DoubleMatrix1D BI = B.viewRow(i);
for (int col = 0; col < nOfColumns; col++) {
YI[col] = BI.getQuick(col) / LII;
}
}
for (int i = diagonalLength; i < dim; i++) {
double[] LI = LData[i - diagonalLength + 1];
double LII = LI[i];
double[] sum = new double[nOfColumns];
for (int j = 0; j < i; j++) {
double LIJ = LI[j];
double[] YJ = Y[j];
for (int col = 0; col < nOfColumns; col++) {
sum[col] += LIJ * YJ[col];
}
}
double[] YI = Y[i];
DoubleMatrix1D BI = B.viewRow(i);
for (int col = 0; col < nOfColumns; col++) {
YI[col] = (BI.getQuick(col) - sum[col]) / LII;
}
}
// Solve L[T].X = Y (the same as L[T].Xc = Yc for each column)
final DoubleMatrix2D X = F2.make(dim, nOfColumns);
for (int i = dim - 1; i > diagonalLength - 1; i--) {
double LII = LData[i - diagonalLength + 1][i];
double[] sum = new double[nOfColumns];
for (int j = dim - 1; j > i; j--) {
double[] LJ = LData[j - diagonalLength + 1];
double LJI = LJ[i];
DoubleMatrix1D XJ = X.viewRow(j);
for (int col = 0; col < nOfColumns; col++) {
sum[col] += LJI * XJ.getQuick(col);
}
}
DoubleMatrix1D XI = X.viewRow(i);
double[] YI = Y[i];
for (int col = 0; col < nOfColumns; col++) {
XI.setQuick(col, (YI[col] - sum[col]) / LII);
}
}
for (int i = diagonalLength - 1; i > -1; i--) {
double LII = LData[0][i];
double[] sum = new double[nOfColumns];
for (int j = dim - 1; j > diagonalLength - 1; j--) {
double[] LJ = LData[j - diagonalLength + 1];
double LJI = LJ[i];
DoubleMatrix1D XJ = X.viewRow(j);
for (int col = 0; col < nOfColumns; col++) {
sum[col] += LJI * XJ.getQuick(col);
}
}
DoubleMatrix1D XI = X.viewRow(i);
double[] YI = Y[i];
for (int col = 0; col < nOfColumns; col++) {
XI.setQuick(col, (YI[col] - sum[col]) / LII);
}
}
if (this.rescaler != null) {
// return ALG.mult(this.U, X);
return ColtUtils.diagonalMatrixMult(this.U, X);
} else {
return X;
}
} |
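The two halves of `solve` above are plain forward substitution on L.Y = B followed by back substitution on L[T].X = Y, specialized to the part-diagonal storage of L and the optional rescaling. For reference, a compact Python sketch of the same two-phase solve for a dense Cholesky factor, without those optimizations:

import numpy as np

def cholesky_solve(L, B):
    """Solve (L @ L.T) @ X = B for X, given lower-triangular L."""
    n = L.shape[0]
    Y = np.zeros_like(B, dtype=float)
    for i in range(n):                       # forward: L @ Y = B
        Y[i] = (B[i] - L[i, :i] @ Y[:i]) / L[i, i]
    X = np.zeros_like(B, dtype=float)
    for i in range(n - 1, -1, -1):           # backward: L.T @ X = Y
        X[i] = (Y[i] - L[i + 1:, i] @ X[i + 1:]) / L[i, i]
    return X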
java | private void buildSubJobBatchWorkUnits() {
List<Flow> flows = this.split.getFlows();
parallelBatchWorkUnits = new ArrayList<BatchFlowInSplitWorkUnit>();
// Build all sub jobs from flows in split
synchronized (subJobs) {
for (Flow flow : flows) {
subJobs.add(PartitionedStepBuilder.buildFlowInSplitSubJob(jobContext, this.split, flow));
}
// We may have seen this job id before, in which case split-flows need a special "always restart" behavior.
for (JSLJob job : subJobs) {
int count = batchKernel.getJobInstanceCount(job.getId());
FlowInSplitBuilderConfig config = new FlowInSplitBuilderConfig(job, completedWorkQueue, rootJobExecutionId);
if (count == 0) {
parallelBatchWorkUnits.add(batchKernel.buildNewFlowInSplitWorkUnit(config));
} else if (count == 1) {
parallelBatchWorkUnits.add(batchKernel.buildOnRestartFlowInSplitWorkUnit(config));
} else {
throw new IllegalStateException("There is an inconsistency somewhere in the internal subjob creation");
}
}
}
} |
python | def calculate_size(name, include_value, local_only):
""" Calculates the request payload size"""
data_size = 0
data_size += calculate_size_str(name)
data_size += BOOLEAN_SIZE_IN_BYTES
data_size += BOOLEAN_SIZE_IN_BYTES
return data_size |
java | Observable<ChatResult> synchroniseConversation(String conversationId) {
return checkState().flatMap(client -> client.service().messaging()
.queryMessages(conversationId, null, 1)
.map(result -> {
if (result.isSuccessful() && result.getResult() != null) {
return (long) result.getResult().getLatestEventId();
}
return -1L;
})
.flatMap(result -> persistenceController.getConversation(conversationId).map(loaded -> compare(result, loaded)))
.flatMap(this::updateLocalConversationList)
.flatMap(result -> lookForMissingEvents(client, result))
.map(result -> new ChatResult(result.isSuccessful, null)));
} |
python | def convert(ast):
"""Convert BEL1 AST Function to BEL2 AST Function"""
if ast and ast.type == "Function":
# Activity function conversion
if (
ast.name != "molecularActivity"
and ast.name in spec["namespaces"]["Activity"]["list"]
):
print("name", ast.name, "type", ast.type)
ast = convert_activity(ast)
return ast # Otherwise - this will trigger on the BEL2 molecularActivity
# translocation conversion
elif ast.name in ["tloc", "translocation"]:
ast = convert_tloc(ast)
fus_flag = False
for idx, arg in enumerate(ast.args):
if arg.__class__.__name__ == "Function":
# Fix substitution -> variation()
if arg.name in ["sub", "substitution"]:
ast.args[idx] = convert_sub(arg)
elif arg.name in ["trunc", "truncation"]:
ast.args[idx] = convert_trunc(arg)
elif arg.name in ["pmod", "proteinModification"]:
ast.args[idx] = convert_pmod(arg)
elif arg.name in ["fus", "fusion"]:
fus_flag = True
# Recursively process Functions
ast.args[idx] = convert(ast.args[idx])
if fus_flag:
ast = convert_fus(ast)
return ast |
python | def _filter_unique_identities(uidentities, matcher):
"""Filter a set of unique identities.
This function will use the `matcher` to generate a list
of `FilteredIdentity` objects. It will return a tuple
with the list of filtered objects, the unique identities
not filtered and a table mapping uuids with unique
identities.
"""
filtered = []
no_filtered = []
uuids = {}
for uidentity in uidentities:
n = len(filtered)
filtered += matcher.filter(uidentity)
if len(filtered) > n:
uuids[uidentity.uuid] = uidentity
else:
no_filtered.append([uidentity])
return filtered, no_filtered, uuids |
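A sketch of the contract this function expects, with a stub matcher; all names below are illustrative, not the real matcher API, and a real `matcher.filter()` would return FilteredIdentity objects rather than the input itself:

class StubMatcher:
    def filter(self, uidentity):
        # pretend only identities carrying an email survive the filter
        return [uidentity] if getattr(uidentity, 'email', None) else []

filtered, no_filtered, uuids = _filter_unique_identities(uidentities, StubMatcher())
# filtered: flat list of filter results; uuids: uuid -> unique identity
# no_filtered: one single-element list per identity the matcher rejected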
python | def _get_uid(name):
"""Returns an uid, given a user name."""
if getpwnam is None or name is None:
return None
try:
result = getpwnam(name)
except KeyError:
result = None
if result is not None:
return result[2]
return None |
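For context, `getpwnam` here is typically a guarded import of `pwd.getpwnam` (POSIX only), and index 2 of the returned struct is the `pw_uid` field; a hedged sketch of how the guard and the resulting behavior usually look:

try:
    from pwd import getpwnam      # POSIX only; absent on Windows
except ImportError:
    getpwnam = None

print(_get_uid('root'))           # 0 on most Unix systems
print(_get_uid('no-such-user'))   # None: the KeyError is swallowed
print(_get_uid(None))             # None: short-circuits before lookup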
python | def namedb_get_names_owned_by_address( cur, address, current_block ):
"""
Get the list of non-expired, non-revoked names owned by an address.
Only works if there is a *singular* address for the name.
"""
unexpired_fragment, unexpired_args = namedb_select_where_unexpired_names( current_block )
select_query = "SELECT name FROM name_records JOIN namespaces ON name_records.namespace_id = namespaces.namespace_id " + \
"WHERE name_records.address = ? AND name_records.revoked = 0 AND " + unexpired_fragment + ";"
args = (address,) + unexpired_args
name_rows = namedb_query_execute( cur, select_query, args )
names = []
for name_row in name_rows:
names.append( name_row['name'] )
if len(names) == 0:
return None
else:
return names |
python | def _timing_char(message):
"""
>>> message = 'MORSE CODE'
>>> _timing_char(message)
'M------ O---------- R------ S---- E C---------- O---------- D------ E'
"""
s = ''
inter_symb = ' '
inter_char = ' ' * 3
inter_word = inter_symb * 7
for i, word in enumerate(_split_message(message)):
if i >= 1:
s += inter_word
for j, c in enumerate(word):
if j != 0:
s += inter_char
s += _char_to_string_binary(c, align=ALIGN.LEFT)
return s |
java | public static int probRound(double value, Random rand) {
if (value >= 0) {
double lower = Math.floor(value);
double prob = value - lower;
if (rand.nextDouble() < prob) {
return (int)lower + 1;
} else {
return (int)lower;
}
} else {
double lower = Math.floor(Math.abs(value));
double prob = Math.abs(value) - lower;
if (rand.nextDouble() < prob) {
return -((int)lower + 1);
} else {
return -(int)lower;
}
}
} |
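The same probabilistic (stochastic) rounding idea in a short Python sketch — the expected value of the result equals the input, which is the property the Java code above preserves for negative values too:

import math
import random

def prob_round(value, rng=random):
    """Round the magnitude up with probability equal to its fractional part."""
    sign = -1 if value < 0 else 1
    magnitude = abs(value)
    lower = math.floor(magnitude)
    bump = 1 if rng.random() < magnitude - lower else 0
    return sign * (lower + bump)

mean = sum(prob_round(2.3) for _ in range(100_000)) / 100_000
# mean ≈ 2.3: the rounding is unbiased in expectation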
java | public static long randomLongLessThan(long maxExclusive) {
checkArgument(
maxExclusive > Long.MIN_VALUE, "Cannot produce long less than %s", Long.MIN_VALUE);
return randomLong(Long.MIN_VALUE, maxExclusive);
} |
python | def next(self):
"""
Gets next entry as a dictionary.
Returns:
object - Object key/value pair representing a row.
{key1: value1, key2: value2, ...}
"""
try:
entry = {}
row = self._csv_reader.next()
for i in range(0, len(row)):
entry[self._headers[i]] = row[i]
return entry
except Exception as e:
# close our file when we're done reading.
self._file.close()
raise e |
python | def get_keys(self, lst):
"""
return a list of pk values from object list
"""
pk_name = self.get_pk_name()
if self.is_pk_composite():
return [[getattr(item, pk) for pk in pk_name] for item in lst]
else:
return [getattr(item, pk_name) for item in lst] |
python | def sI(qubit: Qubit, coefficient: complex = 1.0) -> Pauli:
"""Return the Pauli sigma_I (identity) operator. The qubit is irrelevant,
but kept as an argument for consistency"""
return Pauli.sigma(qubit, 'I', coefficient) |
python | def called_with(self, *args, **kwargs):
"""
Before evaluating subsequent predicates, calls :attr:`subject` with given arguments (but unlike a direct call,
catches and transforms any exceptions that arise during the call).
"""
self._args = args
self._kwargs = kwargs
self._call_subject = True
return CallableInspector(self) |