language (stringclasses, 2 values) | func_code_string (stringlengths, 63–466k)
---|---|
python | def children_to_list(node):
    """Organize children structure."""
    if node['type'] == 'item' and len(node['children']) == 0:
        del node['children']
    else:
        node['type'] = 'folder'
        node['children'] = list(node['children'].values())
        node['children'].sort(key=lambda x: x['name'])
        # materialize map() so children stays a list under Python 3
        node['children'] = list(map(children_to_list, node['children']))
    return node |
python | def nemo_accpars(self, vo, ro):
    """
    NAME:
       nemo_accpars
    PURPOSE:
       return the accpars potential parameters for use of this potential with NEMO
    INPUT:
       vo - velocity unit in km/s
       ro - length unit in kpc
    OUTPUT:
       accpars string
    HISTORY:
       2014-12-18 - Written - Bovy (IAS)
    """
    try:
        return self._nemo_accpars(vo, ro)
    except AttributeError:
        raise AttributeError('NEMO acceleration parameters not supported for %s' % self.__class__.__name__) |
java | public static String format(String str, Object... args) {
    str = str.replaceAll("\\{}", "%s");
    return String.format(str, args);
} |
python | def calculate(self):
    """do the TPM calculation"""
    self._calculated = True
    for name in self.transcripts:
        self.transcripts[name]['RPK'] = (float(self.transcripts[name]['count'])/float(self.transcripts[name]['length']))/float(1000)
    tot = 0.0
    for name in self.transcripts:
        tot += self.transcripts[name]['RPK']
    tot = tot/float(1000000)
    for name in self.transcripts:
        self.transcripts[name]['TPM'] = self.transcripts[name]['RPK']/tot
    # do the FPKM calculation
    tot = 0
    for name in self.transcripts:
        tot += self.transcripts[name]['count']
    tot = float(tot)/float(1000000)
    for name in self.transcripts:
        if tot > 0:
            rpm = float(self.transcripts[name]['count'])/float(tot)
            self.transcripts[name]['FPKM'] = 1000*rpm/self.transcripts[name]['length'] |
python | def infos(self):
    """:py:class:`OrbitInfos` object of ``self``
    """
    if not hasattr(self, '_infos'):
        self._infos = OrbitInfos(self)
    return self._infos |
python | def get_fault_type_dummy_variables(self, rup):
    """
    Fault-type classification dummy variable based on rup.rake.
    "``H`` is 1 for a strike-slip mechanism and 0 for a reverse mechanism"
    (p. 1201).
    Note:
    UserWarning is raised if mechanism is determined to be normal
    faulting, since as summarized in Table 2 on p. 1197 the data used
    for regression included only reverse and strike-slip events.
    """
    # normal faulting
    is_normal = np.array(
        self.RAKE_THRESH < -rup.rake < (180. - self.RAKE_THRESH))
    # reverse faulting
    is_reverse = np.array(
        self.RAKE_THRESH < rup.rake < (180. - self.RAKE_THRESH))
    if not self.ALREADY_WARNED and is_normal.any():
        # make sure that the warning is printed only once to avoid
        # flooding the terminal
        msg = ('Normal faulting not supported by %s; '
               'treating as strike-slip' % type(self).__name__)
        warnings.warn(msg, UserWarning)
        self.ALREADY_WARNED = True
    is_strike_slip = ~is_reverse | is_normal
    is_strike_slip = is_strike_slip.astype(float)
    return is_strike_slip |
java | public static String paragraphs(int paragraphCount, boolean supplemental) {
    List<String> paragraphList = new ArrayList<String>();
    for (int i = 0; i < paragraphCount; i++) {
        paragraphList.add(paragraph(3, supplemental, 3));
    }
    String joined = StringUtils.join(paragraphList, "\n\n");
    return joined;
} |
java | @Override
public void writeBinaryData(byte[] sBuf, int sOffset, int sLength)
{
    byte[] tBuf = _buffer;
    int tOffset = _offset;
    int tLength = tBuf.length;
    int end = sOffset + sLength;
    while (sOffset < end) {
        if (tLength - tOffset < 1) {
            tOffset = flush(tOffset);
        }
        int sublen = Math.min(tLength - tOffset, end - sOffset);
        System.arraycopy(sBuf, sOffset, tBuf, tOffset, sublen);
        tOffset += sublen;
        sOffset += sublen;
    }
    _offset = tOffset;
} |
python | def to_float(value, default=_marker):
    """Converts the passed in value to a float number
    :param value: The value to be converted to a floatable number
    :type value: str, float, int
    :returns: The float number representation of the passed in value
    :rtype: float
    """
    if not is_floatable(value):
        if default is not _marker:
            return to_float(default)
        fail("Value %s is not floatable" % repr(value))
    return float(value) |
java | public static ConfluentRegistryAvroDeserializationSchema<GenericRecord> forGeneric(Schema schema, String url,
        int identityMapCapacity) {
    return new ConfluentRegistryAvroDeserializationSchema<>(
            GenericRecord.class,
            schema,
            new CachedSchemaCoderProvider(url, identityMapCapacity));
} |
python | def on_message(self, handler, msg):
    """ In remote debugging mode this simply acts as a forwarding
    proxy for the two clients.
    """
    if self.remote_debugging:
        #: Forward to other clients
        for h in self.handlers:
            if h != handler:
                h.write_message(msg, True)
    else:
        print(msg) |
java | @Override
public int getNumberOfDevices() {
    if (numberOfDevices.get() < 0) {
        synchronized (this) {
            if (numberOfDevices.get() < 1) {
                numberOfDevices.set(NativeOpsHolder.getInstance().getDeviceNativeOps().getAvailableDevices());
            }
        }
    }
    return numberOfDevices.get();
} |
java | @Override
public void resetPMICounters() {
    // TODO needs to change if cache provider supports PMI counters.
    final String methodName = "resetPMICounters()";
    if (tc.isDebugEnabled()) {
        Tr.debug(tc, methodName + " cacheName=" + cacheName);
    }
} |
java | protected byte[] serializeStreamValue(final int iIndex) throws IOException {
    if (serializedValues[iIndex] <= 0) {
        // NEW OR MODIFIED: MARSHALL CONTENT
        OProfiler.getInstance().updateCounter("OMVRBTreeMapEntry.serializeValue", 1);
        return ((OMVRBTreeMapProvider<K, V>) treeDataProvider).valueSerializer.toStream(values[iIndex]);
    }
    // RETURN ORIGINAL CONTENT
    return stream.getAsByteArray(serializedValues[iIndex]);
} |
java | @Override
public void run()
{
    try {
        while (! isClosed() && _is.read() > 0 && _in.readMessage(_is)) {
            ServiceRef.flushOutbox();
        }
    } catch (EOFException e) {
        log.finer(this + " end of file");
        if (log.isLoggable(Level.ALL)) {
            log.log(Level.ALL, e.toString(), e);
        }
    } catch (SocketException e) {
        e.printStackTrace();
        log.finer(this + " socket closed:" + e);
        if (log.isLoggable(Level.ALL)) {
            log.log(Level.ALL, e.toString(), e);
        }
    } catch (IOException e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    } catch (Throwable e) {
        e.printStackTrace();
        throw e;
    } finally {
        close();
    }
} |
python | def to_json(self):
    """ Creates a JSON serializable representation of this instance
    Returns:
        :obj:`dict`: For example,
        {
            "lat": 9.3470298,
            "lon": 3.79274,
            "time": "2016-07-15T15:27:53.574110"
        }
    """
    return {
        'lat': self.lat,
        'lon': self.lon,
        'time': self.time.isoformat() if self.time is not None else None
    } |
python | def _get_next_server(self):
    """Returns a valid redis server or raises a TransportException"""
    current_try = 0
    max_tries = len(self._servers)
    while current_try < max_tries:
        server_index = self._raise_server_index()
        server = self._servers[server_index]
        down_until = server['down_until']
        self._logger.debug('Checking server ' + str(current_try + 1) + '/' + str(max_tries) + ': ' + server['url'])
        if down_until == 0:
            self._logger.debug('Elected server: ' + server['url'])
            return server
        if down_until < time.time():
            if self._is_reachable(server):
                server['down_until'] = 0
                self._logger.debug('Elected server: ' + server['url'])
                return server
            else:
                self._logger.debug('Server still unavailable: ' + server['url'])
                server['down_until'] = time.time() + 5
        current_try += 1
    raise TransportException('Cannot reach any redis server') |
java | public OvhVrackNetwork serviceName_vrack_network_vrackNetworkId_GET(String serviceName, Long vrackNetworkId) throws IOException {
    String qPath = "/ipLoadbalancing/{serviceName}/vrack/network/{vrackNetworkId}";
    StringBuilder sb = path(qPath, serviceName, vrackNetworkId);
    String resp = exec(qPath, "GET", sb.toString(), null);
    return convertTo(resp, OvhVrackNetwork.class);
} |
python | def project(self, points):
    """Project 3D points to image coordinates.
    This projects 3D points expressed in the camera coordinate system to image points.
    Parameters
    --------------------
    points : (3, N) ndarray
        3D points
    Returns
    --------------------
    image_points : (2, N) ndarray
        The world points projected to the image plane
    """
    K = self.camera_matrix
    XU = points
    XU = XU / np.tile(XU[2], (3, 1))
    X = self.apply(XU)
    x2d = np.dot(K, X)
    return from_homogeneous(x2d) |
python | def _post(self, url, data):
    """
    Helper method: POST data to a given URL on TBA's API.
    :param url: URL string to post data to and hash.
    :param data: JSON data to post and hash.
    :return: Requests Response object.
    """
    signature = md5((self.auth_secret + '/api/trusted/v1/' + url % self.event_key + data).encode('utf-8')).hexdigest()
    return self.session.post(self.WRITE_URL_PRE + url % self.event_key, data=data,
                             headers={'X-TBA-Auth-Sig': signature}) |
python | def asignTopUnit(self, top, topName):
    """
    Set hwt unit as template for component
    """
    self._top = top
    self.name = topName
    pack = self._packager
    self.model.addDefaultViews(topName, pack.iterParams(top))
    for intf in pack.iterInterfaces(self._top):
        self.registerInterface(intf)
        if intf._isExtern:
            self.busInterfaces.append(intf)
    self.busInterfaces.sort(key=lambda x: x._name)
    for intf in self.busInterfaces:
        biClass = None
        try:
            biClass = intf._getIpCoreIntfClass()
        except IntfIpMetaNotSpecified:
            pass
        if biClass is not None:
            bi = BusInterface.fromBiClass(intf, biClass, self._packager)
            intf._bi = bi
            bi.busType.postProcess(self, self._packager, intf)
    # generate component parameters
    compNameParam = Parameter()
    compNameParam.name = "Component_Name"
    compNameParam.value = Value()
    v = compNameParam.value
    v.id = "PARAM_VALUE.Component_Name"
    v.resolve = "user"
    v.text = self.name
    self.parameters.append(compNameParam)
    # generic as parameters
    for _p in pack.iterParams(self._top):
        p = Parameter()
        p.name = pack.getParamPhysicalName(_p)
        p.value = self._packager.paramToIpValue(
            "PARAM_VALUE.", _p, Value.RESOLVE_USER)
        self.parameters.append(p) |
python | def print_detail_scan_summary(json_data, names=None):
    '''
    Print a detailed summary of the data returned from
    a CVE scan.
    '''
    clean = True
    sevs = ['Critical', 'Important', 'Moderate', 'Low']
    cve_summary = json_data['host_results']
    image_template = " {0:10}: {1}"
    cve_template = " {0:10}: {1}"
    for image in cve_summary.keys():
        image_res = cve_summary[image]
        writeOut("")
        writeOut(image[:12])
        if not image_res['isRHEL']:
            writeOut(image_template.format("Result",
                                           "Not based on Red Hat "
                                           "Enterprise Linux"))
            continue
        else:
            writeOut(image_template.format("OS", image_res['os'].rstrip()))
            scan_results = image_res['cve_summary']['scan_results']
            for sev in sevs:
                if sev in scan_results:
                    clean = False
                    writeOut(image_template.format(sev,
                                                   str(scan_results[sev]['num'])))
                    for cve in scan_results[sev]['cves']:
                        writeOut(cve_template.format("CVE", cve['cve_title']))
                        writeOut(cve_template.format("CVE URL",
                                                     cve['cve_ref_url']))
                        writeOut(cve_template.format("RHSA ID",
                                                     cve['rhsa_ref_id']))
                        writeOut(cve_template.format("RHSA URL",
                                                     cve['rhsa_ref_url']))
    writeOut("")
    return clean |
java | public void addComparator(Comparator<T> comparator, boolean reverse) {
    checkLocked();
    comparatorChain.add(comparator);
    if (reverse) {
        orderingBits.set(comparatorChain.size() - 1);
    }
} |
java | public SynchronizeFxServer newChannel(final Object root, final String channelName,
        final Executor modelChangeExecutor, final ServerCallback callback) {
    synchronized (channels) {
        if (channels.containsKey(channelName)) {
            throw new IllegalArgumentException("A new SynchronizeFX channel with the name \"" + channelName
                    + "\" should be created but a channel with this name already exists.");
        }
        final SynchronizeFXWebsocketChannel channel = new SynchronizeFXWebsocketChannel(this, serializer);
        final SynchronizeFxServer server =
                modelChangeExecutor == null ? new SynchronizeFxServer(root, channel, callback)
                        : new SynchronizeFxServer(root, channel, modelChangeExecutor, callback);
        channels.put(channelName, channel);
        servers.put(server, channel);
        return server;
    }
} |
python | def get_interface_detail_output_interface_logical_hardware_address(self, **kwargs):
    """Auto Generated Code
    """
    config = ET.Element("config")
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    logical_hardware_address = ET.SubElement(interface, "logical-hardware-address")
    logical_hardware_address.text = kwargs.pop('logical_hardware_address')
    callback = kwargs.pop('callback', self._callback)
    return callback(config) |
java | protected void logResources() throws SystemException {
    if (tc.isEntryEnabled())
        Tr.entry(tc, "logResources", _resourcesLogged);
    if (!_resourcesLogged) {
        for (int i = 0; i < _resourceObjects.size(); i++) {
            final JTAResource resource = _resourceObjects.get(i);
            if (resource.getResourceStatus() == StatefulResource.PREPARED) {
                recordLog(resource);
            }
        }
        _resourcesLogged = true;
    }
    if (tc.isEntryEnabled())
        Tr.exit(tc, "logResources");
} |
python | def _validate_jp2h(self, boxes):
    """Validate the JP2 Header box."""
    self._check_jp2h_child_boxes(boxes, 'top-level')
    jp2h_lst = [box for box in boxes if box.box_id == 'jp2h']
    jp2h = jp2h_lst[0]
    # 1st jp2 header box cannot be empty.
    if len(jp2h.box) == 0:
        msg = "The JP2 header superbox cannot be empty."
        raise IOError(msg)
    # 1st jp2 header box must be ihdr
    if jp2h.box[0].box_id != 'ihdr':
        msg = ("The first box in the jp2 header box must be the image "
               "header box.")
        raise IOError(msg)
    # colr must be present in jp2 header box.
    colr_lst = [j for (j, box) in enumerate(jp2h.box)
                if box.box_id == 'colr']
    if len(colr_lst) == 0:
        msg = "The jp2 header box must contain a color definition box."
        raise IOError(msg)
    colr = jp2h.box[colr_lst[0]]
    self._validate_channel_definition(jp2h, colr) |
java | public ImageDescriptor image(SarlAgent agent) {
    final JvmDeclaredType jvmElement = this.jvmModelAssociations.getInferredType(agent);
    return this.images.forAgent(
            agent.getVisibility(),
            this.adornments.get(jvmElement));
} |
java | public Integer getMaxStatements()
{
    // look the value up once instead of three times
    String maxStatements = childNode.getTextValueForPatternName("max-statements");
    if (maxStatements != null && !maxStatements.equals("null")) {
        return Integer.valueOf(maxStatements);
    }
    return null;
} |
java | public static SparseVector fromCollection(Collection<? extends Number> list) {
    return Vector.fromCollection(list).to(Vectors.SPARSE);
} |
java | public static boolean isKnownEOFException (@Nullable final Class <?> aClass)
{
    if (aClass == null)
        return false;
    final String sClass = aClass.getName ();
    return sClass.equals ("java.io.EOFException") ||
           sClass.equals ("org.mortbay.jetty.EofException") ||
           sClass.equals ("org.eclipse.jetty.io.EofException") ||
           sClass.equals ("org.apache.catalina.connector.ClientAbortException");
} |
java | private static Collection<JobIdWithStatus> combine(
        Collection<JobIdWithStatus> first,
        Collection<JobIdWithStatus> second) {
    checkNotNull(first);
    checkNotNull(second);
    ArrayList<JobIdWithStatus> result = new ArrayList<>(first.size() + second.size());
    result.addAll(first);
    result.addAll(second);
    return result;
} |
python | def get_starred_segments(self, limit=None):
    """
    Returns a summary representation of the segments starred by the
    authenticated user. Pagination is supported.
    http://strava.github.io/api/v3/segments/#starred
    :param limit: (optional), limit number of starred segments returned.
    :type limit: int
    :return: An iterator of :class:`stravalib.model.Segment` starred by authenticated user.
    :rtype: :class:`BatchedResultsIterator`
    """
    params = {}
    if limit is not None:
        params["limit"] = limit
    result_fetcher = functools.partial(self.protocol.get,
                                       '/segments/starred')
    return BatchedResultsIterator(entity=model.Segment,
                                  bind_client=self,
                                  result_fetcher=result_fetcher,
                                  limit=limit) |
python | def predict_from_variants(
        self,
        variants,
        transcript_expression_dict=None,
        gene_expression_dict=None):
    """
    Predict epitopes from a Variant collection, filtering options, and
    optional gene and transcript expression data.
    Parameters
    ----------
    variants : varcode.VariantCollection
    transcript_expression_dict : dict
        Maps from Ensembl transcript IDs to FPKM expression values.
    gene_expression_dict : dict, optional
        Maps from Ensembl gene IDs to FPKM expression values.
    Returns DataFrame with the following columns:
        - variant
        - gene
        - gene_id
        - transcript_id
        - transcript_name
        - effect
        - effect_type
        - peptide
        - peptide_offset
        - peptide_length
        - allele
        - affinity
        - percentile_rank
        - prediction_method_name
        - contains_mutant_residues
        - mutation_start_in_peptide
        - mutation_end_in_peptide
    Optionally will also include the following columns if corresponding
    expression dictionary inputs are provided:
        - gene_expression
        - transcript_expression
    """
    # pre-filter variants by checking if any of the genes or
    # transcripts they overlap have sufficient expression.
    # I'm tolerating the redundancy of this code since it's much cheaper
    # to filter a variant *before* trying to predict its impact/effect
    # on the protein sequence.
    variants = apply_variant_expression_filters(
        variants,
        transcript_expression_dict=transcript_expression_dict,
        transcript_expression_threshold=self.min_transcript_expression,
        gene_expression_dict=gene_expression_dict,
        gene_expression_threshold=self.min_gene_expression)
    effects = variants.effects(raise_on_error=self.raise_on_error)
    return self.predict_from_mutation_effects(
        effects=effects,
        transcript_expression_dict=transcript_expression_dict,
        gene_expression_dict=gene_expression_dict) |
python | def iter_islast(iterable):
    """Generate (item, islast) pairs for an iterable.
    Generates pairs where the first element is an item from the iterable
    source and the second element is a boolean flag indicating if it is
    the last item in the sequence.
    """
    it = iter(iterable)
    prev = next(it)
    for item in it:
        yield prev, False
        prev = item
    yield prev, True |
python | def censor(input_text):
    """ Returns the input string with profanity replaced with a random string
    of characters plucked from the censor_characters pool.
    """
    ret = input_text
    words = get_words()
    for word in words:
        curse_word = re.compile(re.escape(word), re.IGNORECASE)
        cen = "".join(get_censor_char() for i in list(word))
        ret = curse_word.sub(cen, ret)
    return ret |
java | public void checkBlockableReadSequence(long readSequence) {
    if (isTooLargeSequence(readSequence)) {
        throw new IllegalArgumentException("sequence:" + readSequence
                + " is too large. The current tailSequence is:" + tailSequence());
    }
    if (isStaleSequence(readSequence)) {
        throw new StaleSequenceException("sequence:" + readSequence
                + " is too small and data store is disabled. "
                + "The current headSequence is:" + headSequence()
                + " tailSequence is:" + tailSequence(), headSequence());
    }
} |
java | protected File asAbsoluteFile( File f )
{
    if ( f.isAbsolute() )
    {
        return f;
    }
    return new File( getBasedir(), f.getPath() );
} |
java | @Override
public void onDestroy() {
    super.onDestroy();
    eventRegister.unregisterEventBuses();
    if (getControllerClass() != null) {
        try {
            Mvc.graph().dereference(controller, getControllerClass(), null);
        } catch (ProviderMissingException e) {
            // should never happen
            Logger logger = LoggerFactory.getLogger(getClass());
            logger.warn("Failed to dereference controller " + getControllerClass().getName(), e);
        }
    }
    Mvc.graph().release(this);
    Mvc.graph().unregisterMonitor(graphMonitor);
} |
python | def save_riskmodel(self):
    """
    Save the risk models in the datastore
    """
    self.datastore['risk_model'] = rm = self.riskmodel
    self.datastore['taxonomy_mapping'] = self.riskmodel.tmap
    attrs = self.datastore.getitem('risk_model').attrs
    attrs['min_iml'] = hdf5.array_of_vstr(sorted(rm.min_iml.items()))
    self.datastore.set_nbytes('risk_model') |
java | public Promise<Void> force(boolean metaData) {
    return sanitize(ofBlockingRunnable(executor, () -> {
        try {
            channel.force(metaData);
        } catch (IOException e) {
            throw new UncheckedException(e);
        }
    }));
} |
python | def enable_firewall_ruleset(host,
                            username,
                            password,
                            ruleset_enable,
                            ruleset_name,
                            protocol=None,
                            port=None,
                            esxi_hosts=None,
                            credstore=None):
    '''
    Enable or disable an ESXi firewall rule set.
    host
        The location of the host.
    username
        The username used to login to the host, such as ``root``.
    password
        The password used to login to the host.
    ruleset_enable
        True to enable the ruleset, false to disable.
    ruleset_name
        Name of ruleset to target.
    protocol
        Optionally set to alternate protocol if the host is not using the default
        protocol. Default protocol is ``https``.
    port
        Optionally set to alternate port if the host is not using the default
        port. Default port is ``443``.
    esxi_hosts
        If ``host`` is a vCenter host, then use esxi_hosts to execute this function
        on a list of one or more ESXi machines.
    credstore
        Optionally set to path to the credential store file.
    :return: A standard cmd.run_all dictionary, per host.
    CLI Example:
    .. code-block:: bash
        # Used for ESXi host connection information
        salt '*' vsphere.enable_firewall_ruleset my.esxi.host root bad-password True 'syslog'
        # Used for connecting to a vCenter Server
        salt '*' vsphere.enable_firewall_ruleset my.vcenter.location root bad-password True 'syslog' \
          esxi_hosts='[esxi-1.host.com, esxi-2.host.com]'
    '''
    cmd = 'network firewall ruleset set --enabled {0} --ruleset-id={1}'.format(
        ruleset_enable, ruleset_name
    )
    ret = {}
    if esxi_hosts:
        if not isinstance(esxi_hosts, list):
            raise CommandExecutionError('\'esxi_hosts\' must be a list.')
        for esxi_host in esxi_hosts:
            response = salt.utils.vmware.esxcli(host, username, password, cmd,
                                                protocol=protocol, port=port,
                                                esxi_host=esxi_host, credstore=credstore)
            ret.update({esxi_host: response})
    else:
        # Handles a single host or a vCenter connection when no esxi_hosts are provided.
        response = salt.utils.vmware.esxcli(host, username, password, cmd,
                                            protocol=protocol, port=port,
                                            credstore=credstore)
        ret.update({host: response})
    return ret |
java | private static ReloadableType searchForReloadableType(int typeId, TypeRegistry typeRegistry) {
    ReloadableType reloadableType;
    reloadableType = typeRegistry.getReloadableTypeInTypeRegistryHierarchy(
            NameRegistry.getTypenameById(typeId));
    typeRegistry.rememberReloadableType(typeId, reloadableType);
    return reloadableType;
} |
java | public int findColumn(final String columnLabel) throws SQLException {
    if (this.queryResult.getResultSetType() == ResultSetType.SELECT) {
        try {
            return ((SelectQueryResult) queryResult).getColumnId(columnLabel) + 1;
        } catch (NoSuchColumnException e) {
            throw SQLExceptionMapper.getSQLException("No such column: " + columnLabel, e);
        }
    }
    throw SQLExceptionMapper.getSQLException("Cannot get column id of update result sets");
} |
python | def is_valid(obj: JSGValidateable, log: Optional[Union[TextIO, Logger]] = None) -> bool:
    """ Determine whether obj is valid
    :param obj: Object to validate
    :param log: Logger to record validation failures. If absent, no information is recorded
    """
    return obj._is_valid(log) |
java | public CompletableFuture<Object> putAsync(final Consumer<HttpConfig> configuration) {
    return CompletableFuture.supplyAsync(() -> put(configuration), getExecutor());
} |
java | public boolean fling(float velocityX, float velocityY, float velocityZ) {
    boolean scrolled = true;
    float viewportX = mScrollable.getViewPortWidth();
    if (Float.isNaN(viewportX)) {
        viewportX = 0;
    }
    float maxX = Math.min(MAX_SCROLLING_DISTANCE,
            viewportX * MAX_VIEWPORT_LENGTHS);
    float viewportY = mScrollable.getViewPortHeight();
    if (Float.isNaN(viewportY)) {
        viewportY = 0;
    }
    float maxY = Math.min(MAX_SCROLLING_DISTANCE,
            viewportY * MAX_VIEWPORT_LENGTHS);
    float xOffset = (maxX * velocityX) / VELOCITY_MAX;
    float yOffset = (maxY * velocityY) / VELOCITY_MAX;
    Log.d(Log.SUBSYSTEM.LAYOUT, TAG, "fling() velocity = [%f, %f, %f] offset = [%f, %f]",
            velocityX, velocityY, velocityZ,
            xOffset, yOffset);
    if (equal(xOffset, 0)) {
        xOffset = Float.NaN;
    }
    if (equal(yOffset, 0)) {
        yOffset = Float.NaN;
    }
    // TODO: Think about Z-scrolling
    mScrollable.scrollByOffset(xOffset, yOffset, Float.NaN, mInternalScrollListener);
    return scrolled;
} |
python | def __parse_blacklist(self, json):
    """Parse blacklist entries using Sorting Hat format.
    The Sorting Hat blacklist format is a JSON stream that
    stores a list of blacklisted entries.
    Next, there is an example of a valid stream:
    {
        "blacklist": [
            "John Doe",
            "John Smith",
            "[email protected]"
        ]
    }
    :param json: JSON stream to parse
    :raises InvalidFormatError: raised when the format of the stream is
        not valid.
    """
    try:
        for entry in json['blacklist']:
            if not entry:
                msg = "invalid json format. Blacklist entries cannot be null or empty"
                raise InvalidFormatError(cause=msg)
            excluded = self.__encode(entry)
            bl = self._blacklist.get(excluded, None)
            if not bl:
                bl = MatchingBlacklist(excluded=excluded)
                self._blacklist[excluded] = bl
    except KeyError as e:
        msg = "invalid json format. Attribute %s not found" % e.args
        raise InvalidFormatError(cause=msg) |
java | public static List<? extends CmsPrincipal> filterFlag(List<? extends CmsPrincipal> principals, int flag) {
    Iterator<? extends CmsPrincipal> it = principals.iterator();
    while (it.hasNext()) {
        CmsPrincipal p = it.next();
        if ((p.getFlags() & flag) != flag) {
            it.remove();
        }
    }
    return principals;
} |
python | def route(self, url, host=None):
    """Decorator that registers a handler class for the given URL
    (and optional host).
    """
    def fn(handler_cls):
        handlers = self._get_handlers_on_host(host)
        handlers.insert(0, (url, handler_cls))
        return handler_cls
    return fn |
python | def run(self):
    """Run all runners, blocking until completion or error"""
    self._logger.info('starting all runners')
    try:
        with self._lock:
            # is_set(), not set(): Event.set() returns None, so the
            # original assertion could never fire
            assert not self.running.is_set(), 'cannot re-run: %s' % self
            self.running.set()
        thread_runner = self.runners[threading]
        for runner in self.runners.values():
            if runner is not thread_runner:
                thread_runner.register_payload(runner.run)
        if threading.current_thread() == threading.main_thread():
            asyncio_main_run(root_runner=thread_runner)
        else:
            thread_runner.run()
    except Exception as err:
        self._logger.exception('runner terminated: %s', err)
        raise RuntimeError from err
    finally:
        self._stop_runners()
        self._logger.info('stopped all runners')
        self.running.clear() |
java | public static KeyStore load(String path, char[] password) {
    try {
        return load(new FileInputStream(path), password);
    } catch (FileNotFoundException e) {
        throw new TrustManagerLoadFailedException(e);
    }
} |
python | def resolve(self, other: Type) -> Optional[Type]:
    """See ``PlaceholderType.resolve``"""
    if not isinstance(other, NltkComplexType):
        return None
    expected_second = ComplexType(NUMBER_TYPE,
                                  ComplexType(ANY_TYPE, ComplexType(ComplexType(ANY_TYPE, ANY_TYPE),
                                                                    ANY_TYPE)))
    resolved_second = other.second.resolve(expected_second)
    if resolved_second is None:
        return None
    # The lambda function that we use inside the argmax must take either a number or a date as
    # an argument.
    lambda_arg_type = other.second.second.second.first.first
    if lambda_arg_type.resolve(NUMBER_TYPE) is None and lambda_arg_type.resolve(DATE_TYPE) is None:
        return None
    try:
        # This is the first #1 in the type signature above.
        selector_function_type = resolved_second.second.first
        # This is the second #1 in the type signature above.
        quant_function_argument_type = resolved_second.second.second.first.second
        # This is the third #1 in the type signature above.
        return_type = resolved_second.second.second.second
        # All three placeholder (ph) types above should resolve against each other.
        resolved_first_ph = selector_function_type.resolve(quant_function_argument_type)
        resolved_first_ph.resolve(return_type)
        resolved_second_ph = quant_function_argument_type.resolve(resolved_first_ph)
        resolved_second_ph.resolve(return_type)
        resolved_third_ph = return_type.resolve(resolved_first_ph)
        resolved_third_ph = return_type.resolve(resolved_second_ph)
        if not resolved_first_ph or not resolved_second_ph or not resolved_third_ph:
            return None
        return ArgExtremeType(resolved_first_ph, lambda_arg_type)
    except AttributeError:
        return None |
java | public static String getShortID(String id) {
    String canonicalID = getCanonicalCLDRID(id);
    if (canonicalID == null) {
        return null;
    }
    return getShortIDFromCanonical(canonicalID);
} |
java | public void materializeFullObject(Object target)
{
    ClassDescriptor cld = broker.getClassDescriptor(target.getClass());
    // don't force, let OJB use the user settings
    final boolean forced = false;
    if (forceProxies) {
        broker.getReferenceBroker().retrieveProxyReferences(target, cld, forced);
        broker.getReferenceBroker().retrieveProxyCollections(target, cld, forced);
    } else {
        broker.getReferenceBroker().retrieveReferences(target, cld, forced);
        broker.getReferenceBroker().retrieveCollections(target, cld, forced);
    }
} |
python | def db_open(cls, impl, working_dir):
    """
    Open a connection to our chainstate db
    """
    path = config.get_snapshots_filename(impl, working_dir)
    return cls.db_connect(path) |
python | def call_chunks(self, chunks):
    '''
    Iterate over a list of chunks and call them, checking for requires.
    '''
    # Check for any disabled states
    disabled = {}
    if 'state_runs_disabled' in self.opts['grains']:
        for low in chunks[:]:
            state_ = '{0}.{1}'.format(low['state'], low['fun'])
            for pat in self.opts['grains']['state_runs_disabled']:
                if fnmatch.fnmatch(state_, pat):
                    comment = (
                        'The state function "{0}" is currently disabled by "{1}", '
                        'to re-enable, run state.enable {1}.'
                    ).format(
                        state_,
                        pat,
                    )
                    _tag = _gen_tag(low)
                    disabled[_tag] = {'changes': {},
                                      'result': False,
                                      'comment': comment,
                                      '__run_num__': self.__run_num,
                                      '__sls__': low['__sls__']}
                    self.__run_num += 1
                    chunks.remove(low)
                    break
    running = {}
    for low in chunks:
        if '__FAILHARD__' in running:
            running.pop('__FAILHARD__')
            return running
        tag = _gen_tag(low)
        if tag not in running:
            # Check if this low chunk is paused
            action = self.check_pause(low)
            if action == 'kill':
                break
            running = self.call_chunk(low, running, chunks)
            if self.check_failhard(low, running):
                return running
        self.active = set()
    while True:
        if self.reconcile_procs(running):
            break
        time.sleep(0.01)
    ret = dict(list(disabled.items()) + list(running.items()))
    return ret |
java | private void handleQueries(
        final HttpServletResponse response,
        final Map<String, List<String>> queries,
        final String version) throws IOException {
    LOG.log(Level.INFO, "HttpServerReefEventHandler handleQueries is called");
    for (final Map.Entry<String, List<String>> entry : queries.entrySet()) {
        final String queryTarget = entry.getKey().toLowerCase();
        switch (queryTarget) {
        case "id":
            if (version.equals(VER)) {
                writeEvaluatorInfoJsonOutput(response, entry.getValue());
            } else {
                writeEvaluatorInfoWebOutput(response, entry.getValue());
            }
            break;
        default:
            response.getWriter().println("Unsupported query : " + queryTarget);
            break;
        }
    }
} |
python | def _set_properties(self):
    """Sets dialog title and size limitations of the widgets"""
    self.SetTitle("CSV Export")
    self.SetSize((600, 600))
    for button in [self.button_cancel, self.button_apply, self.button_ok]:
        button.SetMinSize((80, 28)) |
python | def access_key(self):
    """
    The access key id used to sign the request.
    If the access key is not in the same credential scope as this request,
    an AttributeError exception is raised.
    """
    credential = self.query_parameters.get(_x_amz_credential)
    if credential is not None:
        credential = url_unquote(credential[0])
    else:
        credential = self.authorization_header_parameters.get(_credential)
    if credential is None:
        raise AttributeError("Credential was not passed in the request")
    try:
        key, scope = credential.split("/", 1)
    except ValueError:
        raise AttributeError("Invalid request credential: %r" % credential)
    if scope != self.credential_scope:
        raise AttributeError("Incorrect credential scope: %r (wanted %r)" %
                             (scope, self.credential_scope))
    return key |
java | public static Map<String, Object> toMap(XmlReaders readers) {
    Node root = readers.getNode("xml");
    if (root == null) {
        return Collections.emptyMap();
    }
    NodeList children = root.getChildNodes();
    if (children.getLength() == 0) {
        return Collections.emptyMap();
    }
    Map<String, Object> data = new HashMap<>(children.getLength());
    Node n;
    for (int i = 0; i < children.getLength(); i++) {
        n = children.item(i);
        data.put(n.getNodeName(), n.getTextContent());
    }
    return data;
} |
python | def create_initial_tree(channel):
    """ create_initial_tree: Create initial tree structure
    Args:
        channel (Channel): channel to construct
    Returns: tree manager to run rest of steps
    """
    # Create channel manager with channel data
    config.LOGGER.info(" Setting up initial channel structure... ")
    tree = ChannelManager(channel)
    # Make sure channel structure is valid
    config.LOGGER.info(" Validating channel structure...")
    channel.print_tree()
    tree.validate()
    config.LOGGER.info(" Tree is valid\n")
    return tree |
python | def get_logger(name, CFG=None):
    """set up logging for a service using the py 2.7 dictConfig
    """
    logger = logging.getLogger(name)
    if CFG:
        # Make log directory if it doesn't exist
        for handler in CFG.get('handlers', {}).itervalues():
            if 'filename' in handler:
                log_dir = os.path.dirname(handler['filename'])
                if not os.path.exists(log_dir):
                    os.makedirs(log_dir)
        try:
            # TODO: This requires python 2.7
            logging.config.dictConfig(CFG)
        except AttributeError:
            print >> sys.stderr, '"logging.config.dictConfig" doesn\'t seem to be supported in your python'
            raise
    return logger |
python | def forward(self, inputs, begin_state=None):  # pylint: disable=arguments-differ
    """Defines the forward computation. Arguments can be either
    :py:class:`NDArray` or :py:class:`Symbol`.
    Parameters
    -----------
    inputs : NDArray
        input tensor with shape `(sequence_length, batch_size)`
        when `layout` is "TNC".
    begin_state : list
        initial recurrent state tensor with length equals to num_layers-1.
        the initial state with shape `(num_layers, batch_size, num_hidden)`
    Returns
    --------
    out: NDArray
        output tensor with shape `(sequence_length, batch_size, input_size)`
        when `layout` is "TNC".
    out_states: list
        output recurrent state tensor with length equals to num_layers-1.
        the state with shape `(num_layers, batch_size, num_hidden)`
    encoded_raw: list
        The list of last output of the model's encoder.
        the shape of last encoder's output `(sequence_length, batch_size, num_hidden)`
    encoded_dropped: list
        The list of last output with dropout of the model's encoder.
        the shape of last encoder's dropped output `(sequence_length, batch_size, num_hidden)`
    """
    encoded = self.embedding(inputs)
    if not begin_state:
        begin_state = self.begin_state(batch_size=inputs.shape[1])
    encoded_raw = []
    encoded_dropped = []
    encoded, state = self.encoder(encoded, begin_state)
    encoded_raw.append(encoded)
    if self._dropout:
        encoded = nd.Dropout(encoded, p=self._dropout, axes=(0,))
    out = self.decoder(encoded)
    return out, state, encoded_raw, encoded_dropped |
java | public int doStartTag() throws JspException
{
    if (_rolloverImage != null && getJavaScriptAttribute(ONMOUSEOVER) == null) {
        // cause the roll over script to be inserted
        WriteRenderAppender writer = new WriteRenderAppender(pageContext);
        ScriptRequestState srs = ScriptRequestState.getScriptRequestState((HttpServletRequest) pageContext.getRequest());
        srs.writeFeature(getScriptReporter(), writer, CoreScriptFeature.ROLLOVER, true, false, null);
    }
    return EVAL_BODY_BUFFERED;
} |
python | def _clean_empty(d):
    """Remove None values from a dict."""
    if not isinstance(d, (dict, list)):
        return d
    if isinstance(d, list):
        return [v for v in (_clean_empty(v) for v in d) if v is not None]
    return {
        k: v for k, v in
        ((k, _clean_empty(v)) for k, v in d.items())
        if v is not None
    } |
python | def setContext(self, font, feaFile, compiler=None):
    """ Populate a temporary `self.context` namespace, which is reset
    after each new call to `_write` method.
    Subclasses can override this to provide contextual information
    which depends on other data, or set any temporary attributes.
    The default implementation sets:
    - the current font;
    - the current FeatureFile object;
    - the current compiler instance (only present when this writer was
      instantiated from a FeatureCompiler);
    - a set of features (tags) to be generated. If self.mode is "skip",
      these are all the features which are _not_ already present.
    Returns the context namespace instance.
    """
    todo = set(self.features)
    if self.mode == "skip":
        existing = ast.findFeatureTags(feaFile)
        todo.difference_update(existing)
    self.context = SimpleNamespace(
        font=font, feaFile=feaFile, compiler=compiler, todo=todo
    )
    return self.context |
python | def set_widgets(self):
    """Set widgets on the Field tab."""
    self.clear_further_steps()
    purpose = self.parent.step_kw_purpose.selected_purpose()
    subcategory = self.parent.step_kw_subcategory.selected_subcategory()
    unit = self.parent.step_kw_unit.selected_unit()
    layer_mode = self.parent.step_kw_layermode.selected_layermode()
    # Set mode
    # Notes(IS) I hard coded this one, need to fix it after it's working.
    field_key = self.parent.field_keyword_for_the_layer()
    if field_key == population_count_field['key']:
        self.mode = MULTI_MODE
    else:
        self.mode = SINGLE_MODE
    # Filtering based on field type
    layer_field = definition(field_key)
    layer_field_types = deepcopy(layer_field['type'])
    if not isinstance(layer_field_types, list):
        layer_field_types = [layer_field_types]
    # Remove string for continuous layer
    if layer_mode == layer_mode_continuous and unit:
        if QVariant.String in layer_field_types:
            layer_field_types.remove(QVariant.String)
    if purpose == layer_purpose_aggregation:
        question_text = field_question_aggregation
    elif layer_mode == layer_mode_continuous and unit:
        subcategory_unit_relation = get_question_text(
            '%s_%s_question' % (subcategory['key'], unit['key']))
        if 'MISSING' in subcategory_unit_relation:
            subcategory_unit_relation = self.tr(
                '{subcategory} in {unit} unit').format(
                subcategory=subcategory['name'].lower(),
                unit=unit['plural_name'])
        question_text = field_question_subcategory_unit % (
            purpose['name'],
            subcategory['name'],
            unit['name'],
            subcategory_unit_relation)
    else:
        question_text = field_question_subcategory_classified % (
            subcategory['name'].lower(), subcategory['name'].lower())
    if self.mode == SINGLE_MODE:
        question_text += tr('\nYou can select 1 field only.')
        self.lstFields.setSelectionMode(QAbstractItemView.SingleSelection)
    elif self.mode == MULTI_MODE:
        question_text += tr(
            '\nYou can select more than 1 field. InaSAFE will sum up the '
            'value of the fields that you choose.')
        self.lstFields.setSelectionMode(
            QAbstractItemView.ExtendedSelection)
    self.lblSelectField.setText(question_text)
    self.lstFields.clear()
    default_item = None
    for field in self.parent.layer.fields():
        # Skip if it's not in the field types requirement
        if field.type() not in layer_field_types:
            continue
        field_name = field.name()
        item = QListWidgetItem(field_name, self.lstFields)
        item.setData(Qt.UserRole, field_name)
        # Select the item if it match the unit's default_attribute
        if unit and 'default_attribute' in unit \
                and field_name == unit['default_attribute']:
            default_item = item
        # For continuous data, gray out id, gid, fid and text fields
        if self.parent.step_kw_layermode.\
                selected_layermode() == layer_mode_continuous and unit:
            field_type = field.type()
            if field_type > 9 or re.match('.{0,2}id$', field_name, re.I):
                continue  # Don't show unmatched field type
    if default_item:
        self.lstFields.setCurrentItem(default_item)
    self.lblDescribeField.clear()
    # Set values based on existing keywords (if already assigned)
    field_keyword = self.parent.field_keyword_for_the_layer()
    inasafe_field_keywords = self.parent.get_existing_keyword(
        'inasafe_fields')
    if inasafe_field_keywords:
        fields = inasafe_field_keywords.get(field_keyword)
        if isinstance(fields, str):
            fields = [fields]
        if fields:
            option_fields = []
            for index in range(self.lstFields.count()):
                option_fields.append(
                    str(self.lstFields.item(index).text()))
            for field in fields:
                if field in option_fields:
                    self.lstFields.item(option_fields.index(
                        field)).setSelected(True)
    self.auto_select_one_item(self.lstFields)
    if self.selected_fields():
        self.parent.pbnNext.setEnabled(True)
    else:
        self.parent.pbnNext.setEnabled(False) |
python | def auto_tweet(sender, instance, *args, **kwargs):
    """
    Allows auto-tweeting newly created object to twitter
    on accounts configured in settings.
    You MUST create an app to allow oAuth authentication to work:
    -- https://dev.twitter.com/apps/
    You also must set the app to "Read and Write" access level,
    and create an access token. Whew.
    """
    if not twitter or getattr(settings, 'TWITTER_SETTINGS') is False:
        # print 'WARNING: Twitter account not configured.'
        return False
    if not kwargs.get('created'):
        return False
    twitter_key = settings.TWITTER_SETTINGS
    try:
        api = twitter.Api(
            consumer_key=twitter_key['consumer_key'],
            consumer_secret=twitter_key['consumer_secret'],
            access_token_key=twitter_key['access_token_key'],
            access_token_secret=twitter_key['access_token_secret']
        )
    except Exception as error:
        print("failed to authenticate: {}".format(error))
        # bail out: `api` is unbound if authentication failed
        return False
    text = instance.text
    if instance.link:
        link = instance.link
    else:
        link = instance.get_absolute_url()
    text = '{} {}'.format(text, link)
    try:
        api.PostUpdate(text)
    except Exception as error:
        print("Error posting to twitter: {}".format(error)) |
java | private Entry setParent(Entry entry, Entry parent) {
    unlinkFromNeighbors(entry);
    entry.oParent = parent;
    parent.oFirstChild = mergeLists(entry, parent.oFirstChild);
    parent.degree++;
    entry.isMarked = false;
    return parent;
} |
java | public Observable<DocumentFragment<Mutation>> execute(PersistTo persistTo, long timeout, TimeUnit timeUnit) {
    return execute(persistTo, ReplicateTo.NONE, timeout, timeUnit);
} |
java | public ActivityTypeInfos withTypeInfos(ActivityTypeInfo... typeInfos) {
    if (this.typeInfos == null) {
        setTypeInfos(new java.util.ArrayList<ActivityTypeInfo>(typeInfos.length));
    }
    for (ActivityTypeInfo ele : typeInfos) {
        this.typeInfos.add(ele);
    }
    return this;
} |
java | public Set<String> getCommonPropertyAsSet(String key) {
    Set<String> propertiesSet = new HashSet<>();
    StringTokenizer tk = new StringTokenizer(props.getProperty(PropertiesBundleConstant.PROPS_PREFIX + key, ""),
            ",");
    while (tk.hasMoreTokens()) {
        propertiesSet.add(tk.nextToken().trim());
    }
    return propertiesSet;
} |
java | public void marshall(ReservationUtilizationGroup reservationUtilizationGroup, ProtocolMarshaller protocolMarshaller) {
    if (reservationUtilizationGroup == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(reservationUtilizationGroup.getKey(), KEY_BINDING);
        protocolMarshaller.marshall(reservationUtilizationGroup.getValue(), VALUE_BINDING);
        protocolMarshaller.marshall(reservationUtilizationGroup.getAttributes(), ATTRIBUTES_BINDING);
        protocolMarshaller.marshall(reservationUtilizationGroup.getUtilization(), UTILIZATION_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
} |
java | private void writeAmountOfNodesBack(List<ExampleQuery> exampleQueries)
{
    String sqlTemplate = "UPDATE _" + EXAMPLE_QUERIES_TAB + " SET nodes=?, used_ops=CAST(? AS text[]) WHERE example_query=?;";
    for (ExampleQuery eQ : exampleQueries)
    {
        getJdbcTemplate().update(sqlTemplate, eQ.getNodes(), eQ.getUsedOperators(), eQ.getExampleQuery());
    }
} |
java | @Override
public UserGroupInformation getProxiedUser(final Props userProp)
        throws HadoopSecurityManagerException {
    final String userToProxy = verifySecureProperty(userProp, JobProperties.USER_TO_PROXY);
    final UserGroupInformation user = getProxiedUser(userToProxy);
    if (user == null) {
        throw new HadoopSecurityManagerException(
                "Proxy as any user in unsecured grid is not supported!");
    }
    return user;
} |
python | def from_local_repository(repository_path, refspec=None):
    """ Retrieves the git context from a local git repository.
    :param repository_path: Path to the git repository to retrieve the context from
    :param refspec: The commit(s) to retrieve
    """
    context = GitContext()
    # If no refspec is defined, fallback to the last commit on the current branch
    if refspec is None:
        # We tried many things here e.g.: defaulting to e.g. HEAD or HEAD^... (incl. dealing with
        # repos that only have a single commit - HEAD^... doesn't work there), but then we still get into
        # problems with e.g. merge commits. Easiest solution is just taking the SHA from `git log -1`.
        sha_list = [_git("log", "-1", "--pretty=%H", _cwd=repository_path).replace(u"\n", u"")]
    else:
        sha_list = _git("rev-list", refspec, _cwd=repository_path).split()
    for sha in sha_list:
        # Get info from the local git repository: https://git-scm.com/docs/pretty-formats
        long_format = "--pretty=%aN%x00%aE%x00%ai%x00%P%n%B"
        raw_commit = _git("log", sha, "-1", long_format, _cwd=repository_path).split("\n")
        (name, email, date, parents), commit_msg = raw_commit[0].split('\x00'), "\n".join(raw_commit[1:])
        commit_parents = parents.split(" ")
        commit_is_merge_commit = len(commit_parents) > 1
        # changed files in last commit
        changed_files = _git("diff-tree", "--no-commit-id", "--name-only", "-r", sha, _cwd=repository_path).split()
        # "YYYY-MM-DD HH:mm:ss Z" -> ISO 8601-like format
        # Use arrow for datetime parsing, because apparently python is quirky around ISO-8601 dates:
        # http://stackoverflow.com/a/30696682/381010
        commit_date = arrow.get(ustr(date), "YYYY-MM-DD HH:mm:ss Z").datetime
        # Create Git commit object with the retrieved info
        commit_msg_obj = GitCommitMessage.from_full_message(commit_msg)
        commit = GitCommit(context, commit_msg_obj, sha=sha, author_name=name,
                           author_email=email, date=commit_date, changed_files=changed_files,
                           parents=commit_parents, is_merge_commit=commit_is_merge_commit)
        context.commits.append(commit)
    return context |
java | public static OutputStream marshal(Document document, OutputStream out, String encoding) throws CmsXmlException {
    try {
        OutputFormat format = OutputFormat.createPrettyPrint();
        format.setEncoding(encoding);
        XMLWriter writer = new XMLWriter(out, format);
        writer.setEscapeText(false);
        writer.write(document);
        writer.close();
    } catch (Exception e) {
        throw new CmsXmlException(Messages.get().container(Messages.ERR_MARSHALLING_XML_DOC_0), e);
    }
    return out;
} |
java | private void checkExistingCriteriaForUserBasedLimit(QueryWhere queryWhere, String userId, UserGroupCallback userGroupCallback) {
    List<String> groupIds = userGroupCallback.getGroupsForUser(userId);
    Set<String> userAndGroupIds = new HashSet<String>();
    if (groupIds != null) {
        userAndGroupIds.addAll(groupIds);
    }
    userAndGroupIds.add(userId);
    if (!criteriaListForcesUserLimitation(userAndGroupIds, queryWhere.getCriteria())) {
        addUserRolesLimitCriteria(queryWhere, userId, groupIds);
    }
} |
python | def copy_files(filename, dstfilename):
    # type: (AnyStr, AnyStr) -> None
    """Copy files with the same name and different suffixes, such as ESRI Shapefile."""
    FileClass.remove_files(dstfilename)
    dst_prefix = os.path.splitext(dstfilename)[0]
    pattern = os.path.splitext(filename)[0] + '.*'
    for f in glob.iglob(pattern):
        ext = os.path.splitext(f)[1]
        dst = dst_prefix + ext
        copy(f, dst) |
java | private static String initRemoveEntityQuery(EntityKeyMetadata entityKeyMetadata) {
    StringBuilder queryBuilder = new StringBuilder( "MATCH " );
    appendEntityNode( "n", entityKeyMetadata, queryBuilder );
    queryBuilder.append( " OPTIONAL MATCH (n)-[r]->(e:EMBEDDED), path=(e)-[*0..]->(:EMBEDDED) " );
    queryBuilder.append( " DELETE r " );
    queryBuilder.append( " FOREACH (er IN relationships(path) | DELETE er) " );
    queryBuilder.append( " FOREACH (en IN nodes(path) | DELETE en) " );
    queryBuilder.append( " WITH n " );
    queryBuilder.append( " OPTIONAL MATCH (n)-[r]-() " );
    queryBuilder.append( " DELETE r,n " );
    return queryBuilder.toString();
} |
python | def match_length(self):
    """ Find the total length of all words that match between the two sequences."""
    length = 0
    for match in self.get_matching_blocks():
        a, b, size = match
        length += self._text_length(self.a[a:a+size])
    return length |
java | @Exported
public @CheckForNull String getRequiredCoreVersion() {
    String v = manifest.getMainAttributes().getValue("Jenkins-Version");
    if (v != null) return v;
    v = manifest.getMainAttributes().getValue("Hudson-Version");
    if (v != null) return v;
    return null;
} |
java | @Override
public UpdateResolverResult updateResolver(UpdateResolverRequest request) {
    request = beforeClientExecution(request);
    return executeUpdateResolver(request);
} |
python | def get_child_ids(self):
    """Gets the children of this node.
    return: (osid.id.IdList) - the children of this node
    *compliance: mandatory -- This method must be implemented.*
    """
    id_list = []
    from ..id.objects import IdList
    for child_node in self._my_map['childNodes']:
        id_list.append(str(child_node.ident))
    return IdList(id_list) |
python | def mls_polynomial_coefficients(rho, degree):
    """Determine the coefficients for a MLS polynomial smoother.
    Parameters
    ----------
    rho : float
        Spectral radius of the matrix in question
    degree : int
        Degree of polynomial coefficients to generate
    Returns
    -------
    Tuple of arrays (coeffs,roots) containing the
    coefficients for the (symmetric) polynomial smoother and
    the roots of polynomial prolongation smoother.
    The coefficients of the polynomial are in descending order
    References
    ----------
    .. [1] Parallel multigrid smoothing: polynomial versus Gauss--Seidel
       M. F. Adams, M. Brezina, J. J. Hu, and R. S. Tuminaro
       J. Comp. Phys., 188 (2003), pp. 593--610
    Examples
    --------
    >>> from pyamg.relaxation.chebyshev import mls_polynomial_coefficients
    >>> mls = mls_polynomial_coefficients(2.0, 2)
    >>> print mls[0]  # coefficients
    [ 6.4 -48. 144. -220. 180. -75.8 14.5]
    >>> print mls[1]  # roots
    [ 1.4472136 0.5527864]
    """
    # std_roots = np.cos(np.pi * (np.arange(degree) + 0.5) / degree)
    roots = rho/2.0 * \
        (1.0 - np.cos(2*np.pi*(np.arange(degree, dtype='float64') + 1)/(2.0*degree+1.0)))
    roots = 1.0/roots
    # S_coeffs = list(-np.poly(roots)[1:][::-1])
    S = np.poly(roots)[::-1]  # monomial coefficients of S error propagator
    SSA_max = rho/((2.0*degree+1.0)**2)  # upper bound spectral radius of S^2A
    S_hat = np.polymul(S, S)  # monomial coefficients of \hat{S} propagator
    S_hat = np.hstack(((-1.0/SSA_max)*S_hat, [1]))
    # coeff for combined error propagator \hat{S}S
    coeffs = np.polymul(S_hat, S)
    coeffs = -coeffs[:-1]  # coeff for smoother
    return (coeffs, roots) |
java | public Matrix4x3d rotateYXZ(Vector3d angles) {
    return rotateYXZ(angles.y, angles.x, angles.z);
} |
python | def download_as_obj(
        base_url=d1_common.const.URL_DATAONE_ROOT,
        timeout_sec=d1_common.const.DEFAULT_HTTP_TIMEOUT,
):
    """Download public certificate from a TLS/SSL web server as Certificate object.
    Also see download_as_der().
    Args:
        base_url : str
            A full URL to a DataONE service endpoint or a server hostname
        timeout_sec : int or float
            Timeout for the SSL socket operations
    Returns:
        cryptography.Certificate
    """
    return decode_der(download_as_der(base_url, timeout_sec)) |
python | def pack_mbap(transaction_id, protocol_id, length, unit_id):
    """ Create and return response MBAP.
    :param transaction_id: Transaction id.
    :param protocol_id: Protocol id.
    :param length: Length of following bytes in ADU.
    :param unit_id: Unit id.
    :return: Byte array of 7 bytes.
    """
    return struct.pack('>HHHB', transaction_id, protocol_id, length, unit_id) |
python | def create_memory_layer(
        layer_name, geometry, coordinate_reference_system=None, fields=None):
    """Create a vector memory layer.
    :param layer_name: The name of the layer.
    :type layer_name: str
    :param geometry: The geometry of the layer.
    :type geometry: QgsWkbTypes (note:
        from C++ QgsWkbTypes::GeometryType enum)
    :param coordinate_reference_system: The CRS of the memory layer.
    :type coordinate_reference_system: QgsCoordinateReferenceSystem
    :param fields: Fields of the vector layer. Default to None.
    :type fields: QgsFields
    :return: The memory layer.
    :rtype: QgsVectorLayer
    """
    if geometry == QgsWkbTypes.PointGeometry:
        wkb_type = QgsWkbTypes.MultiPoint
    elif geometry == QgsWkbTypes.LineGeometry:
        wkb_type = QgsWkbTypes.MultiLineString
    elif geometry == QgsWkbTypes.PolygonGeometry:
        wkb_type = QgsWkbTypes.MultiPolygon
    elif geometry == QgsWkbTypes.NullGeometry:
        wkb_type = QgsWkbTypes.NoGeometry
    else:
        raise MemoryLayerCreationError(
            'Layer geometry must be one of: Point, Line, '
            'Polygon or Null, I got %s' % geometry)
    if coordinate_reference_system is None:
        coordinate_reference_system = QgsCoordinateReferenceSystem()
    if fields is None:
        fields = QgsFields()
    elif not isinstance(fields, QgsFields):
        # fields is a list
        new_fields = QgsFields()
        for f in fields:
            new_fields.append(f)
        fields = new_fields
    memory_layer = QgsMemoryProviderUtils. \
        createMemoryLayer(name=layer_name,
                          fields=fields,
                          geometryType=wkb_type,
                          crs=coordinate_reference_system)
    memory_layer.dataProvider().createSpatialIndex()
    memory_layer.keywords = {
        'inasafe_fields': {}
    }
    return memory_layer |
python | def check_spyder_kernels():
    """Check spyder-kernel requirement."""
    try:
        import spyder_kernels
        required_ver = '1.0.0'
        actual_ver = spyder_kernels.__version__
        if LooseVersion(actual_ver) < LooseVersion(required_ver):
            show_warning("Please check Spyder installation requirements:\n"
                         "spyder-kernels >= 1.0 is required (found %s)."
                         % actual_ver)
    except ImportError:
        show_warning("Failed to import spyder-kernels.\n"
                     "Please check Spyder installation requirements:\n\n"
                     "spyder-kernels >= 1.0 is required") |
java | public void setResult(String newResult) {
    String oldResult = result;
    result = newResult;
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, BpsimPackage.SCENARIO__RESULT, oldResult, result));
} |
python | def update(self, item):
    """
    Add a collector item.
    Args:
        item (CollectorUpdate): event data like stage, timestamp and status.
    """
    if item.matrix not in self.data:
        self.data[item.matrix] = []
    result = Select(self.data[item.matrix]).where(
        lambda entry: entry.stage == item.stage).build()
    if len(result) > 0:
        stage = result[0]
        stage.status = item.status
        stage.add(item.timestamp, item.information)
    else:
        stage = CollectorStage(stage=item.stage, status=item.status)
        stage.add(item.timestamp, item.information)
        self.data[item.matrix].append(stage) |
python | def write_bool(self, flag):
    """ Writes a boolean to the underlying output file as a 1-byte value. """
    if flag:
        self.write(b"\x01")
    else:
        self.write(b"\x00") |
python | def _register_entry_point_module(self, entry_point, module):
    """
    Private method that registers an entry_point with a provided
    module.
    """
    records_map = self._map_entry_point_module(entry_point, module)
    self.store_records_for_package(entry_point, list(records_map.keys()))
    for module_name, records in records_map.items():
        if module_name in self.records:
            logger.info(
                "module '%s' was already declared in registry '%s'; "
                "applying new records on top.",
                module_name, self.registry_name,
            )
            logger.debug("overwriting keys: %s", sorted(
                set(self.records[module_name].keys()) &
                set(records.keys())
            ))
            self.records[module_name].update(records)
        else:
            logger.debug(
                "adding records for module '%s' to registry '%s'",
                module_name, self.registry_name,
            )
            self.records[module_name] = records |
python | def getPercentiles(data, weights=None, percentiles=[0.5], presorted=False):
    '''
    Calculates the requested percentiles of (weighted) data. Median by default.
    Parameters
    ----------
    data : numpy.array
        A 1D array of float data.
    weights : np.array
        A weighting vector for the data.
    percentiles : [float]
        A list of percentiles to calculate for the data. Each element should
        be in (0,1).
    presorted : boolean
        Indicator for whether data has already been sorted.
    Returns
    -------
    pctl_out : numpy.array
        The requested percentiles of the data.
    '''
    if weights is None:  # Set equiprobable weights if none were passed
        weights = np.ones(data.size)/float(data.size)
    if presorted:  # Data is already sorted; use it as-is
        data_sorted = data
        weights_sorted = weights
    else:
        order = np.argsort(data)
        data_sorted = data[order]
        weights_sorted = weights[order]
    cum_dist = np.cumsum(weights_sorted)/np.sum(weights_sorted)  # cumulative probability distribution
    # Calculate the requested percentiles by interpolating the data over the
    # cumulative distribution, then evaluating at the percentile values
    inv_CDF = interp1d(cum_dist, data_sorted, bounds_error=False, assume_sorted=True)
    pctl_out = inv_CDF(percentiles)
    return pctl_out |
java | public void hideSoftKeyboard() {
    if (config.commandLogging) {
        Log.d(config.commandLoggingTag, "hideSoftKeyboard()");
    }
    dialogUtils.hideSoftKeyboard(null, true, false);
} |
python | def push_new_version(gh_token: str = None, owner: str = None, name: str = None):
    """
    Runs git push and git push --tags.
    :param gh_token: Github token used to push.
    :param owner: Organisation or user that owns the repository.
    :param name: Name of repository.
    :raises GitError: if GitCommandError is raised
    """
    check_repo()
    server = 'origin'
    if gh_token:
        server = 'https://{token}@{repo}'.format(
            token=gh_token,
            repo='github.com/{owner}/{name}.git'.format(owner=owner, name=name)
        )
    try:
        repo.git.push(server, 'master')
        repo.git.push('--tags', server, 'master')
    except GitCommandError as error:
        message = str(error)
        if gh_token:
            message = message.replace(gh_token, '[GH_TOKEN]')
        raise GitError(message) |
python | def delete(self, **kw):
    """
    Delete a policy route from the engine. You can delete using a
    single field or multiple fields for a more exact match.
    Use a keyword argument to delete a route by any valid attribute.
    :param kw: use valid Route keyword values to delete by exact match
    """
    delete_by = []
    for field, val in kw.items():
        if val is not None:
            delete_by.append(field)
    self.items[:] = [route for route in self.items
                     if not all(route.get(field) == kw.get(field)
                                for field in delete_by)] |
python | def has_reg(value):
    """Return True if the given key exists in HKEY_LOCAL_MACHINE, False
    otherwise."""
    try:
        SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, value)
        ret = True
    except SCons.Util.WinError:
        ret = False
    return ret |