language | func_code_string
---|---|
java | public ProxyPolicyHandler removeProxyPolicyHandler(String id) {
return (id != null && this.proxyPolicyHandlers != null) ?
(ProxyPolicyHandler)this.proxyPolicyHandlers.remove(id) :
null;
} |
java | private Map<Integer, List<Row>> createExceptionAssignmentMap(List<Row> rows)
{
Map<Integer, List<Row>> map = new HashMap<Integer, List<Row>>();
for (Row row : rows)
{
Integer calendarID = row.getInteger("ID");
String exceptions = row.getString("EXCEPTIONS");
map.put(calendarID, createExceptionAssignmentRowList(exceptions));
}
return map;
} |
java | public boolean shouldTraceBeDisabled(WSRdbManagedConnectionImpl mc) {
return !databaseTc.isDebugEnabled() && mc.mcf.loggingEnabled;
} |
java | public static void generateSource(Reader reader, File sourceOutputPath) throws IOException {
ProtoFile protoFile = ProtoSchemaParser.parse(DEFAULT_FILE_NAME, reader);
List<CodeDependent> cds = new ArrayList<CodeDependent>();
doCreate(protoFile, true, false, null, true, sourceOutputPath, cds, new HashMap<String, String>(), false);
} |
python | def info(gandi, email):
"""Display information about a mailbox."""
login, domain = email
output_keys = ['login', 'aliases', 'fallback', 'quota', 'responder']
mailbox = gandi.mail.info(domain, login)
output_mailbox(gandi, mailbox, output_keys)
return mailbox |
java | @Override
public CommerceAvailabilityEstimate findByGroupId_Last(long groupId,
OrderByComparator<CommerceAvailabilityEstimate> orderByComparator)
throws NoSuchAvailabilityEstimateException {
CommerceAvailabilityEstimate commerceAvailabilityEstimate = fetchByGroupId_Last(groupId,
orderByComparator);
if (commerceAvailabilityEstimate != null) {
return commerceAvailabilityEstimate;
}
StringBundler msg = new StringBundler(4);
msg.append(_NO_SUCH_ENTITY_WITH_KEY);
msg.append("groupId=");
msg.append(groupId);
msg.append("}");
throw new NoSuchAvailabilityEstimateException(msg.toString());
} |
python | def CompareStores(self):
"""Compares the contents of two stores.
Returns:
bool: True if the content of the stores is identical.
"""
storage_reader = storage_factory.StorageFactory.CreateStorageReaderForFile(
self._storage_file_path)
if not storage_reader:
logger.error(
'Format of storage file: {0:s} not supported'.format(
self._storage_file_path))
return False
compare_storage_reader = (
storage_factory.StorageFactory.CreateStorageReaderForFile(
self._compare_storage_file_path))
if not compare_storage_reader:
logger.error(
'Format of storage file: {0:s} not supported'.format(
self._compare_storage_file_path))
return False
try:
result = self._CompareStores(storage_reader, compare_storage_reader)
finally:
compare_storage_reader.Close()
storage_reader.Close()
if result:
self._output_writer.Write('Storage files are identical.\n')
else:
self._output_writer.Write('Storage files are different.\n')
return result |
java | @SuppressFBWarnings(justification = "Accepting that this is a bad practice - but made more sense in this use case",
value = {"NP_BOOLEAN_RETURN_NULL"})
public Boolean isAutoUpdate() {
return (line != null && line.hasOption(ARGUMENT.DISABLE_AUTO_UPDATE)) ? false : null;
} |
java | public java.awt.Graphics2D createPrinterGraphicsShapes(float width, float height, PrinterJob printerJob) {
return new PdfPrinterGraphics2D(this, width, height, null, true, false, 0, printerJob);
} |
java | private void repaintDueToPlaybackStateChange(long oldMaxPosition, long newMaxPosition,
PlaybackState oldState, PlaybackState newState) {
if (duration.get() > 0) { // We are only drawing markers if we know the track duration
final int width = waveformWidth() + 8;
// See if we need to redraw a stretch of the “played until” stripe.
if (oldMaxPosition > newMaxPosition) {
final int left = Math.max(0, Math.min(width, millisecondsToX(newMaxPosition) - 6));
final int right = Math.max(0, Math.min(width, millisecondsToX(oldMaxPosition) + 6));
delegatingRepaint(left, 0, right - left, getHeight());
} else if (newMaxPosition > oldMaxPosition) {
final int left = Math.max(0, Math.min(width, millisecondsToX(oldMaxPosition) - 6));
final int right = Math.max(0, Math.min(width, millisecondsToX(newMaxPosition) + 6));
delegatingRepaint(left, 0, right - left, getHeight());
}
// Also refresh where the specific marker was moved from and/or to.
if (oldState != null && (newState == null || newState.position != oldState.position)) {
final int left = Math.max(0, Math.min(width, millisecondsToX(oldState.position) - 6));
final int right = Math.max(0, Math.min(width, millisecondsToX(oldState.position) + 6));
delegatingRepaint(left, 0, right - left, getHeight());
}
if (newState != null && (oldState == null || newState.position != oldState.position)) {
final int left = Math.max(0, Math.min(width, millisecondsToX(newState.position) - 6));
final int right = Math.max(0, Math.min(width, millisecondsToX(newState.position) + 6));
delegatingRepaint(left, 0, right - left, getHeight());
}
}
} |
python | def as_tree(context):
"""Return info about an object's members as JSON"""
tree = _build_tree(context, 2, 1)
if type(tree) == dict:
tree = [tree]
return Response(content_type='application/json', body=json.dumps(tree)) |
java | public static Map<String, Field> createSuperColumnsFieldMap(final EntityMetadata m,
final KunderaMetadata kunderaMetadata)
{
Map<String, Field> superColumnNameToFieldMap = new HashMap<String, Field>();
getEmbeddableType(m, null, superColumnNameToFieldMap, kunderaMetadata);
return superColumnNameToFieldMap;
} |
python | def _connect(self, server, port, tls=True, tls_verify=True, proxy=False,
proxy_type='SOCKS5', proxy_server=None,
proxy_port=None, proxy_username=None, proxy_password=None):
"""
Connects the socket to an IRC server.
Required arguments:
* server - Server to connect to.
* port - Port to use.
Optional arguments:
* tls=True - Should we use TLS/SSL?
* tls_verify=True - Verify the TLS certificate?
Only works with Python 3.
* proxy=False - Should we use a proxy?
* proxy_type='SOCKS5' - Proxy type: SOCKS5, SOCKS4 or HTTP
* proxy_server=None - Proxy server's address
* proxy_port=None - Proxy server's port
* proxy_username=None - If SOCKS5 is used,
a proxy username/password can be specified.
* proxy_password=None - If SOCKS5 is used,
a proxy username/password can be specified.
"""
with self.lock:
if proxy:
if proxy_type == 'SOCKS5':
proxy_type = self._m_proxy.PROXY_TYPE_SOCKS5
elif proxy_type == 'SOCKS4':
proxy_type = self._m_proxy.PROXY_TYPE_SOCKS4
elif proxy_type == 'HTTP':
proxy_type = self._m_proxy.PROXY_TYPE_HTTP
self._socket = self._m_proxy.socksocket()
self._socket.setproxy(proxytype=proxy_type, \
addr=proxy_server, \
port=proxy_port, \
username=proxy_username, \
password=proxy_password)
if tls:
if tls_verify:
ca_bundle = self._m_tempfile.NamedTemporaryFile().name
with open(ca_bundle, 'w') as bundle_file:
bundle_file.write(self._ca_bundle)
cert_required = self._m_tls.CERT_REQUIRED
self._socket = \
self._m_tls.wrap_socket(self._socket, \
cert_reqs=cert_required, \
ca_certs=ca_bundle)
self._socket.connect((server, port))
self._m_tls.match_hostname(self._socket.getpeercert(), \
server)
return None
else:
self._socket = self._m_tls.wrap_socket(self._socket)
self._socket.connect((server, port)) |
java | public void deletePersonalFavorite(Target target) throws WorkspaceApiException {
try {
ApiSuccessResponse resp = targetsApi.deletePersonalFavorite(String.valueOf(target.getId()), target.getType().getValue());
Util.throwIfNotOk(resp);
}
catch(ApiException ex) {
throw new WorkspaceApiException("Cannot delete personal favorite", ex);
}
} |
python | def init(vcs):
"""Initialize the locking module for a repository
"""
path = os.path.join(vcs.private_dir(), 'locks')
if not os.path.exists(path):
os.mkdir(path) |
python | def backend_status(self, backend='ibmqx4', access_token=None, user_id=None):
"""
Get the status of a chip
"""
if access_token:
self.req.credential.set_token(access_token)
if user_id:
self.req.credential.set_user_id(user_id)
backend_type = self._check_backend(backend, 'status')
if not backend_type:
raise BadBackendError(backend)
status = self.req.get('/Backends/' + backend_type + '/queue/status',
with_token=False)
ret = {}
if 'state' in status:
ret['available'] = bool(status['state'])
if 'busy' in status:
ret['busy'] = bool(status['busy'])
if 'lengthQueue' in status:
ret['pending_jobs'] = status['lengthQueue']
ret['backend'] = backend_type
return ret |
python | def activate(self, *, filter_func=None):
'''
Activate the type safety checker. After the call all functions
that need to be checked will be.
'''
if self.active:
raise RuntimeError("Type safety check already active")
self.__module_finder = ModuleFinder(Validator.decorate)
if filter_func is not None:
self.__module_finder.set_filter(filter_func)
self.__module_finder.install() |
python | def vcenter_blit(target, source, dest = (0, 0), area=None, special_flags=0):
'''
The same as center_blit(), but only centers vertically.
'''
loc = lambda d, s: (_vec(0, d.get_height() / 2) -
_vec(0, s.get_height() / 2))
_blitter(loc, target, source, dest, area, special_flags) |
java | @Override
public DescribeCacheSubnetGroupsResult describeCacheSubnetGroups(DescribeCacheSubnetGroupsRequest request) {
request = beforeClientExecution(request);
return executeDescribeCacheSubnetGroups(request);
} |
java | public ServiceFuture<TaskInner> updateAsync(String resourceGroupName, String registryName, String taskName, TaskUpdateParameters taskUpdateParameters, final ServiceCallback<TaskInner> serviceCallback) {
return ServiceFuture.fromResponse(updateWithServiceResponseAsync(resourceGroupName, registryName, taskName, taskUpdateParameters), serviceCallback);
} |
java | public EntityResult deleteEntities(final String ... guids) throws AtlasServiceException {
LOG.debug("Deleting entities: {}", guids);
JSONObject jsonResponse = callAPIWithRetries(API.DELETE_ENTITIES, null, new ResourceCreator() {
@Override
public WebResource createResource() {
API api = API.DELETE_ENTITIES;
WebResource resource = getResource(api);
for (String guid : guids) {
resource = resource.queryParam(GUID.toLowerCase(), guid);
}
return resource;
}
});
EntityResult results = extractEntityResult(jsonResponse);
LOG.debug("Delete entities returned results: {}", results);
return results;
} |
python | def _merge_fastqc(samples):
"""
merge all fastqc samples into one by module
"""
fastqc_list = collections.defaultdict(list)
seen = set()
for data in samples:
name = dd.get_sample_name(data)
if name in seen:
continue
seen.add(name)
fns = glob.glob(os.path.join(dd.get_work_dir(data), "qc", dd.get_sample_name(data), "fastqc") + "/*")
for fn in fns:
if fn.endswith("tsv"):
metric = os.path.basename(fn)
fastqc_list[metric].append([name, fn])
for metric in fastqc_list:
dt_by_sample = []
for fn in fastqc_list[metric]:
dt = pd.read_csv(fn[1], sep="\t")
dt['sample'] = fn[0]
dt_by_sample.append(dt)
dt = utils.rbind(dt_by_sample)
dt.to_csv(metric, sep="\t", index=False, mode='w')
return samples |
python | def supports(cls, template_file=None):
"""
:return: Whether the engine can process the given template file or not.
"""
if anytemplate.compat.IS_PYTHON_3:
cls._priority = 99
return False # Always False, since this engine is not ported to Python 3.
return super(Engine, cls).supports(template_file=template_file) |
python | def MetaField(self, name, t=None):
"""
Creates an instance of a metadata field of the dataset. It can be used in building expressions
or conditions for projection or selection.
Notice that this function is equivalent to call::
dataset["name"]
If the MetaField is used in a region projection (:meth:`~.reg_project`), the user has also to specify the type
of the metadata attribute that is selected::
dataset.reg_project(new_field_dict={'new_field': dataset['name', 'string']})
:param name: the name of the metadata that is considered
:param t: the type of the metadata attribute {string, int, boolean, double}
:return: a MetaField instance
"""
return MetaField(name=name, index=self.__index, t=t) |
python | def fft_convolve(data, h, res_g = None,
plan = None, inplace = False,
kernel_is_fft = False,
kernel_is_fftshifted = False):
""" convolves data with kernel h via FFTs
data should be either a numpy array or a OCLArray (see doc for fft)
both data and h should be same shape
if data/h are OCLArrays, then:
- type should be complex64
- shape should be equal and power of two
- h is assumed to be already fftshifted
(otherwise set kernel_is_fftshifted to true)
"""
if isinstance(data,np.ndarray):
return _fft_convolve_numpy(data, h,
plan = plan,
kernel_is_fft = kernel_is_fft,
kernel_is_fftshifted = kernel_is_fftshifted)
elif isinstance(data,OCLArray):
return _fft_convolve_gpu(data,h, res_g = res_g,
plan = plan, inplace = inplace,
kernel_is_fft = kernel_is_fft)
else:
raise TypeError("array argument (1) has bad type: %s"%type(data)) |
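A self-contained sketch of what the numpy path computes, using plain numpy FFTs instead of the module-private `_fft_convolve_numpy` helper: circular convolution as a pointwise product in Fourier space, with the kernel fftshifted so its center sits at index 0 (per the docstring's assumption).

import numpy as np

data = np.random.rand(64, 64).astype(np.complex64)
h = np.zeros((64, 64), dtype=np.complex64)
h[30:35, 30:35] = 1.0 / 25.0  # normalized 5x5 box kernel centered at (32, 32)
# fftshift moves the kernel center to index 0 before the Fourier product
res = np.fft.ifftn(np.fft.fftn(data) * np.fft.fftn(np.fft.fftshift(h)))
print(abs(res.real.mean() - data.real.mean()) < 1e-4)  # True: a normalized kernel preserves the mean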
java | public WxCpMessageRouterRule handler(WxCpMessageHandler handler, WxCpMessageHandler... otherHandlers) {
this.handlers.add(handler);
if (otherHandlers != null && otherHandlers.length > 0) {
for (WxCpMessageHandler i : otherHandlers) {
this.handlers.add(i);
}
}
return this;
} |
python | def ramping_values(period=360):
"""
Provides an infinite source of values representing a triangle wave (from 0
to 1 and back again) which repeats every *period* values. For example, to
pulse an LED once a second::
from gpiozero import PWMLED
from gpiozero.tools import ramping_values
from signal import pause
red = PWMLED(2)
red.source_delay = 0.01
red.source = ramping_values(100)
pause()
If you require a wider range than 0 to 1, see :func:`scaled`.
"""
step = 2 / period
value = 0
while True:
yield value
value += step
if isclose(value, 1, abs_tol=1e-9):
value = 1
step *= -1
elif isclose(value, 0, abs_tol=1e-9):
value = 0
step *= -1
elif value > 1 or value < 0:
step *= -1
value += step |
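A quick hardware-free check of the wave shape (assumes Python 3 float division and that `isclose` is `math.isclose`, as imported at the top of gpiozero.tools):

from itertools import islice

print(list(islice(ramping_values(4), 9)))
# -> [0, 0.5, 1, 0.5, 0, 0.5, 1, 0.5, 0]  (one full triangle every 4 values)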
java | public static int getHour(Date date) {
Calendar c = Calendar.getInstance();
c.setTime(date);
return c.get(Calendar.HOUR_OF_DAY);
} |
java | public synchronized void dequeue() throws IOException {
final int length = byteDiskQueue.dequeueInt();
buffer.size(length);
byteDiskQueue.dequeue(buffer.elements(), 0, length);
size--;
} |
java | public static int getSingleCodePoint(CharSequence s) {
if (s == null || s.length() == 0) {
return -1;
} else if (s.length() == 1) {
return s.charAt(0);
} else if (s.length() > 2) {
return -1;
}
// at this point, len = 2
int cp = Character.codePointAt(s, 0);
if (cp > 0xFFFF) { // is surrogate pair
return cp;
}
return -1;
} |
python | def expand_requirement(request, paths=None):
"""Expands a requirement string like 'python-2.*', 'foo-2.*+<*', etc.
Wildcards are expanded to the latest version that matches. There is also a
special wildcard '**' that will expand to the full version, but it cannot
be used in combination with '*'.
Wildcards MUST stand in for a whole version token, not a partial one - while 'foo-2.*'
is valid, 'foo-2.v*' is not.
Wildcards MUST appear at the end of version numbers - while 'foo-1.*.*' is
valid, 'foo-1.*.0' is not.
It is possible that an expansion will result in an invalid request string
(such as 'foo-2+<2'). The appropriate exception will be raised if this
happens.
Examples:
>>> print expand_requirement('python-2.*')
python-2.7
>>> print expand_requirement('python==2.**')
python==2.7.12
>>> print expand_requirement('python<**')
python<3.0.5
Args:
request (str): Request to expand, eg 'python-2.*'
paths (list of str, optional): paths to search for package families,
defaults to `config.packages_path`.
Returns:
str: Expanded request string.
"""
if '*' not in request:
return request
from rez.vendor.version.version import VersionRange
from rez.vendor.version.requirement import Requirement
from rez.packages_ import get_latest_package
from uuid import uuid4
wildcard_map = {}
expanded_versions = {}
request_ = request
# replace wildcards with valid version tokens that can be replaced again
# afterwards. This produces a horrendous, but both valid and temporary,
# version string.
#
while "**" in request_:
uid = "_%s_" % uuid4().hex
request_ = request_.replace("**", uid, 1)
wildcard_map[uid] = "**"
while '*' in request_:
uid = "_%s_" % uuid4().hex
request_ = request_.replace('*', uid, 1)
wildcard_map[uid] = '*'
# create the requirement, then expand wildcards
#
req = Requirement(request_, invalid_bound_error=False)
def expand_version(version):
rank = len(version)
wildcard_found = False
while version and str(version[-1]) in wildcard_map:
token = wildcard_map[str(version[-1])]
version = version.trim(len(version) - 1)
if token == "**":
if wildcard_found: # catches bad syntax '**.*'
return None
else:
wildcard_found = True
rank = 0
break
wildcard_found = True
if not wildcard_found:
return None
range_ = VersionRange(str(version))
package = get_latest_package(name=req.name, range_=range_, paths=paths)
if package is None:
return version
if rank:
return package.version.trim(rank)
else:
return package.version
def visit_version(version):
# requirements like 'foo-1' are actually represented internally as
# 'foo-1+<1_' - '1_' is the next possible version after '1'. So we have
# to detect this case and remap the uid-ified wildcard back here too.
#
for v, expanded_v in expanded_versions.iteritems():
if version == v.next():
return expanded_v.next()
version_ = expand_version(version)
if version_ is None:
return None
expanded_versions[version] = version_
return version_
if req.range_ is not None:
req.range_.visit_versions(visit_version)
result = str(req)
# do some cleanup so that long uids aren't left in invalid wildcarded strings
for uid, token in wildcard_map.iteritems():
result = result.replace(uid, token)
# cast back to a Requirement again, then back to a string. This will catch
# bad version ranges, but will also put OR'd version ranges into the correct
# order
expanded_req = Requirement(result)
return str(expanded_req) |
python | def send_notification(*, subsystem, recipients, subject, body_html, body_text):
"""Method to send a notification. A plugin may use only part of the information, but all fields are required.
Args:
subsystem (`str`): Name of the subsystem originating the notification
recipients (`list` of :obj:`NotificationContact`): List of recipients
subject (`str`): Subject / title of the notification
body_html (`str`): HTML formatted version of the message
body_text (`str`): Text formatted version of the message
Returns:
`None`
"""
from cloud_inquisitor import CINQ_PLUGINS
if not body_html and not body_text:
raise ValueError('body_html or body_text must be provided')
# Make sure that we don't have any duplicate recipients
recipients = list(set(recipients))
notifiers = map(lambda plugin: plugin.load(), CINQ_PLUGINS['cloud_inquisitor.plugins.notifiers']['plugins'])
for cls in filter(lambda x: x.enabled(), notifiers):
for recipient in recipients:
if isinstance(recipient, NotificationContact):
if recipient.type == cls.notifier_type:
try:
notifier = cls()
notifier.notify(subsystem, recipient.value, subject, body_html, body_text)
except Exception:
log.exception('Failed sending notification for {}/{}'.format(
recipient.type,
recipient.value
))
else:
log.warning('Unexpected recipient {}'.format(recipient)) |
java | @Nullable
public UserDataObject confirmUploadedFile (@Nullable final String sFieldName)
{
return m_aRWLock.writeLocked ( () -> {
if (StringHelper.hasText (sFieldName))
{
// Remove an eventually existing old UDO
final TemporaryUserDataObject aUDO = m_aMap.remove (sFieldName);
if (aUDO != null)
{
LOGGER.info ("Confirmed uploaded file " + aUDO);
// Convert from temporary to real UDO
return new UserDataObject (aUDO.getPath ());
}
}
return null;
});
} |
python | def create(self, path: str, k: int = 20):
"""
Create from a scored lexicon file (fast_align format) using vocab from a trained Sockeye model.
:param path: Path to lexicon file.
:param k: Number of target entries per source to keep.
"""
self.lex = np.zeros((len(self.vocab_source), k), dtype=np.int)
src_unk_id = self.vocab_source[C.UNK_SYMBOL]
trg_unk_id = self.vocab_target[C.UNK_SYMBOL]
num_insufficient = 0 # number of source tokens with insufficient number of translations given k
for src_id, group in groupby(lexicon_iterator(path, self.vocab_source, self.vocab_target), key=itemgetter(0)):
# Unk token will always be part of target vocab, so no need to track it here
if src_id == src_unk_id:
continue
# filter trg_unk_id
filtered_group = ((trg_id, prob) for src_id, trg_id, prob in group if trg_id != trg_unk_id)
# sort by prob and take top k
top_k = [trg_id for trg_id, prob in sorted(filtered_group, key=itemgetter(1), reverse=True)[:k]]
if len(top_k) < k:
num_insufficient += 1
self.lex[src_id, :len(top_k)] = top_k
logger.info("Created top-k lexicon from \"%s\", k=%d. %d source tokens with fewer than %d translations",
path, k, num_insufficient, k) |
java | public static void checkForEqualDimensions(ArrayND a0, ArrayND a1)
{
if (a0.getSize().getSize() != a1.getSize().getSize())
{
throw new IllegalArgumentException(
"Arrays have different dimensions: "+a0.getSize().getSize()+
" and "+a1.getSize().getSize());
}
} |
python | def filter_arrange_nodes(nodes: List[ast.stmt], max_line_number: int) -> List[ast.stmt]:
"""
Finds all nodes that are before the ``max_line_number`` and are not
docstrings or ``pass``.
"""
return [
node for node in nodes if node.lineno < max_line_number and not isinstance(node, ast.Pass)
and not (isinstance(node, ast.Expr) and isinstance(node.value, ast.Str))
] |
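For example, applied to the body of a hypothetical module, only the assignment before the cut-off line survives; the docstring and `pass` are dropped:

import ast

source = '"""docstring"""\nx = 1\npass\ny = 2\n'
module = ast.parse(source)
kept = filter_arrange_nodes(module.body, max_line_number=4)
print([type(n).__name__ for n in kept])  # ['Assign'] -- only "x = 1" is kept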
java | public StartPoint value(Object value) {
StartExpression sx = (StartExpression)this.astNode;
sx.getPropertyOrQuery().setPropertyValue(value);
StartPoint ret = new StartPoint(sx);
return ret;
} |
java | public ServiceFuture<AssetInner> createOrUpdateAsync(String resourceGroupName, String accountName, String assetName, AssetInner parameters, final ServiceCallback<AssetInner> serviceCallback) {
return ServiceFuture.fromResponse(createOrUpdateWithServiceResponseAsync(resourceGroupName, accountName, assetName, parameters), serviceCallback);
} |
python | def create_release_vcs(path, vcs_name=None):
"""Return a new release VCS that can release from this source path."""
from rez.plugin_managers import plugin_manager
vcs_types = get_release_vcs_types()
if vcs_name:
if vcs_name not in vcs_types:
raise ReleaseVCSError("Unknown version control system: %r" % vcs_name)
cls = plugin_manager.get_plugin_class('release_vcs', vcs_name)
return cls(path)
classes_by_level = {}
for vcs_name in vcs_types:
cls = plugin_manager.get_plugin_class('release_vcs', vcs_name)
result = cls.find_vcs_root(path)
if not result:
continue
vcs_path, levels_up = result
classes_by_level.setdefault(levels_up, []).append((cls, vcs_path))
if not classes_by_level:
raise ReleaseVCSError("No version control system for package "
"releasing is associated with the path %s" % path)
# it's ok to have multiple results, as long as there is only one at the
# "closest" directory up from this dir - ie, if we start at:
# /blah/foo/pkg_root
# and these dirs exist:
# /blah/.hg
# /blah/foo/.git
# ...then this is ok, because /blah/foo/.git is "closer" to the original
# dir, and will be picked. However, if these two directories exist:
# /blah/foo/.git
# /blah/foo/.hg
# ...then we error, because we can't decide which to use
lowest_level = sorted(classes_by_level)[0]
clss = classes_by_level[lowest_level]
if len(clss) > 1:
clss_str = ", ".join(x[0].name() for x in clss)
raise ReleaseVCSError("Several version control systems are associated "
"with the path %s: %s. Use rez-release --vcs to "
"choose." % (path, clss_str))
else:
cls, vcs_root = clss[0]
return cls(pkg_root=path, vcs_root=vcs_root) |
python | def ILIKE(pattern):
"""Unix shell-style wildcards. Case-insensitive"""
return P(lambda x: fnmatch.fnmatch(x.lower(), pattern.lower())) |
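`P` is defined elsewhere in this library; with a minimal stand-in wrapper (an assumption for illustration only), the predicate can be exercised in isolation:

import fnmatch

class P:
    # hypothetical stand-in for the library's predicate wrapper
    def __init__(self, func):
        self.func = func
    def __call__(self, x):
        return self.func(x)

def ILIKE(pattern):
    return P(lambda x: fnmatch.fnmatch(x.lower(), pattern.lower()))

print(ILIKE('*.TXT')('notes.txt'))  # True -- match is case-insensitive
print(ILIKE('*.TXT')('notes.md'))   # False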
python | def ckw01(handle, begtim, endtim, inst, ref, avflag, segid, nrec, sclkdp, quats,
avvs):
"""
Add a type 1 segment to a C-kernel.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ckw01_c.html
:param handle: Handle of an open CK file.
:type handle: int
:param begtim: The beginning encoded SCLK of the segment.
:type begtim: float
:param endtim: The ending encoded SCLK of the segment.
:type endtim: float
:param inst: The NAIF instrument ID code.
:type inst: int
:param ref: The reference frame of the segment.
:type ref: str
:param avflag: True if the segment will contain angular velocity.
:type avflag: bool
:param segid: Segment identifier.
:type segid: str
:param nrec: Number of pointing records.
:type nrec: int
:param sclkdp: Encoded SCLK times.
:type sclkdp: Array of floats
:param quats: Quaternions representing instrument pointing.
:type quats: Nx4-Element Array of floats
:param avvs: Angular velocity vectors.
:type avvs: Nx3-Element Array of floats
"""
handle = ctypes.c_int(handle)
begtim = ctypes.c_double(begtim)
endtim = ctypes.c_double(endtim)
inst = ctypes.c_int(inst)
ref = stypes.stringToCharP(ref)
avflag = ctypes.c_int(avflag)
segid = stypes.stringToCharP(segid)
sclkdp = stypes.toDoubleVector(sclkdp)
quats = stypes.toDoubleMatrix(quats)
avvs = stypes.toDoubleMatrix(avvs)
nrec = ctypes.c_int(nrec)
libspice.ckw01_c(handle, begtim, endtim, inst, ref, avflag, segid, nrec,
sclkdp, quats, avvs) |
python | def convert_pnm(self, infile, outfile):
"""
Convert a PNM file containing raw pixel data into a PNG file
with the parameters set in the writer object. Works for
(binary) PGM, PPM, and PAM formats.
"""
if self.interlace:
pixels = array('B')
pixels.fromfile(infile,
(self.bitdepth/8) * self.color_planes *
self.width * self.height)
self.write_passes(outfile, self.array_scanlines_interlace(pixels))
else:
self.write_passes(outfile, self.file_scanlines(infile)) |
python | def get(key, default=-1):
"""Backport support for original codes."""
if isinstance(key, int):
return LinkType(key)
if key not in LinkType._member_map_:
extend_enum(LinkType, key, default)
return LinkType[key] |
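The same backport pattern on a toy IntEnum (LinkType is defined elsewhere; `extend_enum` comes from the third-party aenum package and is assumed here to behave as in the snippet above):

from enum import IntEnum

from aenum import extend_enum

class Color(IntEnum):
    RED = 1

def get(key, default=-1):
    """Backport support for original codes (toy version of the above)."""
    if isinstance(key, int):
        return Color(key)
    if key not in Color._member_map_:
        extend_enum(Color, key, default)
    return Color[key]

print(get(1))       # Color.RED
print(get('BLUE'))  # Color.BLUE, value -1, registered on the fly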
python | def _register_attribute(self, did, checksum, value, account, providers):
"""Register an DID attribute as an event on the block chain.
:param did: 32 byte string/hex of the DID
:param checksum: checksum of the ddo, hex str
:param value: url for resolve the did, str
:param account: account owner of this DID registration record
:param providers: list of providers addresses
"""
assert isinstance(providers, list), 'providers must be a list of addresses'
return self.send_transaction(
'registerAttribute',
(did,
checksum,
providers,
value),
transact={'from': account.address,
'passphrase': account.password}
) |
java | private void writePackageInfos( File rootDir, File outputDir )
throws MojoExecutionException
{
getLog().debug( "in writePackageInfos(" + rootDir + ", " + outputDir + ")" );
try
{
if ( shouldWritePackageInfo( rootDir ) )
{
if( !outputDir.exists() && !outputDir.mkdirs() ) {
throw new MojoExecutionException( "outputDirectory was unable to be created: " + outputDir );
}
writePackageInfo( outputDir );
}
else
{
getLog().debug( "no files in:" + rootDir );
}
}
catch ( Throwable e )
{
throw new MojoExecutionException( "could not write PackageInfo.java in: " + outputDir, e );
}
// get any sub-directories
File[] subdirs = rootDir.listFiles( new FileFilter()
{
public boolean accept( File pathname )
{
return pathname.isDirectory();
}
} );
// recurse
if ( subdirs != null ) {
for (File subdir : subdirs) {
writePackageInfos(subdir, new File(outputDir, subdir.getName()));
}
}
} |
python | def get_package_formats():
"""Get the list of available package formats and parameters."""
# pylint: disable=fixme
# HACK: This obviously isn't great, and it is subject to change as
# the API changes, but it'll do for now as an interim method of
# introspection to get the parameters we need.
def get_parameters(cls):
"""Build parameters for a package format."""
params = {}
# Create a dummy instance so we can check if a parameter is required.
# As with the rest of this function, this is obviously hacky. We'll
# figure out a way to pull this information in from the API later.
dummy_kwargs = {k: "dummy" for k in cls.swagger_types}
instance = cls(**dummy_kwargs)
for k, v in six.iteritems(cls.swagger_types):
attr = getattr(cls, k)
docs = attr.__doc__.strip().split("\n")
doc = (docs[1] if docs[1] else docs[0]).strip()
try:
setattr(instance, k, None)
required = False
except ValueError:
required = True
params[cls.attribute_map.get(k)] = {
"type": v,
"help": doc,
"required": required,
}
return params
return {
key.replace("PackagesUpload", "").lower(): get_parameters(cls)
for key, cls in inspect.getmembers(cloudsmith_api.models)
if key.startswith("PackagesUpload")
} |
java | public static CPDefinitionVirtualSetting removeByUUID_G(String uuid,
long groupId)
throws com.liferay.commerce.product.type.virtual.exception.NoSuchCPDefinitionVirtualSettingException {
return getPersistence().removeByUUID_G(uuid, groupId);
} |
java | @Deprecated
public static String getTypeName(Class<? extends Tag<?>> clazz) {
return TagType.getByTagClass(clazz).getTypeName();
} |
python | def find_c_file(obj_file, vpath):
""" Search vpaths for the c file that matches the provided object_file.
:param str obj_file: object file to find the matching c file for
:param List[str] vpath: List of base paths, similar to gcc vpath
:return: str path to c file or None
"""
c_file = None
relative_c_file = os.path.splitext(obj_file)[0] + ".c"
relative_c_file = relative_c_file.lstrip('/\\')
for p in vpath:
possible_c_file = os.path.join(p, relative_c_file)
if os.path.exists(possible_c_file):
c_file = possible_c_file
break
return c_file |
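Usage sketch with a throwaway directory tree (all paths hypothetical):

import os
import tempfile

root = tempfile.mkdtemp()
os.makedirs(os.path.join(root, 'src'))
open(os.path.join(root, 'src', 'foo.c'), 'w').close()

# '/src/foo.o' is reduced to the relative path 'src/foo.c', then each vpath
# entry is tried in order until the file exists.
print(find_c_file('/src/foo.o', ['/nonexistent', root]))  # <root>/src/foo.c
print(find_c_file('/src/bar.o', [root]))                  # None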
python | def unprefix(self, path):
"""Remove the self.prefix_ (if present) from a path or list of paths"""
path = self.strip(path)
if isinstance(path, six.string_types):
path = path[len(self.prefix_):] if path.startswith(self.prefix_) else path
path = path[1:] if path.startswith(self.separator) else path
return path
if isinstance(path, (list, tuple)):
path = [p[len(self.prefix_):] if p.startswith(self.prefix_) else p for p in path]
path = [p[1:] if p.startswith(self.separator) else p for p in path]
return path |
java | public V get (long timeout, TimeUnit unit)
throws InterruptedException, ExecutionException, TimeoutException
{
return _sync.innerGet(unit.toNanos(timeout));
} |
python | def register_blueprint(self, blueprint):
'''
Register the given blueprint on the current app.
This method is provided for using inside plugin's module-level
:func:`register_plugin` functions.
:param blueprint: blueprint object with plugin endpoints
:type blueprint: flask.Blueprint
'''
if blueprint not in self._blueprint_known:
self.app.register_blueprint(blueprint)
self._blueprint_known.add(blueprint) |
python | def get_original_vs_converted_diff( original, converted ):
''' Compares the *original* text to *converted* text, and detects changes/differences in
morphological annotations.
The method constructs line-by-line comparison string, where lines are separated by
newline, and '***' at the beginning of the line indicates the difference.
Returns a pair: the results of the line-by-line comparison as a string, and a boolean value
indicating whether there were any differences.
'''
from estnltk.syntax.syntax_preprocessing import convert_Text_to_mrf
old_layer_mrf = convert_Text_to_mrf( original )
new_layer_mrf = convert_Text_to_mrf( converted )
max_len_1 = max([len(l) for l in old_layer_mrf ])
max_len_2 = max([len(l) for l in new_layer_mrf ])
max_len = max( max_len_1, max_len_2 )
format_str = '{:<'+str(max_len+1)+'}'
i = 0
j = 0
comp_lines = []
diff_found = False
while(i < len(old_layer_mrf) or j < len(new_layer_mrf)):
l1 = old_layer_mrf[i]
l2 = new_layer_mrf[j]
# 1) Output line containing tokens
if not l1.startswith(' ') and not l2.startswith(' '):
diff = '*** ' if format_str.format(l1) != format_str.format(l2) else ' '
comp_lines.append( diff+format_str.format(l1)+format_str.format(l2) )
if diff == '*** ':
diff_found = True
i += 1
j += 1
else:
# 2) Output analysis line(s)
while(i < len(old_layer_mrf) or j < len(new_layer_mrf)):
l1 = old_layer_mrf[i]
l2 = new_layer_mrf[j]
if l1.startswith(' ') and l2.startswith(' '):
diff = '*** ' if format_str.format(l1) != format_str.format(l2) else ' '
comp_lines.append( diff+format_str.format(l1)+format_str.format(l2) )
if diff == '*** ':
diff_found = True
i += 1
j += 1
elif l1.startswith(' ') and not l2.startswith(' '):
diff = '*** '
comp_lines.append( diff+format_str.format(l1)+format_str.format(' ') )
diff_found = True
i += 1
elif not l1.startswith(' ') and l2.startswith(' '):
diff = '*** '
comp_lines.append( diff+format_str.format(' ')+format_str.format(l2) )
diff_found = True
j += 1
else:
break
return '\n'.join( comp_lines ), diff_found |
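The column-aligned diff format, shown in isolation on toy morphological lines (the real inputs come from convert_Text_to_mrf):

old = ['mees', '    S sg n']
new = ['mees', '    S sg nom']
max_len = max(len(l) for l in old + new)
format_str = '{:<' + str(max_len + 1) + '}'
for l1, l2 in zip(old, new):
    diff = '*** ' if l1 != l2 else '    '
    print(diff + format_str.format(l1) + format_str.format(l2))
#     mees          mees
# ***     S sg n        S sg nom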
python | def increment_cell_value(self, column_family_id, column, int_value):
"""Increments a value in an existing cell.
Assumes the value in the cell is stored as a 64 bit integer
serialized to bytes.
.. note::
This method adds a read-modify rule protobuf to the accumulated
read-modify rules on this row, but does not make an API
request. To actually send an API request (with the rules) to the
Google Cloud Bigtable API, call :meth:`commit`.
For example:
.. literalinclude:: snippets_table.py
:start-after: [START bigtable_row_increment_cell_value]
:end-before: [END bigtable_row_increment_cell_value]
:type column_family_id: str
:param column_family_id: The column family that contains the column.
Must be of the form
``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.
:type column: bytes
:param column: The column within the column family where the cell
is located.
:type int_value: int
:param int_value: The value to increment the existing value in the cell
by. If the targeted cell is unset, it will be treated
as containing a zero. Otherwise, the targeted cell
must contain an 8-byte value (interpreted as a 64-bit
big-endian signed integer), or the entire request
will fail.
"""
column = _to_bytes(column)
rule_pb = data_v2_pb2.ReadModifyWriteRule(
family_name=column_family_id,
column_qualifier=column,
increment_amount=int_value,
)
self._rule_pb_list.append(rule_pb) |
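A hedged usage sketch against the google-cloud-bigtable client this method belongs to (project, instance, table and keys are hypothetical; `append=True` yields the row type that accumulates read-modify rules):

from google.cloud import bigtable

client = bigtable.Client(project='my-project')
table = client.instance('my-instance').table('my-table')
row = table.row(b'user#42', append=True)
row.increment_cell_value('stats', b'visits', 1)
row.commit()  # only now is the accumulated read-modify rule sent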
java | public Function<Service<HttpRequest, HttpResponse>, HttpAuthService> newDecorator() {
final Authorizer<HttpRequest> authorizer = authorizer();
final AuthSuccessHandler<HttpRequest, HttpResponse> successHandler = this.successHandler;
final AuthFailureHandler<HttpRequest, HttpResponse> failureHandler = this.failureHandler;
return service -> new HttpAuthService(service, authorizer, successHandler, failureHandler);
} |
java | @POST
@RolesAllowed("administrators")
@Path("/remove-workspace/{repositoryName}/{workspaceName}/{forseSessionClose}/")
public Response removeWorkspace(@Context UriInfo uriInfo, @PathParam("repositoryName") String repositoryName,
@PathParam("workspaceName") String workspaceName, @PathParam("forseSessionClose") Boolean forseSessionClose)
{
String errorMessage = "";
Status status;
try
{
ManageableRepository repository = repositoryService.getRepository(repositoryName);
if (forseSessionClose)
{
forceCloseSession(repositoryName, workspaceName);
}
if (repository.canRemoveWorkspace(workspaceName))
{
repository.removeWorkspace(workspaceName);
repositoryService.getConfig().retain(); // save configuration to persistence (file or persister)
return Response.ok().build();
}
return Response.status(Status.CONFLICT).entity(
"Can't remove workspace " + workspaceName + " in repository " + repositoryName).cacheControl(NO_CACHE)
.build();
}
catch (RepositoryException e)
{
if (log.isDebugEnabled())
{
log.error(e.getMessage(), e);
}
errorMessage = e.getMessage();
status = Status.NOT_FOUND;
}
catch (RepositoryConfigurationException e)
{
if (log.isDebugEnabled())
{
log.error(e.getMessage(), e);
}
errorMessage = e.getMessage();
status = Status.NOT_FOUND;
}
catch (Throwable e) //NOSONAR
{
if (log.isDebugEnabled())
{
log.error(e.getMessage(), e);
}
errorMessage = e.getMessage();
status = Status.INTERNAL_SERVER_ERROR;
}
return Response.status(status).entity(errorMessage).type(MediaType.TEXT_PLAIN_TYPE).cacheControl(NO_CACHE)
.build();
} |
java | @Override
public void unregister(Monitor<?> monitor) {
Preconditions.checkNotNull(monitor, "monitor");
try {
monitors.remove(monitor);
} catch (Exception e) {
throw new IllegalArgumentException("invalid object", e);
}
} |
python | def addSlider2D(sliderfunc, xmin, xmax, value=None, pos=4, s=.04,
title='', c=None, showValue=True):
"""Add a slider widget which can call an external custom function.
:param sliderfunc: external function to be called by the widget
:param float xmin: lower value
:param float xmax: upper value
:param float value: current value
:param list pos: position corner number: horizontal [1-4] or vertical [11-14]
it can also be specified by corners coordinates [(x1,y1), (x2,y2)]
:param str title: title text
:param bool showValue: if true current value is shown
.. hint:: |sliders| |sliders.py|_
"""
vp = settings.plotter_instance
if c is None: # automatic black or white
c = (0.8, 0.8, 0.8)
if numpy.sum(colors.getColor(vp.backgrcol)) > 1.5:
c = (0.2, 0.2, 0.2)
c = colors.getColor(c)
if value is None or value < xmin:
value = xmin
sliderRep = vtk.vtkSliderRepresentation2D()
sliderRep.SetMinimumValue(xmin)
sliderRep.SetMaximumValue(xmax)
sliderRep.SetValue(value)
sliderRep.SetSliderLength(0.015)
sliderRep.SetSliderWidth(0.025)
sliderRep.SetEndCapLength(0.0015)
sliderRep.SetEndCapWidth(0.0125)
sliderRep.SetTubeWidth(0.0075)
sliderRep.GetPoint1Coordinate().SetCoordinateSystemToNormalizedDisplay()
sliderRep.GetPoint2Coordinate().SetCoordinateSystemToNormalizedDisplay()
if utils.isSequence(pos):
sliderRep.GetPoint1Coordinate().SetValue(pos[0][0], pos[0][1])
sliderRep.GetPoint2Coordinate().SetValue(pos[1][0], pos[1][1])
elif pos == 1: # top-left horizontal
sliderRep.GetPoint1Coordinate().SetValue(0.04, 0.96)
sliderRep.GetPoint2Coordinate().SetValue(0.45, 0.96)
elif pos == 2:
sliderRep.GetPoint1Coordinate().SetValue(0.55, 0.96)
sliderRep.GetPoint2Coordinate().SetValue(0.96, 0.96)
elif pos == 3:
sliderRep.GetPoint1Coordinate().SetValue(0.04, 0.04)
sliderRep.GetPoint2Coordinate().SetValue(0.45, 0.04)
elif pos == 4: # bottom-right
sliderRep.GetPoint1Coordinate().SetValue(0.55, 0.04)
sliderRep.GetPoint2Coordinate().SetValue(0.96, 0.04)
elif pos == 5: # bottom margin horizontal
sliderRep.GetPoint1Coordinate().SetValue(0.04, 0.04)
sliderRep.GetPoint2Coordinate().SetValue(0.96, 0.04)
elif pos == 11: # top-left vertical
sliderRep.GetPoint1Coordinate().SetValue(0.04, 0.54)
sliderRep.GetPoint2Coordinate().SetValue(0.04, 0.9)
elif pos == 12:
sliderRep.GetPoint1Coordinate().SetValue(0.96, 0.54)
sliderRep.GetPoint2Coordinate().SetValue(0.96, 0.9)
elif pos == 13:
sliderRep.GetPoint1Coordinate().SetValue(0.04, 0.1)
sliderRep.GetPoint2Coordinate().SetValue(0.04, 0.54)
elif pos == 14: # bottom-right vertical
sliderRep.GetPoint1Coordinate().SetValue(0.96, 0.1)
sliderRep.GetPoint2Coordinate().SetValue(0.96, 0.54)
elif pos == 15: # right margin vertical
sliderRep.GetPoint1Coordinate().SetValue(0.96, 0.1)
sliderRep.GetPoint2Coordinate().SetValue(0.96, 0.9)
if showValue:
if isinstance(xmin, int) and isinstance(xmax, int):
frm = "%0.0f"
else:
frm = "%0.1f"
sliderRep.SetLabelFormat(frm) # default is '%0.3g'
sliderRep.GetLabelProperty().SetShadow(0)
sliderRep.GetLabelProperty().SetBold(0)
sliderRep.GetLabelProperty().SetOpacity(0.6)
sliderRep.GetLabelProperty().SetColor(c)
if isinstance(pos, int) and pos > 10:
sliderRep.GetLabelProperty().SetOrientation(90)
else:
sliderRep.ShowSliderLabelOff()
sliderRep.GetTubeProperty().SetColor(c)
sliderRep.GetTubeProperty().SetOpacity(0.6)
sliderRep.GetSliderProperty().SetColor(c)
sliderRep.GetSelectedProperty().SetColor(0.8, 0, 0)
sliderRep.GetCapProperty().SetColor(c)
if title:
sliderRep.SetTitleText(title)
sliderRep.SetTitleHeight(0.012)
sliderRep.GetTitleProperty().SetShadow(0)
sliderRep.GetTitleProperty().SetColor(c)
sliderRep.GetTitleProperty().SetOpacity(0.6)
sliderRep.GetTitleProperty().SetBold(0)
if not utils.isSequence(pos):
if isinstance(pos, int) and pos > 10:
sliderRep.GetTitleProperty().SetOrientation(90)
else:
if abs(pos[0][0] - pos[1][0]) < 0.1:
sliderRep.GetTitleProperty().SetOrientation(90)
sliderWidget = vtk.vtkSliderWidget()
sliderWidget.SetInteractor(vp.interactor)
sliderWidget.SetAnimationModeToJump()
sliderWidget.SetRepresentation(sliderRep)
sliderWidget.AddObserver("InteractionEvent", sliderfunc)
sliderWidget.EnabledOn()
vp.sliders.append([sliderWidget, sliderfunc])
return sliderWidget |
java | public static int getPartitionForParameter(VoltType partitionType, Object invocationParameter) {
return instance.get().getSecond().getHashedPartitionForParameter(partitionType, invocationParameter);
} |
python | def confidence_interval_min_width(mu, post, alpha=0.9):
'''
Returns the minimal-width confidence interval [mu_low, mu_high] of
confidence level alpha for a posterior distribution post on the parameter mu.
'''
if not 0 < alpha < 1:
raise ValueError("Confidence level must be in (0,1).")
# choose a step size for the sliding confidence window
alpha_step = 0.01
# initialize the lower and upper limits
mu_low = numpy.min(mu)
mu_high = numpy.max(mu)
# find the smallest window (by delta-mu) stepping by dalpha
for ai in numpy.arange(0, 1 - alpha, alpha_step):
ml = compute_lower_limit(mu, post, 1 - ai)
mh = compute_upper_limit(mu, post, alpha + ai)
if mh - ml < mu_high - mu_low:
mu_low = ml
mu_high = mh
return mu_low, mu_high |
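`compute_lower_limit` and `compute_upper_limit` are not shown in this snippet; under the natural assumption that they return posterior quantiles, the search can be exercised end to end:

import numpy

def compute_lower_limit(mu, post, alpha):
    # stand-in: lower limit with coverage alpha, i.e. the (1 - alpha) quantile
    cdf = numpy.cumsum(post) / numpy.sum(post)
    return mu[min(numpy.searchsorted(cdf, 1 - alpha), len(mu) - 1)]

def compute_upper_limit(mu, post, alpha):
    # stand-in: upper limit with coverage alpha, i.e. the alpha quantile
    cdf = numpy.cumsum(post) / numpy.sum(post)
    return mu[min(numpy.searchsorted(cdf, alpha), len(mu) - 1)]

mu = numpy.linspace(0.0, 10.0, 1001)
post = numpy.exp(-0.5 * (mu - 3.0) ** 2)  # unnormalized Gaussian posterior
print(confidence_interval_min_width(mu, post, alpha=0.9))
# -> roughly (1.36, 4.64), symmetric about the mode of a symmetric posterior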
python | def get_context(self):
"""
The context sent to templates for rendering includes the form's cleaned
data and the current Request object.
"""
if not self.is_valid():
raise ValueError("Cannot generate Context when form is invalid.")
return dict(request=self.request, **self.cleaned_data) |
python | def add_item(self, text, font=("default", 12, "bold"), backgroundcolor="yellow", textcolor="black",
highlightcolor="blue"):
"""
Add a new item on the Canvas.
:param text: text to display
:type text: str
:param font: font of the text
:type font: tuple or :class:`~tkinter.font.Font`
:param backgroundcolor: background color
:type backgroundcolor: str
:param textcolor: text color
:type textcolor: str
:param highlightcolor: the color of the text when the item is selected
:type highlightcolor: str
"""
item = self.canvas.create_text(0, 0, anchor=tk.NW, text=text, font=font, fill=textcolor, tag="item")
rectangle = self.canvas.create_rectangle(self.canvas.bbox(item), fill=backgroundcolor)
self.canvas.tag_lower(rectangle, item)
self.items[item] = rectangle
if callable(self._callback_add):
self._callback_add(item, rectangle)
self.item_colors[item] = (backgroundcolor, textcolor, highlightcolor) |
python | def add_exec_to_user(
self,
name,
env,
command,
args,
**attrs
):
"""Add an exec option to your user."""
# Add exec option.
exec_options = {
'command': command,
'env': env,
'args': args,
}
exec_options.update(attrs)
# Add exec to user.
self.add_to_user(name=name, exec=exec_options) |
java | protected AstEval eval(boolean required, boolean deferred) throws ScanException, ParseException {
AstEval v = null;
Symbol start_eval = deferred ? START_EVAL_DEFERRED : START_EVAL_DYNAMIC;
if (token.getSymbol() == start_eval) {
consumeToken();
v = new AstEval(expr(true), deferred);
consumeToken(END_EVAL);
} else if (required) {
fail(start_eval);
}
return v;
} |
java | private static List<BuiltInQProfile> reduceBuiltInQualityProfiles(Context context) throws SQLException {
ListMultimap<String, BuiltInQProfile> builtInQPByLanguages = ArrayListMultimap.create();
List<BuiltInQProfile> builtInQProfiles = context.prepareSelect("SELECT kee, language, name FROM rules_profiles WHERE is_built_in = ?")
.setBoolean(1, true)
.list(row -> new BuiltInQProfile(row.getString(1), row.getString(2), row.getString(3)));
builtInQProfiles.forEach(builtInQProfile -> builtInQPByLanguages.put(builtInQProfile.language, builtInQProfile));
// Filter all built in quality profiles to have only one by language
// and prefer the one named "Sonar way"
builtInQPByLanguages.keySet().forEach(l -> {
List<BuiltInQProfile> qps = builtInQPByLanguages.get(l);
if (qps.size() > 1) {
BuiltInQProfile sonarWay = qps.stream().filter(qp -> qp.name.equals("Sonar way"))
.findFirst()
.orElse(qps.get(0));
qps.forEach(qp -> {
if (qp.kee.equals(sonarWay.kee)) {
return;
}
builtInQProfiles.remove(qp);
});
}
});
return builtInQProfiles;
} |
python | def from_dict(self, data, recursive=1):
"""Populate the resource from a python dict
:param recursive: level of recursion for fetching resources
:type recursive: int
"""
# Find other linked resources
data = self._encode_resource(data, recursive=recursive)
self.data = data |
python | def update_user(self, username, profile, owner_privkey):
"""
Update profile_hash on blockchain
"""
url = self.base_url + "/users/" + username + "/update"
owner_pubkey = get_pubkey_from_privkey(owner_privkey)
payload = {
'profile': profile,
'owner_pubkey': owner_pubkey
}
resp = self._post_request(url, payload)
try:
unsigned_tx = resp['unsigned_tx']
except Exception:
return resp
dht_resp = write_dht_profile(profile)
dht_resp = dht_resp[0]
if not dht_resp['status'] == 'success':
return {"error": "DHT write failed"}
# sign all unsigned inputs
signed_tx = sign_all_unsigned_inputs(owner_privkey, unsigned_tx)
return self.broadcast_transaction(signed_tx) |
java | public IpcLogEntry withServerZone(String zone) {
this.serverZone = zone;
if (serverRegion == null) {
serverRegion = extractRegionFromZone(zone);
}
return this;
} |
python | def insert_load_command_into_header(header, load_command):
""" Inserts the given load command into the header and adjust its size. """
lc, cmd, path = load_command
header.commands.append((lc, cmd, path))
header.header.ncmds += 1
header.changedHeaderSizeBy(lc.cmdsize) |
java | public Serializable copy(Serializable obj)
{
final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
if (isTraceOn && tc.isDebugEnabled())
Tr.debug(tc, "copy : " + Util.identity(obj));
// -----------------------------------------------------------------------
// Optimize copyObject by special casing null, immutable objects,
// and primitive arrays. All of these can be handled much more
// efficiently than performing a 'deep' copy. d154342.7
// -----------------------------------------------------------------------
if (obj == null)
{
return obj;
}
Class<?> objType = obj.getClass();
// if the object is a primitive wrapper class, then return it.
if ((objType == String.class) ||
(objType == Integer.class) ||
(objType == Long.class) ||
(objType == Boolean.class) ||
(objType == Byte.class) ||
(objType == Character.class) ||
(objType == Float.class) ||
(objType == Double.class) ||
(objType == Short.class))
{
// Yes, so do nothing...
return obj;
}
Class<?> componentType = objType.getComponentType();
// If this is an array of primitives take a clone instead of deep copy
if (componentType != null && componentType.isPrimitive())
{
if (componentType == boolean.class)
return ((boolean[]) obj).clone();
if (componentType == byte.class)
return ((byte[]) obj).clone();
if (componentType == char.class)
return ((char[]) obj).clone();
if (componentType == short.class)
return ((short[]) obj).clone();
if (componentType == int.class)
return ((int[]) obj).clone();
if (componentType == long.class)
return ((long[]) obj).clone();
if (componentType == float.class)
return ((float[]) obj).clone();
if (componentType == double.class)
return ((double[]) obj).clone();
}
// End d154342.7
if (isTraceOn && tc.isDebugEnabled())
Tr.debug(tc, "copy : making a deep copy");
return copySerializable(obj);
} |
java | public static sslpolicylabel[] get_filtered(nitro_service service, String filter) throws Exception{
sslpolicylabel obj = new sslpolicylabel();
options option = new options();
option.set_filter(filter);
sslpolicylabel[] response = (sslpolicylabel[]) obj.getfiltered(service, option);
return response;
} |
python | def parse_environs(name, parse_class=ParseResult, **defaults):
"""
same as parse_environ() but will also check name_1, name_2, ..., name_N and
return all the found dsn strings from the environment
this will look for name, and name_N (where N is 1 through infinity) in the environment,
if it finds them, it will assume they are dsn urls and will parse them.
The num checks (eg PROM_DSN_1, PROM_DSN_2) go in order and stop at the first
missing number, so you can't skip from PROM_DSN_1 to PROM_DSN_3 - make sure your numbered dsns are sequential (eg, 1, 2, 3, ...)
example --
export DSN_1=some.Interface://host:port/dbname#i1
export DSN_2=some.Interface://host2:port/dbname2#i2
$ python
>>> import dsnparse
>>> print dsnparse.parse_environs('DSN') # prints list with 2 parsed dsn objects
:param dsn_env_name: string, the name of the environment variables, _1, ... will be appended
:param parse_class: ParseResult, the class that will be used to hold parsed values
:returns: list all the found dsn strings in the environment with the given name prefix
"""
ret = []
if name in os.environ:
ret.append(parse_environ(name, parse_class, **defaults))
# now try importing _1 -> _N dsns
increment_name = lambda name, num: '{name}_{num}'.format(name=name, num=num)
dsn_num = 0 if increment_name(name, 0) in os.environ else 1
dsn_env_num_name = increment_name(name, dsn_num)
if dsn_env_num_name in os.environ:
try:
while True:
ret.append(parse_environ(dsn_env_num_name, parse_class, **defaults))
dsn_num += 1
dsn_env_num_name = increment_name(name, dsn_num)
except KeyError:
pass
return ret |
python | def compute_work_statistics(self):
"""Computes statistics from all work pieces stored in this class."""
result = {}
for v in itervalues(self.work):
submission_id = v['submission_id']
if submission_id not in result:
result[submission_id] = {
'completed': 0,
'num_errors': 0,
'error_messages': set(),
'eval_times': [],
'min_eval_time': None,
'max_eval_time': None,
'mean_eval_time': None,
'median_eval_time': None,
}
if not v['is_completed']:
continue
result[submission_id]['completed'] += 1
if 'error' in v and v['error']:
result[submission_id]['num_errors'] += 1
result[submission_id]['error_messages'].add(v['error'])
else:
result[submission_id]['eval_times'].append(float(v['elapsed_time']))
for v in itervalues(result):
if v['eval_times']:
v['min_eval_time'] = np.min(v['eval_times'])
v['max_eval_time'] = np.max(v['eval_times'])
v['mean_eval_time'] = np.mean(v['eval_times'])
v['median_eval_time'] = np.median(v['eval_times'])
return result |
java | public static Builder addValuesToAnnotationArgument(
AnnotationTree annotation,
String parameterName,
Collection<String> newValues,
VisitorState state) {
if (annotation.getArguments().isEmpty()) {
String parameterPrefix = parameterName.equals("value") ? "" : (parameterName + " = ");
return SuggestedFix.builder()
.replace(
annotation,
annotation
.toString()
.replaceFirst("\\(\\)", "(" + parameterPrefix + newArgument(newValues) + ")"));
}
Optional<ExpressionTree> maybeExistingArgument = findArgument(annotation, parameterName);
if (!maybeExistingArgument.isPresent()) {
return SuggestedFix.builder()
.prefixWith(
annotation.getArguments().get(0),
parameterName + " = " + newArgument(newValues) + ", ");
}
ExpressionTree existingArgument = maybeExistingArgument.get();
if (!existingArgument.getKind().equals(NEW_ARRAY)) {
return SuggestedFix.builder()
.replace(
existingArgument, newArgument(state.getSourceForNode(existingArgument), newValues));
}
NewArrayTree newArray = (NewArrayTree) existingArgument;
if (newArray.getInitializers().isEmpty()) {
return SuggestedFix.builder().replace(newArray, newArgument(newValues));
} else {
return SuggestedFix.builder()
.postfixWith(getLast(newArray.getInitializers()), ", " + Joiner.on(", ").join(newValues));
}
} |
java | public ResourceFormatGenerator getGeneratorForFileExtension(final String extension) throws
UnsupportedFormatException {
for (final ResourceFormatGenerator generator : listGenerators()) {
if (generator.getFileExtensions().contains(extension)) {
return generator;
}
}
throw new UnsupportedFormatException("No provider available to parse file extension: " + extension);
} |
python | def _is_valid_ins(self, ins_ir):
"""Check for instruction validity as a gadgets.
"""
invalid_instrs = [
ReilMnemonic.JCC,
ReilMnemonic.UNDEF,
ReilMnemonic.UNKN,
]
return not any([i.mnemonic in invalid_instrs for i in ins_ir]) |
java | @Nonnull
public FineUploader5Validation setAllowedExtensions (@Nullable final Collection <String> aAllowedExtensions)
{
m_aValidationAllowedExtensions.setAll (aAllowedExtensions);
return this;
} |
java | void closeAllScopeModules() {
synchronized (puScopes) {
for (String module : puScopes.keySet()) {
close(module, false); //PK59717
}
puScopes.clear();
}
} |
java | protected synchronized void connect_event_channel(ConnectionStructure cs) throws DevFailed {
// Get a reference to an EventChannel for
// this device server from the tango database
DeviceProxy adminDevice = DeviceProxyFactory.get(
cs.channelName, cs.database.getUrl().getTangoHost());
DbEventImportInfo received = getEventImportInfo(cs.channelName, cs.database, adminDevice);
// Keep host name without Fully Qualified Domain Name
int idx = received.host.indexOf('.');
if (idx > 0)
received.host = received.host.substring(0, idx);
// Connect the notify daemon
connectToNotificationDaemon(received);
StructuredProxyPushSupplier
structuredProxyPushSupplier = getStructuredProxyPushSupplier(cs.channelName);
if (cs.reconnect) {
EventChannelStruct eventChannelStruct = channel_map.get(cs.channelName);
eventChannelStruct.eventChannel = eventChannel;
eventChannelStruct.structuredProxyPushSupplier = structuredProxyPushSupplier;
eventChannelStruct.last_heartbeat = System.currentTimeMillis();
eventChannelStruct.heartbeat_skipped = false;
eventChannelStruct.host = received.host;
eventChannelStruct.has_notifd_closed_the_connection = 0;
try {
int filter_id = eventChannelStruct.heartbeat_filter_id;
Filter filter = eventChannelStruct.structuredProxyPushSupplier.get_filter(filter_id);
eventChannelStruct.structuredProxyPushSupplier.remove_filter(filter_id);
filter.destroy();
} catch (FilterNotFound e) {
// Do Nothing
}
// Add filter for heartbeat events on channelName
String constraint_expr = "$event_name == \'heartbeat\'";
eventChannelStruct.heartbeat_filter_id = add_filter_for_channel(eventChannelStruct, constraint_expr);
setEventChannelTimeoutMillis(eventChannelStruct.eventChannel, 3000);
} else {
EventChannelStruct newEventChannelStruct = new EventChannelStruct();
newEventChannelStruct.eventChannel = eventChannel;
newEventChannelStruct.structuredProxyPushSupplier = structuredProxyPushSupplier;
newEventChannelStruct.last_heartbeat = System.currentTimeMillis();
newEventChannelStruct.heartbeat_skipped = false;
newEventChannelStruct.adm_device_proxy = adminDevice;
newEventChannelStruct.host = received.host;
newEventChannelStruct.has_notifd_closed_the_connection = 0;
newEventChannelStruct.consumer = this;
// Add filter for heartbeat events on channelName
String constraint_expr = "$event_name == \'heartbeat\'";
newEventChannelStruct.heartbeat_filter_id = add_filter_for_channel(newEventChannelStruct, constraint_expr);
channel_map.put(cs.channelName, newEventChannelStruct);
setEventChannelTimeoutMillis(newEventChannelStruct.eventChannel, 3000);
}
} |
python | def validate(self, method, *args, **kwargs):
"""Validate authentication and values passed to the specified method.
Raises a PyCronofyValidationError on error.
:param string method: Method name to check.
:param *args: Arguments for "Method".
:param **kwargs: Keyword arguments for "Method".
"""
validate(method, self.auth, *args, **kwargs) |
java | private void parseStartTag(boolean xmldecl, boolean throwOnResolveFailure) throws IOException, KriptonRuntimeException {
if (!xmldecl) {
read('<');
}
name = readName();
attributeCount = 0;
while (true) {
skip();
if (position >= limit && !fillBuffer(1)) {
checkRelaxed(UNEXPECTED_EOF);
return;
}
int c = buffer[position];
if (xmldecl) {
if (c == '?') {
position++;
read('>');
return;
}
} else {
if (c == '/') {
degenerated = true;
position++;
skip();
read('>');
break;
} else if (c == '>') {
position++;
break;
}
}
String attrName = readName();
int i = (attributeCount++) * 4;
attributes = ensureCapacity(attributes, i + 4);
attributes[i] = "";
attributes[i + 1] = null;
attributes[i + 2] = attrName;
skip();
if (position >= limit && !fillBuffer(1)) {
checkRelaxed(UNEXPECTED_EOF);
return;
}
if (buffer[position] == '=') {
position++;
skip();
if (position >= limit && !fillBuffer(1)) {
checkRelaxed(UNEXPECTED_EOF);
return;
}
char delimiter = buffer[position];
if (delimiter == '\'' || delimiter == '"') {
position++;
} else if (relaxed) {
delimiter = ' ';
} else {
throw new KriptonRuntimeException("attr value delimiter missing!", true, this.getLineNumber(), this.getColumnNumber(), getPositionDescription(), null);
}
attributes[i + 3] = readValue(delimiter, true, throwOnResolveFailure, ValueContext.ATTRIBUTE);
if (delimiter != ' ') {
position++; // end quote
}
} else if (relaxed) {
attributes[i + 3] = attrName;
} else {
checkRelaxed("Attr.value missing f. " + attrName);
attributes[i + 3] = attrName;
}
}
int sp = depth++ * 4;
elementStack = ensureCapacity(elementStack, sp + 4);
elementStack[sp + 3] = name;
if (depth >= nspCounts.length) {
int[] bigger = new int[depth + 4];
System.arraycopy(nspCounts, 0, bigger, 0, nspCounts.length);
nspCounts = bigger;
}
nspCounts[depth] = nspCounts[depth - 1];
if (processNsp) {
adjustNsp();
} else {
namespace = "";
}
// For consistency with Expat, add default attributes after fixing
// namespaces.
if (defaultAttributes != null) {
Map<String, String> elementDefaultAttributes = defaultAttributes.get(name);
if (elementDefaultAttributes != null) {
for (Map.Entry<String, String> entry : elementDefaultAttributes.entrySet()) {
if (getAttributeValue(null, entry.getKey()) != null) {
continue; // an explicit value overrides the default
}
int i = (attributeCount++) * 4;
attributes = ensureCapacity(attributes, i + 4);
attributes[i] = "";
attributes[i + 1] = null;
attributes[i + 2] = entry.getKey();
attributes[i + 3] = entry.getValue();
}
}
}
elementStack[sp] = namespace;
elementStack[sp + 1] = prefix;
elementStack[sp + 2] = name;
} |
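
The parser above stores attributes in a flat string array, four slots per attribute (namespace, prefix, name, value), grown on demand via ensureCapacity. A small Python sketch of that layout; the helper name and growth strategy are illustrative, not the parser's actual API.

# Sketch of the flat 4-slots-per-attribute layout:
# [ns0, prefix0, name0, value0, ns1, prefix1, name1, value1, ...]
def add_attribute(attributes, count, name, value):
    i = count * 4
    # grow the backing list if needed (ensureCapacity analogue)
    while len(attributes) < i + 4:
        attributes.append(None)
    attributes[i] = ''        # namespace, filled in later by adjustNsp()
    attributes[i + 1] = None  # prefix
    attributes[i + 2] = name
    attributes[i + 3] = value
    return count + 1

attrs, n = [], 0
n = add_attribute(attrs, n, 'id', '42')
n = add_attribute(attrs, n, 'lang', 'en')
assert (attrs[2], attrs[3]) == ('id', '42')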
python | def restrict_to_parent(self, target, parent):
"""Restrict target to parent structure boundaries."""
if not (parent['start'] < target < parent['end']):
target = parent['end']
return target |
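
A quick illustration of the clamping behavior; note that a target before parent['start'] is also snapped to parent['end'], not to the start. The standalone copy below exists only to make the example self-contained.

def restrict_to_parent(target, parent):  # standalone copy for illustration
    if not (parent['start'] < target < parent['end']):
        target = parent['end']
    return target

parent = {'start': 10, 'end': 50}
assert restrict_to_parent(30, parent) == 30   # inside: unchanged
assert restrict_to_parent(70, parent) == 50   # past end: clamped
assert restrict_to_parent(5, parent) == 50    # before start also maps to end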
python | def items(self) -> Tuple[Tuple[str, "Package"], ...]: # type: ignore
"""
    Return a tuple of (package name, `Package` instance) pairs for the
    available build dependencies.
"""
item_dict = {
name: self.build_dependencies.get(name) for name in self.build_dependencies
}
return tuple(item_dict.items()) |
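
A hypothetical usage sketch; the Deps container and the package names are stand-ins for the real class, which only needs a dict-like build_dependencies mapping name -> Package.

class Deps:
    def __init__(self, build_dependencies):
        self.build_dependencies = build_dependencies

    def items(self):
        item_dict = {
            name: self.build_dependencies.get(name)
            for name in self.build_dependencies
        }
        return tuple(item_dict.items())

deps = Deps({'web3': 'web3-package', 'eth-abi': 'eth-abi-package'})
for name, package in deps.items():
    print(name, package)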
python | def absent(name, **kwargs):
'''
This function deletes the specified repo on the system, if it exists. It
is essentially a wrapper around pkg.del_repo.
name
The name of the package repo, as it would be referred to when running
the regular package manager commands.
**UBUNTU-SPECIFIC OPTIONS**
ppa
On Ubuntu, you can take advantage of Personal Package Archives on
Launchpad simply by specifying the user and archive name.
.. code-block:: yaml
logstash-ppa:
pkgrepo.absent:
- ppa: wolfnet/logstash
ppa_auth
For Ubuntu PPAs there can be private PPAs that require authentication
to access. For these PPAs the username/password can be specified. This
is required for matching if the name format uses the ``ppa:`` specifier
and is private (requires username/password to access, which is encoded
in the URI).
.. code-block:: yaml
logstash-ppa:
pkgrepo.absent:
- ppa: wolfnet/logstash
- ppa_auth: username:password
keyid
If passed, then the GPG key corresponding to the passed KeyID will also
be removed.
keyid_ppa : False
If set to ``True``, the GPG key's ID will be looked up from
ppa.launchpad.net and removed, and the ``keyid`` argument will be
ignored.
.. note::
This option will be disregarded unless the ``ppa`` argument is
present.
'''
ret = {'name': name,
'changes': {},
'result': None,
'comment': ''}
if 'ppa' in kwargs and __grains__['os'] in ('Ubuntu', 'Mint'):
name = kwargs.pop('ppa')
if not name.startswith('ppa:'):
name = 'ppa:' + name
remove_key = any(kwargs.get(x) is not None
for x in ('keyid', 'keyid_ppa'))
if remove_key and 'pkg.del_repo_key' not in __salt__:
ret['result'] = False
ret['comment'] = \
'Repo key management is not implemented for this platform'
return ret
try:
repo = __salt__['pkg.get_repo'](name, **kwargs)
except CommandExecutionError as exc:
ret['result'] = False
ret['comment'] = \
'Failed to configure repo \'{0}\': {1}'.format(name, exc)
return ret
if not repo:
ret['comment'] = 'Package repo {0} is absent'.format(name)
ret['result'] = True
return ret
if __opts__['test']:
ret['comment'] = ('Package repo \'{0}\' will be removed. This may '
'cause pkg states to behave differently than stated '
'if this action is repeated without test=True, due '
'to the differences in the configured repositories.'
.format(name))
return ret
try:
__salt__['pkg.del_repo'](repo=name, **kwargs)
except (CommandExecutionError, SaltInvocationError) as exc:
ret['result'] = False
ret['comment'] = exc.strerror
return ret
repos = __salt__['pkg.list_repos']()
if name not in repos:
ret['changes']['repo'] = name
ret['comment'] = 'Removed repo {0}'.format(name)
if not remove_key:
ret['result'] = True
else:
try:
removed_keyid = __salt__['pkg.del_repo_key'](name, **kwargs)
except (CommandExecutionError, SaltInvocationError) as exc:
ret['result'] = False
ret['comment'] += ', but failed to remove key: {0}'.format(exc)
else:
ret['result'] = True
ret['changes']['keyid'] = removed_keyid
ret['comment'] += ', and keyid {0}'.format(removed_keyid)
else:
ret['result'] = False
ret['comment'] = 'Failed to remove repo {0}'.format(name)
return ret |
java | public static void logExceptionRetrieveArchive(final Logger logger,
final ArchiveDescription archive, final Exception e)
{
logger.logException(Level.ERROR, "Exception while accessing archive "
+ archive.toString(), e);
} |
python | def is_cursor_on_first_line(self):
"""Return True if cursor is on the first line"""
cursor = self.textCursor()
cursor.movePosition(QTextCursor.StartOfBlock)
return cursor.atStart() |
java | private static void replaceThis(Node expectedGetprop, Node replacement) {
Node leftChild = expectedGetprop.getFirstChild();
if (leftChild.isThis()) {
expectedGetprop.replaceChild(leftChild, replacement);
} else {
replaceThis(leftChild, replacement);
}
} |
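
The recursion above walks the leftmost child chain of a property access (e.g. this.a.b) until it reaches the `this` node, then swaps it for the replacement. A Python sketch of the same idea on a toy AST; the Node class and kind names are invented for illustration.

# Toy AST: a GETPROP node's first child is either another GETPROP or `this`;
# replace the innermost `this` with `replacement`.
class Node:
    def __init__(self, kind, children=None):
        self.kind = kind
        self.children = children or []

def replace_this(getprop, replacement):
    left = getprop.children[0]
    if left.kind == 'THIS':
        getprop.children[0] = replacement
    else:
        replace_this(left, replacement)

# this.a.b  ->  obj.a.b
tree = Node('GETPROP', [Node('GETPROP', [Node('THIS'), Node('NAME_a')]),
                        Node('NAME_b')])
replace_this(tree, Node('NAME_obj'))
assert tree.children[0].children[0].kind == 'NAME_obj'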
java | public void copyFromLocalFile(boolean delSrc, boolean overwrite,
boolean validate, Path src, Path dst)
throws IOException {
Configuration conf = getConf();
FileUtil.copy(getLocal(conf), src, this, dst, delSrc, overwrite,
validate, conf);
} |
java | public static <T> void mergeFrom(InputStream in, T message, Schema<T> schema,
XMLInputFactory inFactory) throws IOException
{
XMLStreamReader parser = null;
try
{
parser = inFactory.createXMLStreamReader(in, XML_ENCODING);
mergeFrom(parser, message, schema);
}
catch (XMLStreamException e)
{
throw new XmlInputException(e);
}
finally
{
if (parser != null)
{
try
{
parser.close();
}
catch (XMLStreamException e)
{
// ignore
}
}
}
} |
java | public void reset() {
mGestureInProgress = false;
mPointerCount = 0;
for (int i = 0; i < MAX_POINTERS; i++) {
mId[i] = MotionEvent.INVALID_POINTER_ID;
}
} |
java | @Override
public void eSet(int featureID, Object newValue) {
switch (featureID) {
case AfplibPackage.FINISHING_FIDELITY__STP_FIN_EX:
setStpFinEx((Integer)newValue);
return;
case AfplibPackage.FINISHING_FIDELITY__REP_FIN_EX:
setRepFinEx((Integer)newValue);
return;
}
super.eSet(featureID, newValue);
} |
java | public synchronized void lostNodeFound(Address address) {
Preconditions.checkNotNull(address, "address should not be null");
mLostNodes.remove(address);
for (Runnable function : mChangeListeners) {
function.run();
}
} |
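
A minimal Python sketch of the same pattern, assuming a registry that tracks lost nodes and notifies registered callbacks whenever membership changes; all names are illustrative. Snapshotting the listener list and running callbacks outside the lock mirrors the synchronized-method-plus-callback shape above while avoiding re-entrancy deadlocks.

import threading

class NodeRegistry:
    def __init__(self):
        self._lock = threading.Lock()
        self._lost_nodes = set()
        self._change_listeners = []

    def register_listener(self, fn):
        self._change_listeners.append(fn)

    def lost_node_found(self, address):
        if address is None:
            raise ValueError('address should not be None')
        with self._lock:
            self._lost_nodes.discard(address)
            listeners = list(self._change_listeners)
        for fn in listeners:  # run callbacks outside the lock
            fn()

registry = NodeRegistry()
registry.register_listener(lambda: print('membership changed'))
registry.lost_node_found('10.0.0.7:29998')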
python | def _get_minmax_edges(self, edge):
    '''
    Updates the upper and lower depth bounds based on the input edge
    '''
    if isinstance(edge, Line):
        # For instances of the Line class, collect the depth of each node
        depth_vals = np.array([node.depth for node in edge.points])
    else:
        depth_vals = edge[:, 2]
    temp_upper_depth = np.min(depth_vals)
    # Explicit None checks so a genuine depth of 0.0 is not mistaken
    # for an unset value by truthiness
    if self.upper_depth is None or temp_upper_depth < self.upper_depth:
        self.upper_depth = temp_upper_depth
    temp_lower_depth = np.max(depth_vals)
    if self.lower_depth is None or temp_lower_depth > self.lower_depth:
        self.lower_depth = temp_lower_depth
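
A worked example of the depth bookkeeping, using plain numpy arrays as edges (columns: lon, lat, depth). The Surface holder class is a stand-in for the real geometry object.

import numpy as np

class Surface:
    upper_depth = None
    lower_depth = None

    def _get_minmax_edges(self, edge):
        depth_vals = edge[:, 2]
        temp_upper = np.min(depth_vals)
        if self.upper_depth is None or temp_upper < self.upper_depth:
            self.upper_depth = temp_upper
        temp_lower = np.max(depth_vals)
        if self.lower_depth is None or temp_lower > self.lower_depth:
            self.lower_depth = temp_lower

surf = Surface()
surf._get_minmax_edges(np.array([[0., 0., 5.], [0., 1., 12.]]))
surf._get_minmax_edges(np.array([[0., 0., 2.], [0., 1., 9.]]))
assert (surf.upper_depth, surf.lower_depth) == (2., 12.)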
python | def normalize_likes(sql):
"""
Normalize and wrap LIKE statements
    :type sql: str
:rtype: str
"""
sql = sql.replace('%', '')
# LIKE '%bot'
sql = re.sub(r"LIKE '[^\']+'", 'LIKE X', sql)
# or all_groups LIKE X or all_groups LIKE X
matches = re.finditer(r'(or|and) [^\s]+ LIKE X', sql, flags=re.IGNORECASE)
matches = [match.group(0) for match in matches] if matches else None
if matches:
for match in set(matches):
sql = re.sub(r'(\s?' + re.escape(match) + ')+', ' ' + match + ' ...', sql)
return sql |
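
A usage sketch, assuming normalize_likes() from above is in scope; the expected output shown in the comment is inferred from the regexes, not taken from the library's documentation.

query = ("SELECT id FROM log WHERE msg LIKE '%bot%' "
         "OR msg LIKE '%crawler%' OR msg LIKE '%spider%'")
print(normalize_likes(query))
# -> SELECT id FROM log WHERE msg LIKE X OR msg LIKE X ...
#    (consecutive repeated "OR msg LIKE X" terms collapse into one + "...")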
java | public void recolorQuery() throws Exception {
PrefixManager man = apic.getMutablePrefixManager();
String input = doc.getText(0, doc.getLength());
ImmutableList<TargetAtom> current_query = parse(input, man);
if (current_query == null) {
JOptionPane.showMessageDialog(null, "An error occured while parsing the mappings. For more info, see the logs.");
throw new Exception("Unable to parse the query: " + input + ", " + parsingException);
}
input = doc.getText(0, doc.getLength());
resetStyles();
int pos = input.indexOf("(", 0);
while (pos != -1) {
doc.setCharacterAttributes(pos, 1, brackets, false);
pos = input.indexOf("(", pos + 1);
}
pos = input.indexOf(")", 0);
while (pos != -1) {
doc.setCharacterAttributes(pos, 1, brackets, false);
pos = input.indexOf(")", pos + 1);
}
pos = input.indexOf(".", 0);
while (pos != -1) {
doc.setCharacterAttributes(pos, 1, black, false);
pos = input.indexOf(".", pos + 1);
}
pos = input.indexOf(",", 0);
while (pos != -1) {
doc.setCharacterAttributes(pos, 1, black, false);
pos = input.indexOf(",", pos + 1);
}
pos = input.indexOf(":", 0);
while (pos != -1) {
doc.setCharacterAttributes(pos, 1, black, false);
pos = input.indexOf(":", pos + 1);
}
MutableOntologyVocabulary vocabulary = apic.getCurrentVocabulary();
for (TargetAtom atom : current_query) {
Optional<IRI> optionalPredicateIri = atom.getPredicateIRI();
if (optionalPredicateIri.isPresent()) {
IRI predicateIri = optionalPredicateIri.get();
String shortIRIForm = man.getShortForm(predicateIri.getIRIString());
if (vocabulary.classes().contains(predicateIri)) {
ColorTask task = new ColorTask(shortIRIForm, clazz);
tasks.add(task);
} else if (vocabulary.objectProperties().contains(predicateIri)) {
ColorTask task = new ColorTask(shortIRIForm, objectProp);
tasks.add(task);
} else if (vocabulary.dataProperties().contains(predicateIri)) {
ColorTask task = new ColorTask(shortIRIForm, dataProp);
tasks.add(task);
} else if (vocabulary.annotationProperties().contains(predicateIri)) {
ColorTask task = new ColorTask(shortIRIForm, annotProp);
tasks.add(task);
}
}
ImmutableList<ImmutableTerm> substitutedTerms = atom.getSubstitutedTerms();
RDFAtomPredicate atomPredicate = (RDFAtomPredicate) atom.getProjectionAtom().getPredicate();
ImmutableTerm term1 = atomPredicate.getSubject(substitutedTerms);
ImmutableTerm term2 = atomPredicate.getObject(substitutedTerms);
if (term1 instanceof IRIConstant) {
String rendered = man.getShortForm(((IRIConstant) term1).getIRI().toString());
ColorTask task = new ColorTask(rendered, individual);
tasks.add(task);
}
if (term2 instanceof IRIConstant) {
String rendered = man.getShortForm(((IRIConstant) term2).getIRI().toString());
ColorTask task = new ColorTask(rendered, individual);
tasks.add(task);
}
}
ColorTask[] taskArray = order(tasks);
for (int i = 0; i < taskArray.length; i++) {
if (taskArray[i].text != null) {
int index = input.indexOf(taskArray[i].text, 0);
while (index != -1) {
doc.setCharacterAttributes(index, taskArray[i].text.length(), taskArray[i].set, true);
index = input.indexOf(taskArray[i].text, index + 1);
}
}
}
tasks.clear();
} |
java | public Extractor getExtractor() {
try {
this.rawSourceExtractor = getSource().getExtractor(this.taskState);
boolean throttlingEnabled = this.taskState.getPropAsBoolean(ConfigurationKeys.EXTRACT_LIMIT_ENABLED_KEY,
ConfigurationKeys.DEFAULT_EXTRACT_LIMIT_ENABLED);
if (throttlingEnabled) {
Limiter limiter = DefaultLimiterFactory.newLimiter(this.taskState);
if (!(limiter instanceof NonRefillableLimiter)) {
throw new IllegalArgumentException("The Limiter used with an Extractor should be an instance of "
+ NonRefillableLimiter.class.getSimpleName());
}
return new LimitingExtractorDecorator<>(this.rawSourceExtractor, limiter, this.taskState);
}
return this.rawSourceExtractor;
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
} |
python | def GroupEncoder(field_number, is_repeated, is_packed):
"""Returns an encoder for a group field."""
start_tag = TagBytes(field_number, wire_format.WIRETYPE_START_GROUP)
end_tag = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP)
assert not is_packed
if is_repeated:
def EncodeRepeatedField(write, value):
for element in value:
write(start_tag)
element._InternalSerialize(write)
write(end_tag)
return EncodeRepeatedField
else:
def EncodeField(write, value):
write(start_tag)
value._InternalSerialize(write)
return write(end_tag)
return EncodeField |
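
The factory above returns a specialized closure per field, with the start/end tags precomputed. A stripped-down Python sketch of the same idea; the tag helper is a stub (valid only for small field numbers, where real protobuf varint-encodes tags), and the serialize() method name is invented.

# Stubbed wire-format helper; real protobuf varint-encodes (field << 3) | type.
def tag_bytes(field_number, wire_type):
    return bytes([(field_number << 3) | wire_type])

def group_encoder(field_number, is_repeated):
    start_tag = tag_bytes(field_number, 3)  # WIRETYPE_START_GROUP
    end_tag = tag_bytes(field_number, 4)    # WIRETYPE_END_GROUP
    if is_repeated:
        def encode_repeated(write, values):
            for element in values:
                write(start_tag)
                element.serialize(write)
                write(end_tag)
        return encode_repeated
    def encode(write, value):
        write(start_tag)
        value.serialize(write)
        write(end_tag)
    return encode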
python | def get_single_allele_from_reads(allele_reads):
"""
Given a sequence of AlleleRead objects, which are expected to all have
the same allele, return that allele.
"""
allele_reads = list(allele_reads)
if len(allele_reads) == 0:
raise ValueError("Expected non-empty list of AlleleRead objects")
seq = allele_reads[0].allele
if any(read.allele != seq for read in allele_reads):
raise ValueError("Expected all AlleleRead objects to have same allele '%s', got %s" % (
seq, allele_reads))
return seq |
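
A usage sketch with a namedtuple standing in for the AlleleRead type the function expects; only the .allele attribute matters here.

from collections import namedtuple

# Minimal stand-in for the AlleleRead objects the function expects.
AlleleRead = namedtuple('AlleleRead', ['allele'])

reads = [AlleleRead('ACGT'), AlleleRead('ACGT')]
# get_single_allele_from_reads(reads) -> 'ACGT'
# get_single_allele_from_reads([AlleleRead('A'), AlleleRead('C')])
#   raises ValueError (mismatched alleles)
# get_single_allele_from_reads([]) raises ValueError (empty input)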
java | public Integer convertStringToInteger(String value) {
Integer result;
try {
result = Integer.valueOf(value);
} catch (NumberFormatException e) {
result = DEFAULT_INT_VALUE;
}
return result;
} |
python | def searchExpressionLevelsInDb(
        self, rnaQuantId, names=(), threshold=0.0, startIndex=0,
        maxResults=0):
    """
    :param rnaQuantId: string restrict search by quantification id
    :param names: iterable of expression names to restrict the search to
    :param threshold: float minimum expression values to return
    :param startIndex: int offset of the first returned record
    :param maxResults: int maximum number of records (0 for no limit)
    :return an array of dictionaries, representing the returned data.
    """
sql = ("SELECT * FROM Expression WHERE "
"rna_quantification_id = ? "
"AND expression > ? ")
sql_args = (rnaQuantId, threshold)
if len(names) > 0:
sql += "AND name in ("
sql += ",".join(['?' for name in names])
sql += ") "
for name in names:
sql_args += (name,)
sql += sqlite_backend.limitsSql(
startIndex=startIndex, maxResults=maxResults)
query = self._dbconn.execute(sql, sql_args)
return sqlite_backend.iterativeFetch(query) |
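
The dynamic IN (...) construction above keeps every value parameterized rather than interpolated into the SQL string. A standalone sketch of the same technique against an in-memory sqlite3 table; the schema and sample row are invented for illustration.

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute("CREATE TABLE Expression (rna_quantification_id TEXT, "
             "name TEXT, expression REAL)")
conn.execute("INSERT INTO Expression VALUES ('rq1', 'TP53', 7.5)")

def search(conn, rna_quant_id, names=(), threshold=0.0):
    sql = ("SELECT * FROM Expression WHERE rna_quantification_id = ? "
           "AND expression > ? ")
    args = [rna_quant_id, threshold]
    if names:
        # one placeholder per value keeps the query safely parameterized
        sql += "AND name IN (%s) " % ",".join("?" for _ in names)
        args.extend(names)
    return conn.execute(sql, args).fetchall()

print(search(conn, 'rq1', names=('TP53',)))  # -> [('rq1', 'TP53', 7.5)]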