language (stringclasses, 2 values) | func_code_string (stringlengths, 63 to 466k)
---|---
python
|
def delete(self, name):
""" deletes model with given name """
if name not in self._parent:
raise KeyError('model "{}" not present'.format(name))
del self._parent[name]
if self._current_model_group == name:
self._current_model_group = None
|
java
|
protected void before(HttpServletRequest request, HttpServletResponse response) {
final StringBuilder sb = new StringBuilder();
final String beginDecoration;
if (isSubRequestUrl(request)) {
beginDecoration = "- - - - - - - - - - {SUB BEGIN}: ";
} else { // mainly here
beginDecoration = "* * * * * * * * * * {BEGIN}: ";
}
sb.append(beginDecoration);
sb.append(getTitlePath(request));
sb.append(LF).append(IND);
buildRequestInfo(sb, request, response, /*showResponse*/false, /*showErrorFlush*/false);
logger.debug(sb.toString().trim());
}
|
java
|
public static FormInputHandler create() {
return new FormInputHandler(WebElementFinder.create().noLogging(true), null, null, null, null, null, null, null, null);
}
|
python
|
def join(self, qs=None):
'''
Join one queryset together with another using a temporary table. If
no queryset is used, it will use the current queryset and join that
to itself.
`join` either uses the current queryset and effectively does a self-join to
create a new limited queryset OR it uses a queryset given by the user.
The model of a given queryset needs to contain a valid foreign key to
the current queryset to perform a join. A new queryset is then created.
'''
to_field = 'id'
if qs:
fk = [
fk for fk in qs.model._meta.fields
if getattr(fk, 'related_model', None) == self.model
]
fk = fk[0] if fk else None
model_set = '{}_set'.format(self.model.__name__.lower())
key = fk or getattr(qs.model, model_set, None)
if not key:
raise ValueError('QuerySet is not related to current model')
try:
fk_column = key.column
except AttributeError:
fk_column = 'id'
to_field = key.field.column
qs = qs.only(fk_column)
# if we give a qs we need to keep the model qs to not lose anything
new_qs = self
else:
fk_column = 'id'
qs = self.only(fk_column)
new_qs = self.model.objects.all()
TABLE_NAME = 'temp_stuff'
query = self.get_quoted_query(qs.query)
sql = '''
DROP TABLE IF EXISTS {table_name};
DROP INDEX IF EXISTS {table_name}_id;
CREATE TEMPORARY TABLE {table_name} AS {query};
CREATE INDEX {table_name}_{fk_column} ON {table_name} ({fk_column});
'''.format(table_name=TABLE_NAME, fk_column=fk_column, query=str(query))
with connection.cursor() as cursor:
cursor.execute(sql)
class TempModel(models.Model):
temp_key = models.ForeignKey(
self.model,
on_delete=models.DO_NOTHING,
db_column=fk_column,
to_field=to_field
)
class Meta:
managed = False
db_table = TABLE_NAME
conn = Join(
table_name=TempModel._meta.db_table,
parent_alias=new_qs.query.get_initial_alias(),
table_alias=None,
join_type='INNER JOIN',
join_field=self.model.tempmodel_set.rel,
nullable=False
)
new_qs.query.join(conn, reuse=None)
return new_qs
|
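The core trick above is to materialise one queryset into an indexed temporary table and then INNER JOIN it back onto the main table. A minimal standalone sketch of that SQL pattern, using sqlite3 with made-up table and column names rather than the Django ORM:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript("""
    CREATE TABLE author (id INTEGER PRIMARY KEY, name TEXT);
    CREATE TABLE book (id INTEGER PRIMARY KEY, author_id INTEGER, title TEXT);
    INSERT INTO author VALUES (1, 'a'), (2, 'b');
    INSERT INTO book VALUES (10, 1, 'x'), (11, 2, 'y'), (12, 2, 'z');
""")

# Materialise a filtered "queryset" into an indexed temporary table ...
conn.executescript("""
    DROP TABLE IF EXISTS temp_stuff;
    CREATE TEMPORARY TABLE temp_stuff AS SELECT author_id FROM book WHERE title != 'x';
    CREATE INDEX temp_stuff_author_id ON temp_stuff (author_id);
""")

# ... then INNER JOIN it back against the main table.
rows = conn.execute("""
    SELECT DISTINCT author.id, author.name
    FROM author INNER JOIN temp_stuff ON temp_stuff.author_id = author.id
""").fetchall()
print(rows)   # [(2, 'b')]

In the method above the same two steps are issued through cursor.execute(sql) and then wired back into the ORM via the Join(...) object and new_qs.query.join(conn, reuse=None) on the unmanaged TempModel table.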
java
|
@ExportConstructor
public static org.geomajas.geometry.Coordinate constructor(double x, double y) {
return new org.geomajas.geometry.Coordinate(x, y);
}
|
java
|
@SuppressWarnings("checkstyle:magicnumber")
private static String escapeCharacter(final char ch) {
final String prefix;
if (ch < 0x10) {
prefix = "000";
} else if (ch < 0x100) {
prefix = "00";
} else if (ch < 0x1000) {
prefix = "0";
} else {
prefix = "";
}
return "\\u" + prefix + Integer.toHexString(ch);
}
|
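For comparison, the same zero-padded \uXXXX escape can be written in one line of Python; this is only an illustrative equivalent, not part of the library above:

def escape_character(ch):
    # %04x left-pads the code point with zeros to four hex digits, matching the
    # prefix logic of the Java method above
    return "\\u%04x" % ord(ch)

print(escape_character("é"))  # \u00e9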
python
|
def configure_settings():
"""
Configures settings for manage.py and for run_tests.py.
"""
if not settings.configured:
db_config = {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'django_kittens_db.sqlite3',
}
settings.configure(
TEST_RUNNER='django_nose.NoseTestSuiteRunner',
NOSE_ARGS=['--nocapture', '--nologcapture', '--verbosity=1'],
DATABASES={
'default': db_config,
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'django_kittens',
'django_kittens.tests',
),
ROOT_URLCONF='django_kittens.urls',
DEBUG=True,
MIDDLEWARE_CLASSES=(),
)
|
java
|
public static void closeWriter(String filePath) {
Writer writer = writerCache.get(filePath);
if (writer != null) {
try {
writer.close();
} catch (IOException e) {
e.printStackTrace();
}
}
writerCache.remove(filePath); // remove the cache entry by its key (the file path), not by the Writer value
}
|
java
|
public void addOntology(OBOOntology ont) {
for (String id : ont.terms.keySet()) {
addTerm(ont.terms.get(id));
}
}
|
java
|
protected void processQueue() {
try {
QueueItem item = queue.take();
client.addMultipleCounterDataPoints(item.tenantId, item.data);
} catch (InterruptedException e) {
// TODO better logging of this unlikely error
e.printStackTrace();
return;
}
}
|
python
|
def get_scheduler_from_hostname(self, host_name):
"""Get scheduler linked to the given host_name
:param host_name: host_name we want the scheduler from
:type host_name: str
:return: scheduler with id corresponding to the mapping table
:rtype: dict
"""
scheduler_uuid = self.hosts_schedulers.get(host_name, None)
return self.schedulers.get(scheduler_uuid, None)
|
python
|
def get_did_providers(self, did):
"""
Return the list providers registered on-chain for the given did.
:param did: hex str the id of an asset on-chain
:return:
list of addresses
None if asset has no registered providers
"""
register_values = self.contract_concise.getDIDRegister(did)
if register_values and len(register_values) == 5:
return DIDRegisterValues(*register_values).providers
return None
|
python
|
def fit(self, X, y=None, input_type='data'):
"""
Fit the model from data in X.
Parameters
----------
input_type : string, one of: 'data', 'distance' or 'affinity'.
The values of input data X. (default = 'data')
X : array-like, shape (n_samples, n_features)
Training vector, where n_samples is the number of samples
and n_features is the number of features.
If self.input_type is distance, or affinity:
X : array-like, shape (n_samples, n_samples),
Interpret X as precomputed distance or adjacency graph
computed from samples.
Returns
-------
self : object
Returns the instance itself.
"""
X = self._validate_input(X, input_type)
self.fit_geometry(X, input_type)
random_state = check_random_state(self.random_state)
self.embedding_, self.eigenvalues_, self.eigenvectors_ = spectral_embedding(self.geom_,
n_components = self.n_components,
eigen_solver = self.eigen_solver,
random_state = random_state,
drop_first = self.drop_first,
diffusion_maps = self.diffusion_maps,
diffusion_time = self.diffusion_time,
solver_kwds = self.solver_kwds)
self.affinity_matrix_ = self.geom_.affinity_matrix
self.laplacian_matrix_ = self.geom_.laplacian_matrix
self.laplacian_matrix_type_ = self.geom_.laplacian_method
return self
|
java
|
public static IntStream zip(final Collection<? extends IntStream> c, final int[] valuesForNone, final IntNFunction<Integer> zipFunction) {
return Stream.zip(c, valuesForNone, zipFunction).mapToInt(ToIntFunction.UNBOX);
}
|
java
|
@Override
public SelectQuery getQuery() {
SelectQuery selectQuery = new SelectQuery();
List<DbColumn> columns = dbTable.getColumns();
if(!columns.isEmpty()) {
selectQuery.addColumns(columns.toArray(new Column[columns.size()]));
} else {
selectQuery.addAllTableColumns(dbTable);
}
selectQuery.addFromTable(dbTable);
if(sort != null) {
selectQuery.addOrderings(sort);
}
applyFilters(selectQuery);
query = selectQuery;
return query;
}
|
python
|
def GetVectorAsNumpy(numpy_type, buf, count, offset):
""" GetVecAsNumpy decodes values starting at buf[head] as
`numpy_type`, where `numpy_type` is a numpy dtype. """
if np is not None:
# TODO: could set .flags.writeable = False to make users jump through
# hoops before modifying...
return np.frombuffer(buf, dtype=numpy_type, count=count, offset=offset)
else:
raise NumpyRequiredForThisFeature('Numpy was not found.')
|
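A tiny self-contained illustration of the np.frombuffer(dtype, count, offset) call used above, on a hand-built buffer (the values are made up):

import struct
import numpy as np

buf = b"\x00\x00" + struct.pack("<4h", 10, 20, 30, 40)   # 2 padding bytes + four little-endian int16s
vec = np.frombuffer(buf, dtype=np.dtype("<i2"), count=4, offset=2)
print(vec)                   # [10 20 30 40]
print(vec.flags.writeable)   # False here because bytes is immutable; a bytearray buffer would give a writeable view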
python
|
def validate(self, value, messages=None, prefix=None):
"""validate(value[, messages[, prefix]]) -> True | False
Validates the given value according to this PrimitiveType
definition. Validation error messages are appended to an optional
messages array, each with the optional message prefix.
"""
valid = False
def log(msg):
if messages is not None:
if prefix is not None:
tok = msg.split()
msg = prefix + ' ' + tok[0].lower() + " " + " ".join(tok[1:])
messages.append(msg)
if self.string:
valid = type(value) is str
else:
if type(value) is str:
log("String '%s' cannot be represented as a number." % value)
elif type(value) not in (int, long, float):
log("Value '%s' is not a primitive type." % str(value))
elif type(value) is float and not self.float:
log("Float '%g' cannot be represented as an integer." % value)
else:
if value < self.min or value > self.max:
args = (str(value), self.min, self.max)
log("Value '%s' out of range [%d, %d]." % args)
else:
valid = True
return valid
|
python
|
def _get_result(self, resource):
"""
Converts the given resource to a result to be returned from the view.
Unless a custom renderer is employed, this will involve creating
a representer and using it to convert the resource to a string.
:param resource: Resource to convert.
:type resource: Object implementing
:class:`everest.interfaces.IResource`.
:returns: :class:`pyramid.response.Response` object or a dictionary
with a single key "context" mapped to the given resource (to be
passed on to a custom renderer).
"""
if self._convert_response:
self._update_response_body(resource)
result = self.request.response
else:
result = dict(context=resource)
return result
|
python
|
def redo(self, *args):
"""Generate listing of images that user can save."""
if not self.gui_up:
return
mod_only = self.w.modified_only.get_state()
treedict = Bunch.caselessDict()
self.treeview.clear()
self.w.status.set_text('')
channel = self.fv.get_channel(self.chname)
if channel is None:
return
# Only list modified images for saving. Scanning Datasrc is enough.
if mod_only:
all_keys = channel.datasrc.keys(sort='alpha')
# List all images in the channel.
else:
all_keys = channel.get_image_names()
# Extract info for listing and saving
for key in all_keys:
iminfo = channel.get_image_info(key)
path = iminfo.get('path')
idx = iminfo.get('idx')
t = iminfo.get('time_modified')
if path is None: # Special handling for generated buffer, eg mosaic
infile = key
is_fits = True
else:
infile = os.path.basename(path)
infile_ext = os.path.splitext(path)[1]
infile_ext = infile_ext.lower()
is_fits = False
if 'fit' in infile_ext:
is_fits = True
# Only list FITS files unless it is Ginga generated buffer
if not is_fits:
continue
# Only list modified buffers
if mod_only and t is None:
continue
# More than one ext modified, append to existing entry
if infile in treedict:
if t is not None:
treedict[infile].extlist.add(idx)
elist = sorted(treedict[infile].extlist)
treedict[infile].MODEXT = ';'.join(
map(self._format_extname, elist))
# Add new entry
else:
if t is None:
s = ''
extlist = set()
else:
s = self._format_extname(idx)
extlist = set([idx])
treedict[infile] = Bunch.Bunch(
IMAGE=infile, MODEXT=s, extlist=extlist, path=path)
self.treeview.set_tree(treedict)
# Resize column widths
n_rows = len(treedict)
if n_rows == 0:
self.w.status.set_text('Nothing available for saving')
elif n_rows < self.settings.get('max_rows_for_col_resize', 5000):
self.treeview.set_optimal_column_widths()
self.logger.debug('Resized columns for {0} row(s)'.format(n_rows))
|
python
|
def retrieve_author(id=None, username=None):
"""
Retrieve a SpigotAuthor via their id, or username.
:param id:
:param username:
:return:
"""
if id is None and username is None:
raise SpigotAuthorException("Unable to retrieve an Author without an Identifier")
if id is None:
return SpigotAuthor.from_username(username)
else:
return SpigotAuthor.from_id(id)
|
java
|
@CheckReturnValue
@BackpressureSupport(BackpressureKind.SPECIAL)
@SchedulerSupport(SchedulerSupport.NONE)
public final Disposable subscribe(Consumer<? super T> onNext, Consumer<? super Throwable> onError,
Action onComplete, Consumer<? super Subscription> onSubscribe) {
ObjectHelper.requireNonNull(onNext, "onNext is null");
ObjectHelper.requireNonNull(onError, "onError is null");
ObjectHelper.requireNonNull(onComplete, "onComplete is null");
ObjectHelper.requireNonNull(onSubscribe, "onSubscribe is null");
LambdaSubscriber<T> ls = new LambdaSubscriber<T>(onNext, onError, onComplete, onSubscribe);
subscribe(ls);
return ls;
}
|
python
|
def minimize_matrix(self):
"""
This method finds and returns the permutations that produce the lowest
ewald sum calls recursive function to iterate through permutations
"""
if self._algo == EwaldMinimizer.ALGO_FAST or \
self._algo == EwaldMinimizer.ALGO_BEST_FIRST:
return self._recurse(self._matrix, self._m_list,
set(range(len(self._matrix))))
|
python
|
def expand_templates(pars, context, return_left=False, client=False,
getenv=True, getshell=True):
"""
Render variables in context into the set of parameters with jinja2.
For variables that are not strings, nothing happens.
Parameters
----------
pars: dict
values are strings containing some jinja2 controls
context: dict
values to use while rendering
return_left: bool
whether to return the set of variables in context that were not used
in rendering parameters
Returns
-------
dict with the same keys as ``pars``, but updated values; optionally also
return set of unused parameter names.
"""
all_vars = set(context)
out = _expand(pars, context, all_vars, client, getenv, getshell)
if return_left:
return out, all_vars
return out
|
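The helper _expand() is not shown here, so the following is only a sketch of the documented behaviour (string values rendered with jinja2, non-strings passed through, unused context keys reported), assuming jinja2 is installed:

from jinja2 import Environment, meta

def render_pars(pars, context):
    """Render string values with jinja2; return the rendered dict and unused context keys."""
    env = Environment()
    used = set()
    out = {}
    for key, value in pars.items():
        if isinstance(value, str):
            ast = env.parse(value)
            used |= meta.find_undeclared_variables(ast)   # which context names the template refers to
            out[key] = env.from_string(value).render(**context)
        else:
            out[key] = value                               # non-strings pass through untouched
    return out, set(context) - used

pars = {"url": "https://{{ host }}/data", "retries": 3}
print(render_pars(pars, {"host": "example.com", "unused": 1}))
# ({'url': 'https://example.com/data', 'retries': 3}, {'unused'})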
python
|
async def download_file(context, url, abs_filename, session=None, chunk_size=128):
"""Download a file, async.
Args:
context (scriptworker.context.Context): the scriptworker context.
url (str): the url to download
abs_filename (str): the path to download to
session (aiohttp.ClientSession, optional): the session to use. If
None, use context.session. Defaults to None.
chunk_size (int, optional): the chunk size to read from the response
at a time. Default is 128.
"""
session = session or context.session
loggable_url = get_loggable_url(url)
log.info("Downloading %s", loggable_url)
parent_dir = os.path.dirname(abs_filename)
async with session.get(url) as resp:
if resp.status == 404:
await _log_download_error(resp, "404 downloading %(url)s: %(status)s; body=%(body)s")
raise Download404("{} status {}!".format(loggable_url, resp.status))
elif resp.status != 200:
await _log_download_error(resp, "Failed to download %(url)s: %(status)s; body=%(body)s")
raise DownloadError("{} status {} is not 200!".format(loggable_url, resp.status))
makedirs(parent_dir)
with open(abs_filename, "wb") as fd:
while True:
chunk = await resp.content.read(chunk_size)
if not chunk:
break
fd.write(chunk)
log.info("Done")
|
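A stripped-down standalone version of the same chunked-download loop, using aiohttp directly instead of the scriptworker context object (the URL and target path are placeholders):

import asyncio
import os

import aiohttp

async def download(url, abs_filename, chunk_size=128):
    os.makedirs(os.path.dirname(abs_filename) or ".", exist_ok=True)
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            resp.raise_for_status()   # rough stand-in for the explicit 404 / non-200 handling above
            with open(abs_filename, "wb") as fd:
                while True:
                    chunk = await resp.content.read(chunk_size)
                    if not chunk:
                        break
                    fd.write(chunk)

# asyncio.run(download("https://example.com/file.bin", "downloads/file.bin"))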
python
|
def preds(self, nodeids=None):
"""
Return the Pred objects for *nodeids*, or all Preds.
Args:
nodeids: an iterable of nodeids of predications to return
Preds from; if `None`, return all Preds
"""
if nodeids is None: nodeids = self._nodeids
_eps = self._eps
return [_eps[nid][1] for nid in nodeids]
|
python
|
def get_fill_value(dataset):
"""Get the fill value of the *dataset*, defaulting to np.nan."""
if np.issubdtype(dataset.dtype, np.integer):
return dataset.attrs.get('_FillValue', np.nan)
return np.nan
|
java
|
public void createKLT() {
PkltConfig config = new PkltConfig();
config.templateRadius = 3;
config.pyramidScaling = new int[]{1,2,4,8};
tracker = FactoryPointTracker.klt(config, new ConfigGeneralDetector(600, 6, 1),
imageType, derivType);
}
|
java
|
public synchronized boolean hasNext()
{
try
{
if (!isHasCalledCheck())
{
setHasCalledCheck(true);
setHasNext(getRsAndStmt().m_rs.next());
if (!getHasNext())
{
autoReleaseDbResources();
}
}
}
catch (Exception ex)
{
setHasNext(false);
autoReleaseDbResources();
if(ex instanceof ResourceClosedException)
{
throw (ResourceClosedException)ex;
}
if(ex instanceof SQLException)
{
throw new PersistenceBrokerSQLException("Calling ResultSet.next() failed", (SQLException) ex);
}
else
{
throw new PersistenceBrokerException("Can't get next row from ResultSet", ex);
}
}
if (logger.isDebugEnabled())
logger.debug("hasNext() -> " + getHasNext());
return getHasNext();
}
|
python
|
def _file_notifier(state):
"""Notify of configuration update through file.
Arguments:
state (_WaffleState): Object that contains reference to app and its
configstore.
"""
tstamp = time.time()
state._tstamp = tstamp
conf = state.app.config
file_path = conf.get('WAFFLE_WATCHER_FILE', '/tmp/waffleconf.txt')
if not os.path.isfile(file_path):
# Create watch file
open(file_path, 'a').close()
# Update timestamp
os.utime(file_path, (tstamp, tstamp))
|
java
|
@Override
public String[] getPreDeclaredNamespaceUris() {
LinkedList<String> ll=new LinkedList<String>();
if (nss!=null) {
ll.addAll(nss.values());
}
ll.add(XSI_NS);
ll.add(DOMProcessing.XSD_NS_FOR_XML);
ll.add(XML_NS);
ll.add(PROV_NS);
//System.out.println("namespaceprefixmapper " + ll);
String[] tmp=new String[1];
return ll.toArray(tmp);
}
|
python
|
def render(cls, data={}, view_template=None, layout=None, **kwargs):
"""
To render data to the associate template file of the action view
:param data: The context data to pass to the template
:param view_template: The file template to use. By default it will map the classname/action.html
:param layout: The body layout, must contain {% include __view_template__ %}
"""
if not view_template:
stack = inspect.stack()[1]
module = inspect.getmodule(cls).__name__
module_name = module.split(".")[-1]
action_name = stack[3] # The method being called in the class
view_name = cls.__name__ # The name of the class without View
if view_name.endswith("View"):
view_name = view_name[:-4]
view_template = "%s/%s.html" % (view_name, action_name)
data = data if data else dict()
data["__"] = cls._context if cls._context else {}
if kwargs:
data.update(kwargs)
data["__view_template__"] = view_template
return render_template(layout or cls.LAYOUT, **data)
|
python
|
def netstat():
'''
Return information on open ports and states
.. note::
On BSD minions, the output contains PID info (where available) for each
netstat entry, fetched from sockstat/fstat output.
.. versionchanged:: 2014.1.4
Added support for OpenBSD, FreeBSD, and NetBSD
.. versionchanged:: 2015.8.0
Added support for SunOS
.. versionchanged:: 2016.11.4
Added support for AIX
CLI Example:
.. code-block:: bash
salt '*' network.netstat
'''
if __grains__['kernel'] == 'Linux':
if not salt.utils.path.which('netstat'):
return _ss_linux()
else:
return _netstat_linux()
elif __grains__['kernel'] in ('OpenBSD', 'FreeBSD', 'NetBSD'):
return _netstat_bsd()
elif __grains__['kernel'] == 'SunOS':
return _netstat_sunos()
elif __grains__['kernel'] == 'AIX':
return _netstat_aix()
raise CommandExecutionError('Not yet supported on this platform')
|
python
|
def scalarcoords(self):
"""A dictionary of values that don't label any axes (point-like)."""
return {k: v.values for k, v in self.coords.items() if v.dims==()}
|
java
|
public static <T extends Chunkable> List<byte[]> chunksFrom(T chunkable, int chunkLength) {
List<byte[]> chunks = new ArrayList<>();
int chunkCount = chunkCountFrom(chunkable, chunkLength);
for (int i = 0; i < chunkCount; i++) {
byte[] chunk = chunkFrom(chunkable, chunkLength, i);
chunks.add(chunk);
}
return chunks;
}
|
python
|
def msg_curse(self, args=None, max_width=None):
"""Return the dict to display in the curse interface."""
# Init the return message
ret = []
# Only process if stats exist and display plugin enable...
if not self.stats or self.is_disable():
return ret
# Max size for the interface name
name_max_width = max_width - 12
# Build the string message
# Header
msg = '{:{width}}'.format('FILE SYS', width=name_max_width)
ret.append(self.curse_add_line(msg, "TITLE"))
if args.fs_free_space:
msg = '{:>7}'.format('Free')
else:
msg = '{:>7}'.format('Used')
ret.append(self.curse_add_line(msg))
msg = '{:>7}'.format('Total')
ret.append(self.curse_add_line(msg))
# Filesystem list (sorted by name)
for i in sorted(self.stats, key=operator.itemgetter(self.get_key())):
# New line
ret.append(self.curse_new_line())
if i['device_name'] == '' or i['device_name'] == 'none':
mnt_point = i['mnt_point'][-name_max_width + 1:]
elif len(i['mnt_point']) + len(i['device_name'].split('/')[-1]) <= name_max_width - 3:
# If possible concatenate mode info... Glances touch inside :)
mnt_point = i['mnt_point'] + ' (' + i['device_name'].split('/')[-1] + ')'
elif len(i['mnt_point']) > name_max_width:
# Cut mount point name if it is too long
mnt_point = '_' + i['mnt_point'][-name_max_width + 1:]
else:
mnt_point = i['mnt_point']
msg = '{:{width}}'.format(nativestr(mnt_point),
width=name_max_width)
ret.append(self.curse_add_line(msg))
if args.fs_free_space:
msg = '{:>7}'.format(self.auto_unit(i['free']))
else:
msg = '{:>7}'.format(self.auto_unit(i['used']))
ret.append(self.curse_add_line(msg, self.get_views(item=i[self.get_key()],
key='used',
option='decoration')))
msg = '{:>7}'.format(self.auto_unit(i['size']))
ret.append(self.curse_add_line(msg))
return ret
|
java
|
public static <T> List<T> sortedCollection(Collection<T> coll) {
List<T> list = new LinkedList<T>(coll);
Collections.sort(list, new UComp<T>());
return list;
}
|
python
|
def encode_hdr(self, boundary):
"""Returns the header of the encoding of this parameter"""
boundary = encode_and_quote(boundary)
headers = ["--%s" % boundary]
if self.filename:
disposition = 'form-data; name="%s"; filename="%s"' % (self.name,
self.filename)
else:
disposition = 'form-data; name="%s"' % self.name
headers.append("Content-Disposition: %s" % disposition)
if self.filetype:
filetype = self.filetype
else:
filetype = "text/plain; charset=utf-8"
headers.append("Content-Type: %s" % filetype)
headers.append("")
headers.append("")
return "\r\n".join(headers)
|
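For a made-up boundary and file field, the header block produced by this method looks like the following (illustrative values only):

boundary = "a1b2c3"
name, filename, filetype = "upload", "notes.txt", "text/plain; charset=utf-8"

headers = [
    "--%s" % boundary,
    'Content-Disposition: form-data; name="%s"; filename="%s"' % (name, filename),
    "Content-Type: %s" % filetype,
    "",
    "",
]
print(repr("\r\n".join(headers)))
# '--a1b2c3\r\nContent-Disposition: form-data; name="upload"; filename="notes.txt"\r\nContent-Type: text/plain; charset=utf-8\r\n\r\n'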
python
|
def _get_principal(self, principal_arn):
"""
raise ResourceNotFoundException
"""
if ':cert/' in principal_arn:
certs = [_ for _ in self.certificates.values() if _.arn == principal_arn]
if len(certs) == 0:
raise ResourceNotFoundException()
principal = certs[0]
return principal
else:
# TODO: search for cognito_ids
pass
raise ResourceNotFoundException()
|
java
|
@Deprecated
public Collection<Double> getAllKeywordValues(String keyword, SampleType type) {
if (!isLimited(keyword, type)) {
return null;
}
Collection<Double> samples = getSamples(keyword, type);
return samples == null ? null : Collections.unmodifiableCollection(samples);
}
|
python
|
def _xfer_file(self, source_file=None, source_config=None, dest_file=None, file_system=None,
TransferClass=FileTransfer):
"""Transfer file to remote device.
By default, this will use Secure Copy; if self.inline_transfer is set, it will use the
Netmiko InlineTransfer method to transfer inline using either SSH or telnet (plus TCL
onbox).
Return (status, msg)
status = boolean
msg = details on what happened
"""
if not source_file and not source_config:
raise ValueError("File source not specified for transfer.")
if not dest_file or not file_system:
raise ValueError("Destination file or file system not specified.")
if source_file:
kwargs = dict(ssh_conn=self.device, source_file=source_file, dest_file=dest_file,
direction='put', file_system=file_system)
elif source_config:
kwargs = dict(ssh_conn=self.device, source_config=source_config, dest_file=dest_file,
direction='put', file_system=file_system)
enable_scp = True
if self.inline_transfer:
enable_scp = False
with TransferClass(**kwargs) as transfer:
# Check if file already exists and has correct MD5
if transfer.check_file_exists() and transfer.compare_md5():
msg = "File already exists and has correct MD5: no SCP needed"
return (True, msg)
if not transfer.verify_space_available():
msg = "Insufficient space available on remote device"
return (False, msg)
if enable_scp:
transfer.enable_scp()
# Transfer file
transfer.transfer_file()
# Compares MD5 between local-remote files
if transfer.verify_file():
msg = "File successfully transferred to remote device"
return (True, msg)
else:
msg = "File transfer to remote device failed"
return (False, msg)
return (False, '')
|
python
|
def reset(self):
"""
Empties all internal storage containers
"""
super(MorseComplex, self).reset()
self.base_partitions = {}
self.merge_sequence = {}
self.persistences = []
self.max_indices = []
# State properties
self.persistence = 0.
|
python
|
def serialize(self, include_class=True, save_dynamic=False, **kwargs):
"""Serialize Singleton instance to a dictionary.
This behaves identically to HasProperties.serialize, except it also
saves the identifying name in the dictionary as well.
"""
json_dict = super(Singleton, self).serialize(
include_class=include_class,
save_dynamic=save_dynamic,
**kwargs
)
json_dict['_singleton_id'] = self._singleton_id
return json_dict
|
python
|
def setMaxDaysBack(self, maxDaysBack):
"""
what is the maximum allowed age of the results?
"""
assert isinstance(maxDaysBack, int), "maxDaysBack value has to be a positive integer"
assert maxDaysBack >= 1
self.topicPage["maxDaysBack"] = maxDaysBack
|
java
|
public void postProcessBeanFactory(final ConfigurableListableBeanFactory beanFactory) throws BeansException {
if (beanFactory == null) {
return;
}
String[] beanNames = beanFactory.getBeanDefinitionNames();
int singletonBeanCount = 0;
for (int i = 0; i < beanNames.length; i++) {
// using beanDefinition to check singleton property because when
// accessing through
// context (applicationContext.isSingleton(beanName)), bean will be
// created already,
// possibly bypassing other BeanFactoryPostProcessors
BeanDefinition beanDefinition = beanFactory.getBeanDefinition(beanNames[i]);
if (beanDefinition.isSingleton()) {
singletonBeanCount++;
}
}
this.progressMonitor.taskStarted(this.loadingAppContextMessage, singletonBeanCount);
beanFactory.addBeanPostProcessor(new ProgressMonitoringBeanPostProcessor(beanFactory));
}
|
java
|
@Override
public CPDefinitionVirtualSetting findByC_C(long classNameId, long classPK)
throws NoSuchCPDefinitionVirtualSettingException {
CPDefinitionVirtualSetting cpDefinitionVirtualSetting = fetchByC_C(classNameId,
classPK);
if (cpDefinitionVirtualSetting == null) {
StringBundler msg = new StringBundler(6);
msg.append(_NO_SUCH_ENTITY_WITH_KEY);
msg.append("classNameId=");
msg.append(classNameId);
msg.append(", classPK=");
msg.append(classPK);
msg.append("}");
if (_log.isDebugEnabled()) {
_log.debug(msg.toString());
}
throw new NoSuchCPDefinitionVirtualSettingException(msg.toString());
}
return cpDefinitionVirtualSetting;
}
|
java
|
public String generateNestedEvent() throws Exception
{
BenchmarkEvent nestedDims1 = new BenchmarkEvent(
null,
String.valueOf(rng.nextInt()), String.valueOf(rng.nextInt()), null, null, null, null,
null, null, null, null,
null, null, null, null,
null, null, null
);
String[] dimsArray1 = {String.valueOf(rng.nextInt()), String.valueOf(rng.nextInt()), String.valueOf(rng.nextInt())};
BenchmarkEvent nestedDims2 = new BenchmarkEvent(
null,
null, null, String.valueOf(rng.nextInt()), String.valueOf(rng.nextInt()), String.valueOf(rng.nextInt()), String.valueOf(rng.nextInt()),
null, null, null, null,
null, null, null, null,
dimsArray1, null, null
);
Long[] metricsArray1 = {rng.nextLong(), rng.nextLong(), rng.nextLong(), rng.nextLong()};
BenchmarkEvent nestedMetrics1 = new BenchmarkEvent(
null,
null, null, null, null, null, null,
rng.nextLong(), rng.nextDouble(), rng.nextDouble(), rng.nextLong(),
null, null, null, null,
null, metricsArray1, null
);
BenchmarkEvent nestedMetrics2 = new BenchmarkEvent(
null,
null, null, null, null, null, null,
null, null, null, rng.nextLong(),
null, null, null, null,
null, null, null
);
BenchmarkEvent metricsWrapper = new BenchmarkEvent(
null,
null, null, null, null, null, null,
null, null, null, null,
null, null, null, nestedMetrics2,
null, null, null
);
//nest some dimensions in an array!
BenchmarkEvent arrayNestedDim1 = new BenchmarkEvent(
null,
String.valueOf(rng.nextInt()), null, null, null, null, null,
null, null, null, null,
null, null, null, null,
null, null, null
);
BenchmarkEvent arrayNestedDim2 = new BenchmarkEvent(
null,
String.valueOf(rng.nextInt()), null, null, null, null, null,
null, null, null, null,
null, null, null, null,
null, null, null
);
BenchmarkEvent arrayNestedDim3 = new BenchmarkEvent(
null,
null, String.valueOf(rng.nextInt()), null, null, null, null,
null, null, null, null,
null, null, null, null,
null, null, null
);
BenchmarkEvent arrayNestedWrapper = new BenchmarkEvent(
null,
null, null, null, null, null, null,
null, null, null, null,
arrayNestedDim3, null, null, null,
null, null, null
);
BenchmarkEvent[] eventArray = {arrayNestedDim1, arrayNestedDim2, arrayNestedWrapper};
Long[] ignoredMetrics = {Long.valueOf(10), Long.valueOf(20), Long.valueOf(30)};
BenchmarkEvent wrapper = new BenchmarkEvent(
DEFAULT_TIMESTAMP,
String.valueOf(rng.nextInt()), String.valueOf(rng.nextInt()), null, null, null, null,
null, null, rng.nextDouble(), rng.nextLong(),
nestedDims1, nestedDims2, nestedMetrics1, metricsWrapper,
null, ignoredMetrics, eventArray
);
return mapper.writeValueAsString(wrapper);
}
|
java
|
public EClass getIfcMetric() {
if (ifcMetricEClass == null) {
ifcMetricEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI)
.getEClassifiers().get(322);
}
return ifcMetricEClass;
}
|
java
|
public static nat64[] get(nitro_service service) throws Exception{
nat64 obj = new nat64();
nat64[] response = (nat64[])obj.get_resources(service);
return response;
}
|
python
|
def set_content_model(self):
"""
Set content_model to the child class's related name, or None if this is
the base class.
"""
if not self.content_model:
is_base_class = (
base_concrete_model(ContentTyped, self) == self.__class__)
self.content_model = (
None if is_base_class else self.get_content_model_name())
|
python
|
def initialize(self, gyro_rate, slices=None, skip_estimation=False):
"""Prepare calibrator for calibration
This method does three things:
1. Create slices from the video stream, if not already provided
2. Estimate time offset
3. Estimate rotation between camera and gyroscope
Parameters
------------------
gyro_rate : float
Estimated gyroscope sample rate
slices : list of Slice, optional
Slices to use for optimization
skip_estimation : bool
Do not estimate initial time offset and rotation.
Raises
--------------------
InitializationError
If the initialization fails
"""
self.params['user']['gyro_rate'] = gyro_rate
for p in ('gbias_x', 'gbias_y', 'gbias_z'):
self.params['initialized'][p] = 0.0
if slices is not None:
self.slices = slices
if self.slices is None:
self.slices = videoslice.Slice.from_stream_randomly(self.video)
logger.debug("Number of slices: {:d}".format(len(self.slices)))
if len(self.slices) < 2:
logger.error("Calibration requires at least 2 video slices to proceed, got %d", len(self.slices))
raise InitializationError("Calibration requires at least 2 video slices to proceed, got {:d}".format(len(self.slices)))
if not skip_estimation:
time_offset = self.find_initial_offset()
# TODO: Detect when time offset initialization fails, and raise InitializationError
R = self.find_initial_rotation()
if R is None:
raise InitializationError("Failed to calculate initial rotation")
|
python
|
async def set_chat_description(self, chat_id: typing.Union[base.Integer, base.String],
description: typing.Union[base.String, None] = None) -> base.Boolean:
"""
Use this method to change the description of a supergroup or a channel.
The bot must be an administrator in the chat for this to work and must have the appropriate admin rights.
Source: https://core.telegram.org/bots/api#setchatdescription
:param chat_id: Unique identifier for the target chat or username of the target channel
:type chat_id: :obj:`typing.Union[base.Integer, base.String]`
:param description: New chat description, 0-255 characters
:type description: :obj:`typing.Union[base.String, None]`
:return: Returns True on success
:rtype: :obj:`base.Boolean`
"""
payload = generate_payload(**locals())
result = await self.request(api.Methods.SET_CHAT_DESCRIPTION, payload)
return result
|
java
|
public void marshall(EnableDirectoryRequest enableDirectoryRequest, ProtocolMarshaller protocolMarshaller) {
if (enableDirectoryRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(enableDirectoryRequest.getDirectoryArn(), DIRECTORYARN_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
}
|
python
|
def set(self, instance, value, **kwargs):
"""
Check if value is an actual date/time value. If not, attempt
to convert it to one; otherwise, set to None. Assign all
properties passed as kwargs to object.
"""
val = get_date(instance, value)
super(DateTimeField, self).set(instance, val, **kwargs)
|
python
|
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, object_pairs_hook=None, **kw):
r"""Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a DSON
document) to a Python object.
If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name
must be specified. Encodings that are not ASCII based (such as UCS-2)
are not allowed and should be decoded to ``unicode`` first.
``object_hook`` is an optional function that will be called with the
result of any object literal decode (a ``dict``). The return value of
``object_hook`` will be used instead of the ``dict``. This feature
can be used to implement custom decoders (e.g. DSON-RPC class hinting).
``object_pairs_hook`` is an optional function that will be called with the
result of any object literal decoded with an ordered list of pairs. The
return value of ``object_pairs_hook`` will be used instead of the ``dict``.
This feature can be used to implement custom decoders that rely on the
order that the key and value pairs are decoded (for example,
collections.OrderedDict will remember the order of insertion). If
``object_hook`` is also defined, the ``object_pairs_hook`` takes priority.
``parse_float``, if specified, will be called with the string
of every DSON float to be decoded. By default this is equivalent to
float(num_str). This can be used to use another datatype or parser
for DSON floats (e.g. decimal.Decimal).
``parse_int``, if specified, will be called with the string
of every DSON int to be decoded. By default this is equivalent to
int(num_str). This can be used to use another datatype or parser
for DSON integers (e.g. float).
``parse_constant``, if specified, will be called with one of the
following strings: -Infinity, Infinity, NaN, null, true, false.
This can be used to raise an exception if invalid DSON numbers
are encountered.
To use a custom ``DSONDecoder`` subclass, specify it with the ``cls``
kwarg; otherwise ``DSONDecoder`` is used.
"""
if (cls is None and encoding is None and object_hook is None and
parse_int is None and parse_float is None and
parse_constant is None and object_pairs_hook is None and not kw):
return _default_decoder.decode(s)
if cls is None:
cls = DSONDecoder
if object_hook is not None:
kw['object_hook'] = object_hook
if object_pairs_hook is not None:
kw['object_pairs_hook'] = object_pairs_hook
if parse_float is not None:
kw['parse_float'] = parse_float
if parse_int is not None:
kw['parse_int'] = parse_int
if parse_constant is not None:
kw['parse_constant'] = parse_constant
return cls(encoding=encoding, **kw).decode(s)
|
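The hook arguments mirror the stdlib json.loads API, so a plain-JSON example shows the same knobs in action (this uses json, not the DSON dialect decoded above):

import json
from collections import OrderedDict
from decimal import Decimal

doc = '{"b": 1.5, "a": 2}'
print(json.loads(doc, object_pairs_hook=OrderedDict))  # OrderedDict([('b', 1.5), ('a', 2)]) - pair order preserved
print(json.loads(doc, parse_float=Decimal))            # {'b': Decimal('1.5'), 'a': 2}
print(json.loads(doc, parse_int=float))                # {'b': 1.5, 'a': 2.0}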
java
|
private void initialize() {
this.setTitle(Constant.messages.getString("edit.find.title"));
this.infoLabel = new JLabel(Constant.messages.getString("edit.find.label.notfound"));
this.infoLabel.setVisible(false);
this.setContentPane(getJPanel());
centreDialog();
txtFind.requestFocus();
this.getRootPane().setDefaultButton(btnFind);
pack();
this.setVisible(true);
}
|
java
|
public boolean remove(long key) {
for (Map<Long, Long> m : buckets) {
if (m.remove(key) != null) {
return true;
}
}
return false;
}
|
java
|
public SIBusMessage nextLocked()
throws SISessionUnavailableException, SISessionDroppedException,
SIResourceException, SIConnectionLostException,
SIErrorException, SIIncorrectCallException
{
if (TraceComponent.isAnyTracingEnabled() && CoreSPILockedMessageEnumeration.tc.isEntryEnabled())
SibTr.entry(CoreSPILockedMessageEnumeration.tc, "nextLocked",
new Object[] {new Integer(hashCode()), this});
JsMessage jsMsg = null;
JsMessageWrapper msg = null;
LMEMessage dirtyMessage = null;
boolean removeMsg = false;
checkValidState("nextLocked");
localConsumerPoint.checkNotClosed();
// Make any list modifications under the lock
synchronized(this)
{
// If we still have an unlocked message in the list remove it now
if(currentUnlockedMessage != null)
{
removeMessage(currentUnlockedMessage);
currentUnlockedMessage = null;
}
if(currentMsg != null)
currentMsg = currentMsg.next;
else if(!endReached)
currentMsg = firstMsg;
if(currentMsg != null)
{
msg = currentMsg.message;
jsMsg = currentMsg.jsMessage;
// Due to the slight dodgyness of setting jsMsg (see comment on dirtyMessage)
// this tries to ensure our logic is correct
if((jsMsg != null) && !currentMsg.wasRead)
{
SIErrorException e =
new SIErrorException(nls.getFormattedMessage(
"INTERNAL_MESSAGING_ERROR_CWSIP0001",
new Object[] {
"com.ibm.ws.sib.processor.impl.AbstractLockedMessageEnumeration",
"1:1318:1.154.3.1" },
null));
if (TraceComponent.isAnyTracingEnabled() && CoreSPILockedMessageEnumeration.tc.isEntryEnabled())
SibTr.exit(CoreSPILockedMessageEnumeration.tc, "nextLocked", e);
throw e;
}
// If the message was added to the message store but the consumer does
// not require recovery we can delete the message before we return it.
// We don't want to do it here under the lock, so we remember to do it
// later but pretend we've already done it by the time we release the
// lock - that way no-one else will think its in the message store when it
// isn't.
if(currentMsg.isStored && !currentMsg.isRecoverable)
{
removeMsg = true;
currentMsg.isStored = false;
}
// We're about to release the list lock but we need to be able to set
// the jsMessage back into the LMEMessage object later so we save a
// pointer to it. BUT there is a slight possibility that by the time
// we write the jsMessage into it the LMEMessage has been deleted from
// the list (e.g. by an unlockAll() from the consumer), that's not a
// problem in itself. BUT by deleting it from the list we actually can
// put it into the set of pooled objects so you could be writing over
// another message BUT we currently hold the AsynchConsumerLock and we
// know no new messages can be added to the list unless they hold that
// lock SO we may have moved into the pool but we couldn't have been
// re-used SO we're safe!
dirtyMessage = currentMsg;
currentMsg.wasRead = true;
//indicate that there is a message under the cursor
messageAvailable = true;
}
else
{
messageAvailable = false;
endReached = true;
}
} // synchronized
//check it isn't null
if((msg != null) && (jsMsg == null))
{
jsMsg = setPropertiesInMessage(currentMsg);
if(removeMsg)
{
try
{
removeMessageFromStore(msg,
null,
false); // false = DON'T decrement active message count
// (wait until the message is deleted/unlocked from the LME)
}
catch (SIMPMessageNotLockedException e)
{
// SIMPMessageNotLockedException shouldn't occur so FFDC.
FFDCFilter.processException(
e,
"com.ibm.ws.sib.processor.impl.AbstractLockedMessageEnumeration.nextLocked",
"1:1396:1.154.3.1",
this);
SibTr.exception(tc, e);
SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0002",
new Object[] {
"com.ibm.ws.sib.processor.impl.AbstractLockedMessageEnumeration",
"1:1403:1.154.3.1",
e });
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "nextLocked", e);
throw new SIResourceException(
nls.getFormattedMessage(
"INTERNAL_MESSAGING_ERROR_CWSIP0002",
new Object[] {
"com.ibm.ws.sib.processor.impl.AbstractLockedMessageEnumeration",
"1:1414:1.154.3.1",
e },
null),
e);
}
}
dirtyMessage.jsMessage = jsMsg; // Read earlier comment before touching this!
}
if (TraceComponent.isAnyTracingEnabled() && UserTrace.tc_mt.isDebugEnabled())
UserTrace.trace_Receive(null,
jsMsg,
consumerSession.getDestinationAddress(),
consumerSession.getIdInternal());
if (TraceComponent.isAnyTracingEnabled() && CoreSPILockedMessageEnumeration.tc.isEntryEnabled())
SibTr.exit(CoreSPILockedMessageEnumeration.tc, "nextLocked", new Object[] {jsMsg});
//return the message or null
return jsMsg;
}
|
java
|
@Override
public Scan open() {
TempTable tt = copyRecordsFrom(rhs);
TableInfo ti = tt.getTableInfo();
Scan leftscan = lhs.open();
return new MultiBufferProductScan(leftscan, ti, tx);
}
|
java
|
protected boolean isPermissionDenied(List<String> output){
return output.stream().anyMatch(s -> s != null && s.contains(PERMISSION_DENIED_MESSAGE));
}
|
java
|
public void executeUpdate(ClassDescriptor cld, Object obj) throws PersistenceBrokerException
{
if (logger.isDebugEnabled())
{
logger.debug("executeUpdate: " + obj);
}
// obj with nothing but key fields is not updated
if (cld.getNonPkRwFields().length == 0)
{
return;
}
final StatementManagerIF sm = broker.serviceStatementManager();
PreparedStatement stmt = null;
// BRJ: preserve current locking values
// locking values will be restored in case of exception
ValueContainer[] oldLockingValues;
oldLockingValues = cld.getCurrentLockingValues(obj);
try
{
stmt = sm.getUpdateStatement(cld);
if (stmt == null)
{
logger.error("getUpdateStatement returned a null statement");
throw new PersistenceBrokerException("getUpdateStatement returned a null statement");
}
sm.bindUpdate(stmt, cld, obj);
if (logger.isDebugEnabled())
logger.debug("executeUpdate: " + stmt);
if ((stmt.executeUpdate() == 0) && cld.isLocking()) //BRJ
{
/**
* Kuali Foundation modification -- 6/19/2009
*/
String objToString = "";
try {
objToString = obj.toString();
} catch (Exception ex) {}
throw new OptimisticLockException("Object has been modified by someone else: " + objToString, obj);
/**
* End of Kuali Foundation modification
*/
}
// Harvest any return values.
harvestReturnValues(cld.getUpdateProcedure(), obj, stmt);
}
catch (OptimisticLockException e)
{
// Don't log as error
if (logger.isDebugEnabled())
logger.debug(
"OptimisticLockException during the execution of update: " + e.getMessage(),
e);
throw e;
}
catch (PersistenceBrokerException e)
{
// BRJ: restore old locking values
setLockingValues(cld, obj, oldLockingValues);
logger.error(
"PersistenceBrokerException during the execution of the update: " + e.getMessage(),
e);
throw e;
}
catch (SQLException e)
{
final String sql = broker.serviceSqlGenerator().getPreparedUpdateStatement(cld).getStatement();
throw ExceptionHelper.generateException(e, sql, cld, logger, obj);
}
finally
{
sm.closeResources(stmt, null);
}
}
|
python
|
def convert_namespaces_str(
bel_str: str,
api_url: str = None,
namespace_targets: Mapping[str, List[str]] = None,
canonicalize: bool = False,
decanonicalize: bool = False,
) -> str:
"""Convert namespace in string
Uses a regex expression to extract all NSArgs and replace them with the
updated NSArg from the BEL.bio API terms endpoint.
Args:
bel_str (str): bel statement string or partial string (e.g. subject or object)
api_url (str): BEL.bio api url to use, e.g. https://api.bel.bio/v1
namespace_targets (Mapping[str, List[str]]): formatted as in configuration file example
canonicalize (bool): use canonicalize endpoint/namespace targets
decanonicalize (bool): use decanonicalize endpoint/namespace targets
Results:
str: bel statement with namespaces converted
"""
# pattern - look for capitalized namespace followed by colon
# and either a quoted string or a string that
# can include any char other than space, comma or ')'
matches = re.findall(r'([A-Z]+:"(?:\\.|[^"\\])*"|[A-Z]+:(?:[^\),\s]+))', bel_str)
for nsarg in matches:
if "DEFAULT:" in nsarg: # skip default namespaces
continue
updated_nsarg = convert_nsarg(
nsarg,
api_url=api_url,
namespace_targets=namespace_targets,
canonicalize=canonicalize,
decanonicalize=decanonicalize,
)
if updated_nsarg != nsarg:
bel_str = bel_str.replace(nsarg, updated_nsarg)
return bel_str
|
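The NSArg-matching regex from above, run in isolation against a made-up BEL-ish string to show what it captures:

import re

NSARG = r'([A-Z]+:"(?:\\.|[^"\\])*"|[A-Z]+:(?:[^\),\s]+))'
bel = 'p(HGNC:AKT1) increases act(p(HGNC:"EGF receptor"), ma(DEFAULT:kin))'
print(re.findall(NSARG, bel))
# ['HGNC:AKT1', 'HGNC:"EGF receptor"', 'DEFAULT:kin']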
java
|
public CreateUserPoolClientRequest withLogoutURLs(String... logoutURLs) {
if (this.logoutURLs == null) {
setLogoutURLs(new java.util.ArrayList<String>(logoutURLs.length));
}
for (String ele : logoutURLs) {
this.logoutURLs.add(ele);
}
return this;
}
|
java
|
public void updateUI() {
if (!Util.empty(panelContent.getText())) {
panelContentRO.setData(panelContent.getData());
} else {
panelContentRO.setText(SAMPLE_CONTENT);
}
panel.setType((WPanel.Type) panelType.getSelected());
String headingText = tfHeading.getText();
if (!Util.empty(tfHeading.getText())) {
heading.setText(tfHeading.getText());
panel.setTitleText(headingText);
} else {
heading.setText(SAMPLE_HEADER);
panel.setTitleText(SAMPLE_TITLE_TEXT);
}
}
|
java
|
public static Vector getQNameProperties(String key, Properties props)
{
String s = props.getProperty(key);
if (null != s)
{
Vector v = new Vector();
int l = s.length();
boolean inCurly = false;
FastStringBuffer buf = new FastStringBuffer();
// parse through string, breaking on whitespaces. I do this instead
// of a tokenizer so I can track whitespace inside of curly brackets,
// which theoretically shouldn't happen if they contain legal URLs.
for (int i = 0; i < l; i++)
{
char c = s.charAt(i);
if (Character.isWhitespace(c))
{
if (!inCurly)
{
if (buf.length() > 0)
{
QName qname = QName.getQNameFromString(buf.toString());
v.addElement(qname);
buf.reset();
}
continue;
}
}
else if ('{' == c)
inCurly = true;
else if ('}' == c)
inCurly = false;
buf.append(c);
}
if (buf.length() > 0)
{
QName qname = QName.getQNameFromString(buf.toString());
v.addElement(qname);
buf.reset();
}
return v;
}
else
return null;
}
|
java
|
public SDVariable mul(SDVariable x) {
return mul(sameDiff.generateNewVarName(MulOp.OP_NAME,0),x);
}
|
java
|
public void beginFailover(String resourceGroupName, String serverName, String databaseName, String linkId) {
beginFailoverWithServiceResponseAsync(resourceGroupName, serverName, databaseName, linkId).toBlocking().single().body();
}
|
python
|
def conics(elts, et):
"""
Determine the state (position, velocity) of an orbiting body
from a set of elliptic, hyperbolic, or parabolic orbital
elements.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/conics_c.html
:param elts: Conic elements.
:type elts: 8-Element Array of floats
:param et: Input time.
:type et: float
:return: State of orbiting body at et.
:rtype: 6-Element Array of floats
"""
elts = stypes.toDoubleVector(elts)
et = ctypes.c_double(et)
state = stypes.emptyDoubleVector(6)
libspice.conics_c(elts, et, state)
return stypes.cVectorToPython(state)
|
python
|
def ghz_circuit(qubits: Qubits) -> Circuit:
"""Returns a circuit that prepares a multi-qubit Bell state from the zero
state.
"""
circ = Circuit()
circ += H(qubits[0])
for q0 in range(0, len(qubits)-1):
circ += CNOT(qubits[q0], qubits[q0+1])
return circ
|
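The Circuit, H and CNOT objects come from the surrounding library and are not shown here, but the gate sequence itself can be checked with a small pure-numpy statevector sketch (qubit 0 taken as the most significant index):

import numpy as np

def apply_gate(state, gate, targets, n_qubits):
    # Reshape to one axis per qubit, contract the gate over the target axes,
    # then move the gate's output axes back into place.
    k = len(targets)
    state = state.reshape([2] * n_qubits)
    gate = gate.reshape([2] * (2 * k))
    state = np.tensordot(gate, state, axes=(list(range(k, 2 * k)), targets))
    state = np.moveaxis(state, list(range(k)), targets)
    return state.reshape(-1)

H = np.array([[1, 1], [1, -1]]) / np.sqrt(2)
CNOT = np.eye(4)[[0, 1, 3, 2]]          # flips the target bit when the control is 1

n = 3
state = np.zeros(2 ** n)
state[0] = 1.0                          # |000>
state = apply_gate(state, H, [0], n)    # H on qubit 0
for q in range(n - 1):                  # CNOT chain, as in ghz_circuit
    state = apply_gate(state, CNOT, [q, q + 1], n)

print(np.round(state, 3))               # amplitude 0.707 on |000> and |111> only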
java
|
private void remove(Integer layer, Refreshable refreshable)
{
final Collection<Refreshable> refreshables = getLayer(layer);
refreshables.remove(refreshable);
if (refreshables.isEmpty())
{
indexs.remove(layer);
}
}
|
java
|
@SuppressWarnings("unchecked")
public static List<PendingMessage> parseRange(List<?> xpendingOutput) {
LettuceAssert.notNull(xpendingOutput, "XPENDING output must not be null");
List<PendingMessage> result = new ArrayList<>();
for (Object element : xpendingOutput) {
LettuceAssert.isTrue(element instanceof List, "Output elements must be a List");
List<Object> message = (List) element;
String messageId = (String) message.get(0);
String consumer = (String) message.get(1);
Long msSinceLastDelivery = (Long) message.get(2);
Long deliveryCount = (Long) message.get(3);
result.add(new PendingMessage(messageId, consumer, msSinceLastDelivery, deliveryCount));
}
return result;
}
|
java
|
public LoadBalancerInner updateTags(String resourceGroupName, String loadBalancerName, Map<String, String> tags) {
return updateTagsWithServiceResponseAsync(resourceGroupName, loadBalancerName, tags).toBlocking().last().body();
}
|
python
|
def refresh_token(self, headers=None, **kwargs):
"""
Request a refreshed token
"""
self._check_configuration("site", "token_url", "client_id",
"client_secret")
url = "%s%s" % (self.site, quote(self.token_url))
data = {
'client_id': self.client_id,
'client_secret': self.client_secret,
}
data.update(kwargs)
return self._make_request(url, data=data, headers=headers)
|
java
|
public short[] getStrictDurations() {
Vector durations = new Vector();
short currentDuration = 0;
for (int i=0; i<m_notes.size(); i++) {
currentDuration = ((Note)(m_notes.elementAt(i))).getStrictDuration();
if (durations.indexOf(new Short(currentDuration))==-1)
durations.addElement(currentDuration);
}
if (durations.size()==0)
return null;
else {
//sort the durations
Vector sortedDurations = new Vector();
for (int i=0; i<durations.size(); i++) {
int j=0;
while (j<sortedDurations.size()
&& (Short) sortedDurations.elementAt(j) <
((Short) durations.elementAt(i))
)
j++;
sortedDurations.insertElementAt(durations.elementAt(i),j);
}
short[] durationsAsArray = new short[sortedDurations.size()];
for (int i=0; i<sortedDurations.size(); i++)
durationsAsArray [i] = (Short) sortedDurations.elementAt(i);
return durationsAsArray;
}
}
|
python
|
def object_download(self, bucket, key, start_offset=0, byte_count=None):
"""Reads the contents of an object as text.
Args:
bucket: the name of the bucket containing the object.
key: the key of the object to be read.
start_offset: the start offset of bytes to read.
byte_count: the number of bytes to read. If None, it reads to the end.
Returns:
The text content within the object.
Raises:
Exception if the object could not be read from.
"""
args = {'alt': 'media'}
headers = {}
if start_offset > 0 or byte_count is not None:
header = 'bytes=%d-' % start_offset
if byte_count is not None:
header += '%d' % byte_count
headers['Range'] = header
url = Api._DOWNLOAD_ENDPOINT + (Api._OBJECT_PATH % (bucket, Api._escape_key(key)))
return google.datalab.utils.Http.request(url, args=args, headers=headers,
credentials=self._credentials, raw_response=True)
|
python
|
def load(fname):
"""Load an embedding dump generated by `save`"""
content = _open(fname).read()
if PY2:
state = pickle.loads(content)
else:
state = pickle.loads(content, encoding='latin1')
voc, vec = state
if len(voc) == 2:
words, counts = voc
word_count = dict(zip(words, counts))
vocab = CountedVocabulary(word_count=word_count)
else:
vocab = OrderedVocabulary(voc)
return Embedding(vocabulary=vocab, vectors=vec)
|
python
|
def simxGetBooleanParameter(clientID, paramIdentifier, operationMode):
'''
Please have a look at the function description/documentation in the V-REP user manual
'''
paramValue = ct.c_ubyte()
return c_GetBooleanParameter(clientID, paramIdentifier, ct.byref(paramValue), operationMode), bool(paramValue.value!=0)
|
java
|
@Override protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
if (noxItemCatalog == null) {
wasInvalidatedBefore = false;
return;
}
updateShapeOffset();
for (int i = 0; i < noxItemCatalog.size(); i++) {
if (shape.isItemInsideView(i)) {
loadNoxItem(i);
float left = shape.getXForItemAtPosition(i);
float top = shape.getYForItemAtPosition(i);
drawNoxItem(canvas, i, left, top);
}
}
canvas.restore();
wasInvalidatedBefore = false;
}
|
python
|
def guess_content_kind(path=None, web_video_data=None, questions=None):
""" guess_content_kind: determines what kind the content is
Args:
path (str): file path associated with the content
web_video_data: web video data associated with the content, if any
questions (list): assessment questions associated with the content, if any
Returns: string indicating node's kind
"""
# If there are any questions, return exercise
if questions and len(questions) > 0:
return content_kinds.EXERCISE
# See if any files match a content kind
if path:
ext = os.path.splitext(path)[1][1:].lower()
if ext in content_kinds.MAPPING:
return content_kinds.MAPPING[ext]
raise InvalidFormatException("Invalid file type: Allowed formats are {0}".format([key for key, value in content_kinds.MAPPING.items()]))
elif web_video_data:
return content_kinds.VIDEO
else:
return content_kinds.TOPIC
|
java
|
public Observable<ServiceResponse<Void>> addVideoFrameWithServiceResponseAsync(String teamName, String reviewId, Integer timescale) {
if (this.client.baseUrl() == null) {
throw new IllegalArgumentException("Parameter this.client.baseUrl() is required and cannot be null.");
}
if (teamName == null) {
throw new IllegalArgumentException("Parameter teamName is required and cannot be null.");
}
if (reviewId == null) {
throw new IllegalArgumentException("Parameter reviewId is required and cannot be null.");
}
String parameterizedHost = Joiner.on(", ").join("{baseUrl}", this.client.baseUrl());
return service.addVideoFrame(teamName, reviewId, timescale, this.client.acceptLanguage(), parameterizedHost, this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
@Override
public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
try {
ServiceResponse<Void> clientResponse = addVideoFrameDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
|
java
|
public MultiChangeBuilder<PS, SEG, S> insertText(int position, String text) {
return replaceText(position, position, text);
}
|
python
|
def __declare(self, *facts):
"""
Internal declaration method. Used for ``declare`` and ``deffacts``
"""
if any(f.has_field_constraints() for f in facts):
raise TypeError(
"Declared facts cannot contain conditional elements")
elif any(f.has_nested_accessor() for f in facts):
raise KeyError(
"Cannot declare facts containing double underscores as keys.")
else:
last_inserted = None
for fact in facts:
last_inserted = self.facts.declare(fact)
if not self.running:
added, removed = self.get_activations()
self.strategy.update_agenda(self.agenda, added, removed)
return last_inserted
|
python
|
def schedule(self, task: Schedulable, *args, **kwargs):
"""Schedule a job to be executed as soon as possible.
:arg task: the task or its name to execute in the background
:arg args: args to be passed to the task function
:arg kwargs: kwargs to be passed to the task function
This method can only be used once tasks have been attached to a
Spinach :class:`Engine`.
"""
self._require_attached_tasks()
self._spin.schedule(task, *args, **kwargs)
|
java
|
public CharTrie addAlphabet(String document) {
document.chars().mapToObj(i -> new String(Character.toChars(i))).forEach(s -> addDocument(s));
return this;
}
|
java
|
private static void printVersion() {
System.out.println("JNRPE version " + VERSION);
System.out.println("Copyright (c) 2011 Massimiliano Ziccardi");
System.out.println("Licensed under the Apache License, Version 2.0");
System.out.println();
}
|
java
|
static ContentCryptoMaterial create(SecretKey cek, byte[] iv,
EncryptionMaterials kekMaterials,
S3CryptoScheme scheme,
CryptoConfiguration config, AWSKMS kms,
AmazonWebServiceRequest req) {
return doCreate(cek, iv, kekMaterials, scheme.getContentCryptoScheme(),
scheme, config, kms, req);
}
|
python
|
def ref(self, ref):
"""Get a reference pointed to by ``ref``.
The most common will be branches and tags. For a branch, you must
specify 'heads/branchname' and for a tag, 'tags/tagname'. Essentially,
the system should return any reference you provide it in the namespace,
including notes and stashes (provided they exist on the server).
:param str ref: (required)
:returns: :class:`Reference <github3.git.Reference>`
"""
json = None
if ref:
url = self._build_url('git', 'refs', ref, base_url=self._api)
json = self._json(self._get(url), 200)
return Reference(json, self) if json else None
|
java
|
public JBBPTextWriter Double(final double value) throws IOException {
ensureValueMode();
String convertedByExtras = null;
for (final Extra e : this.extras) {
convertedByExtras = e.doConvertDoubleToStr(this, value);
if (convertedByExtras != null) {
break;
}
}
if (convertedByExtras == null) {
final double valueToWrite;
if (this.byteOrder == JBBPByteOrder.LITTLE_ENDIAN) {
valueToWrite = Double.longBitsToDouble(JBBPFieldLong.reverseBits(Double.doubleToLongBits(value)));
} else {
valueToWrite = value;
}
printValueString(JBBPUtils.ensureMinTextLength(JBBPUtils.double2str(valueToWrite, this.radix), this.maxCharsRadixForShort, '0', 0));
} else {
printValueString(convertedByExtras);
}
return this;
}
|
java
|
public boolean isBlock() throws SftpStatusException, SshException {
// This is long hand because gcj chokes when it is not? Investigate why
if ((getAttributes().getPermissions().longValue() & SftpFileAttributes.S_IFBLK) == SftpFileAttributes.S_IFBLK) {
return true;
}
return false;
}
|
python
|
def getEngineRoot(self):
"""
Returns the root directory location of the latest installed version of UE4
"""
if not hasattr(self, '_engineRoot'):
self._engineRoot = self._getEngineRoot()
return self._engineRoot
|
python
|
def setnx(self, key, value):
"""Set the value of a key, only if the key does not exist."""
fut = self.execute(b'SETNX', key, value)
return wait_convert(fut, bool)
|
java
|
public void marshall(ListTerminologiesRequest listTerminologiesRequest, ProtocolMarshaller protocolMarshaller) {
if (listTerminologiesRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(listTerminologiesRequest.getNextToken(), NEXTTOKEN_BINDING);
protocolMarshaller.marshall(listTerminologiesRequest.getMaxResults(), MAXRESULTS_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
}
|
java
|
@SuppressWarnings("unchecked")
public static <T extends SpecificRecord> TypeInformation<Row> convertToTypeInfo(Class<T> avroClass) {
Preconditions.checkNotNull(avroClass, "Avro specific record class must not be null.");
// determine schema to retrieve deterministic field order
final Schema schema = SpecificData.get().getSchema(avroClass);
return (TypeInformation<Row>) convertToTypeInfo(schema);
}
|
python
|
def _mk_call_init(class_):
"""Create an __init__ function for a call type instruction.
Parameters
----------
class_ : type
The type to bind the function to.
Returns
-------
__init__ : callable
The __init__ method for the class.
"""
def __init__(self, packed=no_default, *, positional=0, keyword=0):
if packed is no_default:
arg = int.from_bytes(bytes((positional, keyword)), 'little')
elif not positional and not keyword:
arg = packed
else:
raise TypeError('cannot specify packed and unpacked arguments')
self.positional, self.keyword = arg.to_bytes(2, 'little')
super(class_, self).__init__(arg)
return __init__
|
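The byte packing done in that __init__ is just a low-byte/high-byte encoding of the call argument; a quick round-trip of the conversions used above:

positional, keyword = 3, 2
arg = int.from_bytes(bytes((positional, keyword)), 'little')
print(arg)                        # 515, i.e. 3 + 2 * 256
print(arg.to_bytes(2, 'little'))  # b'\x03\x02'
p, k = arg.to_bytes(2, 'little')
print(p, k)                       # 3 2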
java
|
public static String quote(Iterable<String> ids) {
StringBuilder builder = new StringBuilder();
for (Iterator<String> iterator = ids.iterator(); iterator.hasNext(); ) {
String id = iterator.next();
builder.append(quote(id));
if (iterator.hasNext()) {
builder.append(",");
}
}
return builder.toString();
}
|
java
|
private void evict(Node candidate) {
if (data.size() > maximumSize) {
List<Node> sample = (policy == EvictionPolicy.RANDOM)
? Arrays.asList(table)
: sampleStrategy.sample(table, candidate, sampleSize, random, policyStats);
Node victim = policy.select(sample, random, tick);
policyStats.recordEviction();
if (admittor.admit(candidate.key, victim.key)) {
removeFromTable(victim);
data.remove(victim.key);
} else {
removeFromTable(candidate);
data.remove(candidate.key);
}
}
}
|
java
|
private synchronized String ufsFingerprint(long fileId) throws IOException {
FileInfo fileInfo = mBlockWorker.getFileInfo(fileId);
String dstPath = fileInfo.getUfsPath();
try (CloseableResource<UnderFileSystem> ufsResource =
mUfsManager.get(fileInfo.getMountId()).acquireUfsResource()) {
UnderFileSystem ufs = ufsResource.get();
return ufs.isFile(dstPath) ? ufs.getFingerprint(dstPath) : null;
}
}
|
java
|
private int getMax(double data[]) {
int idx = 0;
double max = data[0];
for (int i = 1; i < data.length; i++) {
if (max < data[i]) {
max = data[i];
idx = i;
}
}
return idx;
}
|
python
|
def set_assessment(self, assessment_id=None):
"""Sets the assessment.
arg: assessmentId (osid.id.Id): the new assessment
raise: INVALID_ARGUMENT - assessmentId is invalid
raise: NoAccess - assessmentId cannot be modified
raise: NullArgument - assessmentId is null
compliance: mandatory - This method must be implemented.
"""
if assessment_id is None:
raise NullArgument()
metadata = Metadata(**settings.METADATA['assessment_id'])
if metadata.is_read_only():
raise NoAccess()
if self._is_valid_input(assessment_id, metadata, array=False):
self._my_map['assessmentId'] = str(assessment_id)
else:
raise InvalidArgument
|
python
|
def prepend_elements(self, elements):
"""
Prepends more elements to the contained internal elements.
"""
self._elements = list(elements) + self._elements
self._on_element_change()
|
python
|
def _update_return_dict(ret, success, data, errors=None, warnings=None):
'''
PRIVATE METHOD
Updates the return dictionary and returns it.
ret : dict<str,obj>
The original return dict to update. The ret param should have
been created from _get_return_dict()
success : boolean (True)
True indicates a successful result.
data : dict<str,obj> ({})
Data to be returned to the caller.
errors : list<str> ([])
A list of error messages to be returned to the caller
warnings : list<str> ([])
A list of warnings to be returned to the caller.
Return: dict<str,obj>::
{'success':boolean, 'data':dict, 'errors':list, 'warnings':list}
'''
errors = [] if errors is None else errors
warnings = [] if warnings is None else warnings
ret['success'] = success
ret['data'].update(data)
ret['errors'] = ret['errors'] + errors
ret['warnings'] = ret['warnings'] + warnings
return ret
|
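Assuming the function above is in scope, a hypothetical round-trip looks like this (_get_return_dict is not shown, so a stub with the documented shape stands in for it):

def _get_return_dict():
    # stub with the documented shape; the real helper lives alongside the function above
    return {'success': True, 'data': {}, 'errors': [], 'warnings': []}

ret = _get_return_dict()
ret = _update_return_dict(ret, False, {'core': 'collection1'}, errors=['ping failed'])
print(ret)
# {'success': False, 'data': {'core': 'collection1'}, 'errors': ['ping failed'], 'warnings': []}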