language | func_code_string
---|---|
java | private void reportOnTomcatInformations() { // NOPMD
// CHECKSTYLE:ON
final Map<String, TomcatInformations> tcInfos = new LinkedHashMap<String, TomcatInformations>();
for (final TomcatInformations tcInfo : javaInformations.getTomcatInformationsList()) {
if (tcInfo.getRequestCount() > 0) {
final String fields = "{tomcat_name=\"" + sanitizeName(tcInfo.getName()) + "\"}";
tcInfos.put(fields, tcInfo);
}
}
if (tcInfos.isEmpty()) {
return;
}
printHeader(MetricType.GAUGE, "tomcat_threads_max", "tomcat max threads");
for (final Map.Entry<String, TomcatInformations> entry : tcInfos.entrySet()) {
printLongWithFields("tomcat_threads_max", entry.getKey(),
entry.getValue().getMaxThreads());
}
printHeader(MetricType.GAUGE, "tomcat_thread_busy_count", "tomcat currently busy threads");
for (final Map.Entry<String, TomcatInformations> entry : tcInfos.entrySet()) {
printLongWithFields("tomcat_thread_busy_count", entry.getKey(),
entry.getValue().getCurrentThreadsBusy());
}
printHeader(MetricType.COUNTER, "tomcat_received_bytes", "tomcat total received bytes");
for (final Map.Entry<String, TomcatInformations> entry : tcInfos.entrySet()) {
printLongWithFields("tomcat_received_bytes", entry.getKey(),
entry.getValue().getBytesReceived());
}
printHeader(MetricType.COUNTER, "tomcat_sent_bytes", "tomcat total sent bytes");
for (final Map.Entry<String, TomcatInformations> entry : tcInfos.entrySet()) {
printLongWithFields("tomcat_sent_bytes", entry.getKey(),
entry.getValue().getBytesSent());
}
printHeader(MetricType.COUNTER, "tomcat_request_count", "tomcat total request count");
for (final Map.Entry<String, TomcatInformations> entry : tcInfos.entrySet()) {
printLongWithFields("tomcat_request_count", entry.getKey(),
entry.getValue().getRequestCount());
}
printHeader(MetricType.COUNTER, "tomcat_error_count", "tomcat total error count");
for (final Map.Entry<String, TomcatInformations> entry : tcInfos.entrySet()) {
printLongWithFields("tomcat_error_count", entry.getKey(),
entry.getValue().getErrorCount());
}
printHeader(MetricType.COUNTER, "tomcat_processing_time_millis",
"tomcat total processing time");
for (final Map.Entry<String, TomcatInformations> entry : tcInfos.entrySet()) {
printLongWithFields("tomcat_processing_time_millis", entry.getKey(),
entry.getValue().getProcessingTime());
}
printHeader(MetricType.GAUGE, "tomcat_max_time_millis",
"tomcat max time for single request");
for (final Map.Entry<String, TomcatInformations> entry : tcInfos.entrySet()) {
printLongWithFields("tomcat_max_time_millis", entry.getKey(),
entry.getValue().getMaxTime());
}
} |
java | protected synchronized void sendCommandToShard(VoidMessage message) {
// if this node is shard - we just step over TCP/IP infrastructure
// TODO: we want LocalTransport to be used in such cases
if (nodeRole == NodeRole.SHARD) {
message.setTargetId(shardIndex);
messages.add(message);
return;
}
//log.info("Sending CS: {}", message.getClass().getCanonicalName());
message.setTargetId(targetIndex);
DirectBuffer buffer = message.asUnsafeBuffer();
long result = publicationForShards.offer(buffer);
if (result < 0)
for (int i = 0; i < 5 && result < 0; i++) {
try {
// TODO: make this configurable
Thread.sleep(1000);
} catch (InterruptedException e) {
// restore the interrupt status instead of silently swallowing it
Thread.currentThread().interrupt();
}
result = publicationForShards.offer(buffer);
}
// TODO: handle retransmit & backpressure separately
if (result < 0)
throw new RuntimeException("Unable to send message over the wire. Error code: " + result);
} |
python | def _set_internal_value(self, new_internal_value):
"""
This is supposed to be only used by fitting engines
:param new_internal_value: new value in internal representation
:return: none
"""
if new_internal_value != self._internal_value:
self._internal_value = new_internal_value
# Call callbacks if any
for callback in self._callbacks:
callback(self) |
python | def reset(self, path, pretend=False):
"""
Rolls all of the currently applied migrations back.
:param path: The path
:type path: str
:param pretend: Whether we execute the migrations as dry-run
:type pretend: bool
:rtype: int
"""
self._notes = []
migrations = sorted(self._repository.get_ran(), reverse=True)
count = len(migrations)
if count == 0:
self._note("<info>Nothing to rollback.</info>")
else:
for migration in migrations:
self._run_down(path, {"migration": migration}, pretend)
return count |
java | @Override
public EClass getIfcFeatureElementAddition() {
if (ifcFeatureElementAdditionEClass == null) {
ifcFeatureElementAdditionEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(266);
}
return ifcFeatureElementAdditionEClass;
} |
python | async def scan_for_units(self, iprange):
"""Scan local network for GH units."""
units = []
for ip_address in ipaddress.IPv4Network(iprange):
sock = socket.socket()
sock.settimeout(0.02)
host = str(ip_address)
try:
scan_result = sock.connect((host, PORT))
except socket.error:
scan_result = 1
_LOGGER.debug('Checking port connectivity on %s:%s',
host, (str(PORT)))
if scan_result is None:
ghlocalapi = DeviceInfo(self._loop, self._session, host)
await ghlocalapi.get_device_info()
data = ghlocalapi.device_info
if data is not None:
cap = data['device_info']['capabilities']
units.append({
'host': host,
'name': data['name'],
'model': data['device_info']['model_name'],
'assistant_supported': cap.get('assistant_supported',
False)
})
sock.close()
return units |
python | def is_valid_sid_for_chain(pid, sid):
"""Assert that ``sid`` can be assigned to the single object ``pid`` or to the chain
to which ``pid`` belongs.
- If the chain does not have a SID, the new SID must be previously unused.
- If the chain already has a SID, the new SID must match the existing SID.
"""
if not d1_gmn.app.did.is_valid_sid_for_chain(pid, sid):
existing_sid = d1_gmn.app.revision.get_sid_by_pid(pid)
raise d1_common.types.exceptions.IdentifierNotUnique(
0,
'A different SID is already assigned to the revision chain to which '
'the object being created or updated belongs. A SID cannot be changed '
'once it has been assigned to a chain. '
'existing_sid="{}", new_sid="{}", pid="{}"'.format(existing_sid, sid, pid),
) |
java | public static X509CertChainValidatorExt buildCertificateValidator(
String trustAnchorsDir, ValidationErrorListener validationErrorListener,
long updateInterval, boolean lazy) {
return buildCertificateValidator(trustAnchorsDir, validationErrorListener,
null, updateInterval, DEFAULT_NS_CHECKS, DEFAULT_CRL_CHECKS,
DEFAULT_OCSP_CHECKS, lazy);
} |
python | def workdir_loaded(func):
"""
Decorator to make sure that the workdir is loaded when calling the
decorated function
"""
@wraps(func)
def wrapper(workdir, *args, **kwargs):
if not workdir.loaded:
workdir.load()
return func(workdir, *args, **kwargs)
return wrapper |
java | public final boolean isEmpty() {
if (mDatas.isEmpty()) {
return true;
}
for (int i = 0; i < mDatas.size(); i++) {
if (!mDatas.get(i).isEmpty()) {
return false;
}
}
return true;
} |
java | public static String generateCacheFileFullPath(String url, File cacheDir) {
String fileName = md5(url);
File cacheFile = new File(cacheDir, fileName);
return cacheFile.getPath();
} |
python | def main(start, end, out):
"""
Scrape MLBAM data
:param start: Start Day(YYYYMMDD)
:param end: End Day(YYYYMMDD)
:param out: Output directory(default:"../output/mlb")
"""
try:
logging.basicConfig(level=logging.WARNING)
MlbAm.scrape(start, end, out)
except MlbAmBadParameter as e:
raise click.BadParameter(e) |
python | def values(self, *keys):
""" Return the values of the record, optionally filtering to
include only certain values by index or key.
:param keys: indexes or keys of the items to include; if none
are provided, all values will be included
:return: list of values
"""
if keys:
d = []
for key in keys:
try:
i = self.index(key)
except KeyError:
d.append(None)
else:
d.append(self[i])
return d
return list(self) |
python | def _read_chunk_from_socket(socket):
"""
(coroutine)
Turn socket reading into coroutine.
"""
fd = socket.fileno()
f = Future()
def read_callback():
get_event_loop().remove_reader(fd)
# Read next chunk.
try:
data = socket.recv(1024)
except OSError as e:
# On OSX, when we try to create a new window by typing "pymux
# new-window" in a centain pane, very often we get the following
# error: "OSError: [Errno 9] Bad file descriptor."
# This doesn't seem very harmful, and we can just try again.
logger.warning('Got OSError while reading data from client: %s. '
'Trying again.', e)
f.set_result('')
return
if data:
f.set_result(data)
else:
f.set_exception(BrokenPipeError)
get_event_loop().add_reader(fd, read_callback)
return f |
java | public static Set<String> tupleToString(Set<Tuple> set) {
Set<String> result = new LinkedHashSet<String>();
for (Tuple tuple : set) {
String element = tuple.getElement();
result.add(element);
}
return result;
} |
python | def fetch_by_client_id(self, client_id):
"""
Retrieves a client by its identifier.
:param client_id: The identifier of a client.
:return: An instance of :class:`oauth2.datatype.Client`.
:raises: :class:`oauth2.error.ClientNotFoundError` if no client could be
retrieved.
"""
grants = None
redirect_uris = None
response_types = None
client_data = self.fetchone(self.fetch_client_query, client_id)
if client_data is None:
raise ClientNotFoundError
grant_data = self.fetchall(self.fetch_grants_query, client_data[0])
if grant_data:
grants = []
for grant in grant_data:
grants.append(grant[0])
redirect_uris_data = self.fetchall(self.fetch_redirect_uris_query,
client_data[0])
if redirect_uris_data:
redirect_uris = []
for redirect_uri in redirect_uris_data:
redirect_uris.append(redirect_uri[0])
response_types_data = self.fetchall(self.fetch_response_types_query,
client_data[0])
if response_types_data:
response_types = []
for response_type in response_types_data:
response_types.append(response_type[0])
return Client(identifier=client_data[1], secret=client_data[2],
authorized_grants=grants,
authorized_response_types=response_types,
redirect_uris=redirect_uris) |
java | public static void checkColorRenderableTexture2D(
final JCGLTextureFormat t)
throws JCGLExceptionFormatError
{
if (!isColorRenderable2D(t)) {
final String m = String.format(
"Format %s is not color-renderable for 2D textures", t);
assert m != null;
throw new JCGLExceptionFormatError(m);
}
} |
java | public Matrix4x3d lerp(Matrix4x3dc other, double t, Matrix4x3d dest) {
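// Component-wise linear interpolation between this matrix and `other`:
// dest = this + (other - this) * t, so t=0 yields this and t=1 yields other.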
dest.m00 = m00 + (other.m00() - m00) * t;
dest.m01 = m01 + (other.m01() - m01) * t;
dest.m02 = m02 + (other.m02() - m02) * t;
dest.m10 = m10 + (other.m10() - m10) * t;
dest.m11 = m11 + (other.m11() - m11) * t;
dest.m12 = m12 + (other.m12() - m12) * t;
dest.m20 = m20 + (other.m20() - m20) * t;
dest.m21 = m21 + (other.m21() - m21) * t;
dest.m22 = m22 + (other.m22() - m22) * t;
dest.m30 = m30 + (other.m30() - m30) * t;
dest.m31 = m31 + (other.m31() - m31) * t;
dest.m32 = m32 + (other.m32() - m32) * t;
dest.properties = properties & other.properties();
return dest;
} |
java | public static <T> Collection<T> findAmongst(Class<T> clazz, Object ... instances) {
return findAmongst(clazz, Arrays.asList(instances));
} |
python | def _kalman_update_step(k, p_m , p_P, p_meas_model_callable, measurement, calc_log_likelihood= False,
calc_grad_log_likelihood=False, p_dm = None, p_dP = None):
"""
Input:
k: int
Iteration No. Starts at 0. Total number of iterations equal to the
number of measurements.
p_m: matrix of size (state_dim, time_series_no)
Mean value from the previous step. For "multiple time series mode"
it is matrix, second dimension of which correspond to different
time series.
p_P:
Covariance matrix from the prediction step.
p_meas_model_callable: object
measurement: (measurement_dim, time_series_no) matrix
One measurement used on the current update step. For
"multiple time series mode" it is matrix, second dimension of
which correspond to different time series.
calc_log_likelihood: boolean
Whether to calculate marginal likelihood of the state-space model.
calc_grad_log_likelihood: boolean
Whether to calculate gradient of the marginal likelihood
of the state-space model. If true then the next parameter must
provide the extra parameters for gradient calculation.
p_dm: 3D array (state_dim, time_series_no, parameters_no)
Mean derivatives from the prediction step. For "multiple time series mode"
it is 3D array, second dimension of which correspond to different
time series.
p_dP: array
Covariance derivatives from the prediction step.
Output:
----------------------------
m_upd, P_upd, dm_upd, dP_upd: matrices, 3D objects
Results of the update step.
log_likelihood_update: double or 1D array
Update to the log_likelihood from this step
d_log_likelihood_update: (grad_params_no, time_series_no) matrix
Update to the gradient of log_likelihood, "multiple time series mode"
adds extra columns to the gradient.
"""
#import pdb; pdb.set_trace()
m_pred = p_m # from prediction step
P_pred = p_P # from prediction step
H = p_meas_model_callable.Hk(k, m_pred, P_pred)
R = p_meas_model_callable.Rk(k)
time_series_no = p_m.shape[1] # number of time series
log_likelihood_update=None; dm_upd=None; dP_upd=None; d_log_likelihood_update=None
# Update step (only if there is data)
#if not np.any(np.isnan(measurement)): # TODO: if some dimensions are missing, do properly computations for other.
v = measurement-p_meas_model_callable.f_h(k, m_pred, H)
S = H.dot(P_pred).dot(H.T) + R
if measurement.shape[0]==1: # measurements are one dimensional
if (S <= 0): # S must be strictly positive, it is divided by below
raise ValueError("Kalman Filter Update: S is not positive, step %i" % k)
#import pdb; pdb.set_trace()
K = P_pred.dot(H.T) / S
if calc_log_likelihood:
log_likelihood_update = -0.5 * ( np.log(2*np.pi) + np.log(S) +
v*v / S)
#log_likelihood_update = log_likelihood_update[0,0] # to make int
if np.any(np.isnan(log_likelihood_update)): # some member in P_pred is None.
raise ValueError("Nan values in likelihood update!")
LL = None; islower = None
else:
LL,islower = linalg.cho_factor(S)
K = linalg.cho_solve((LL,islower), H.dot(P_pred.T)).T
if calc_log_likelihood:
log_likelihood_update = -0.5 * ( v.shape[0]*np.log(2*np.pi) +
2*np.sum( np.log(np.diag(LL)) ) +\
np.sum((linalg.cho_solve((LL,islower),v)) * v, axis = 0) ) # diagonal of v.T*S^{-1}*v
if calc_grad_log_likelihood:
dm_pred_all_params = p_dm # derivatives of the prediction phase
dP_pred_all_params = p_dP
param_number = p_dP.shape[2]
dH_all_params = p_meas_model_callable.dHk(k)
dR_all_params = p_meas_model_callable.dRk(k)
dm_upd = np.empty(dm_pred_all_params.shape)
dP_upd = np.empty(dP_pred_all_params.shape)
# first dimension - parameter number, second - time series number
d_log_likelihood_update = np.empty((param_number,time_series_no))
for param in range(param_number):
dH = dH_all_params[:,:,param]
dR = dR_all_params[:,:,param]
dm_pred = dm_pred_all_params[:,:,param]
dP_pred = dP_pred_all_params[:,:,param]
# Terms in the likelihood derivatives
dv = - np.dot( dH, m_pred) - np.dot( H, dm_pred)
dS = np.dot(dH, np.dot( P_pred, H.T))
dS += dS.T
dS += np.dot(H, np.dot( dP_pred, H.T)) + dR
# TODO: maybe symmetrize dS
# dm and dP for the next step
if LL is not None: # the state vector is not a scalar
tmp1 = linalg.cho_solve((LL,islower), H).T
tmp2 = linalg.cho_solve((LL,islower), dH).T
tmp3 = linalg.cho_solve((LL,islower), dS).T
else: # the state vector is a scalar
tmp1 = H.T / S
tmp2 = dH.T / S
tmp3 = dS.T / S
dK = np.dot( dP_pred, tmp1) + np.dot( P_pred, tmp2) - \
np.dot( P_pred, np.dot( tmp1, tmp3 ) )
# terms required for the next step, save this for each parameter
dm_upd[:,:,param] = dm_pred + np.dot(dK, v) + np.dot(K, dv)
dP_upd[:,:,param] = -np.dot(dK, np.dot(S, K.T))
dP_upd[:,:,param] += dP_upd[:,:,param].T
dP_upd[:,:,param] += dP_pred - np.dot(K , np.dot( dS, K.T))
dP_upd[:,:,param] = 0.5*(dP_upd[:,:,param] + dP_upd[:,:,param].T) #symmetrize
# computing the likelihood change for each parameter:
if LL is not None: # the state vector is not 1D
#tmp4 = linalg.cho_solve((LL,islower), dv)
tmp5 = linalg.cho_solve((LL,islower), v)
else: # the state vector is a scalar
#tmp4 = dv / S
tmp5 = v / S
d_log_likelihood_update[param,:] = -(0.5*np.sum(np.diag(tmp3)) + \
np.sum(tmp5*dv, axis=0) - 0.5 * np.sum(tmp5 * np.dot(dS, tmp5), axis=0) )
# Before
#d_log_likelihood_update[param,0] = -(0.5*np.sum(np.diag(tmp3)) + \
#np.dot(tmp5.T, dv) - 0.5 * np.dot(tmp5.T ,np.dot(dS, tmp5)) )
# Compute the actual updates for mean and variance of the states.
m_upd = m_pred + K.dot( v )
# Covariance update and ensure it is symmetric
P_upd = K.dot(S).dot(K.T)
P_upd = 0.5*(P_upd + P_upd.T)
P_upd = P_pred - P_upd# this update matrix is symmetric
return m_upd, P_upd, log_likelihood_update, dm_upd, dP_upd, d_log_likelihood_update |
java | private void cleanupInvalidRegistrationIDsForVariant(String variantID, MulticastResult multicastResult, List<String> registrationIDs) {
// get the FCM send results for all of the client devices:
final List<Result> results = multicastResult.getResults();
// storage for all the invalid registration IDs:
final Set<String> inactiveTokens = new HashSet<>();
// read the results:
for (int i = 0; i < results.size(); i++) {
// use the current index to access the individual results
final Result result = results.get(i);
final String errorCodeName = result.getErrorCodeName();
if (errorCodeName != null) {
logger.info(String.format("Processing [%s] error code from FCM response, for registration ID: [%s]", errorCodeName, registrationIDs.get(i)));
}
//after sending, let's find tokens that are inactive from now on and need to be replaced with the given canonical id.
//according to the fcm documentation, google refreshes tokens after some time, so the previous tokens become invalid.
//when you send a notification to an expired registration id, the message (notification) is still delivered the first time,
//but you also get back a new registration id, called the canonical id. This means the registration id you sent the
//message to has been changed to this canonical id, so change it on the server side as well.
//check if current index of result has canonical id
String canonicalRegId = result.getCanonicalRegistrationId();
if (canonicalRegId != null) {
// same device has more than one registration id: update it, if needed!
// let's see if the canonical id is already in our system:
Installation installation = clientInstallationService.findInstallationForVariantByDeviceToken(variantID, canonicalRegId);
if (installation != null) {
// ok, there is already a device, with newest/latest registration ID (aka canonical id)
// It is time to remove the old reg id, to avoid duplicated messages in the future!
inactiveTokens.add(registrationIDs.get(i));
} else {
// since there is no registered device with newest/latest registration ID (aka canonical id),
// this means the new token/regId was never stored on the server. Let's update the device and change its token to new canonical id:
installation = clientInstallationService.findInstallationForVariantByDeviceToken(variantID,registrationIDs.get(i));
installation.setDeviceToken(canonicalRegId);
//update installation with the new token
logger.info(String.format("Based on returned canonical id from FCM, updating Android installations with registration id [%s] with new token [%s] ", registrationIDs.get(i), canonicalRegId));
clientInstallationService.updateInstallation(installation);
}
} else {
// is there any 'interesting' error code, which requires a clean up of the registration IDs
if (FCM_ERROR_CODES.contains(errorCodeName)) {
// Ok the result at INDEX 'i' represents a 'bad' registrationID
// Now use the INDEX of the _that_ result object, and look
// for the matching registrationID inside of the List that contains
// _all_ the used registration IDs and store it:
inactiveTokens.add(registrationIDs.get(i));
}
}
}
if (! inactiveTokens.isEmpty()) {
// trigger asynchronous deletion:
logger.info(String.format("Based on FCM response data and error codes, deleting %d invalid or duplicated Android installations", inactiveTokens.size()));
clientInstallationService.removeInstallationsForVariantByDeviceTokens(variantID, inactiveTokens);
}
} |
python | def system_reboot(wait_time_sec=20):
"""Reboots the system after a specified wait time. Must be run as root
:param wait_time_sec: (int) number of sec to wait before performing the reboot
:return: None
:raises: SystemRebootError, SystemRebootTimeoutError
"""
log = logging.getLogger(mod_logger + '.system_reboot')
try:
wait_time_sec = int(wait_time_sec)
except ValueError:
raise CommandError('wait_time_sec must be an int, or a string convertible to an int')
log.info('Waiting {t} seconds before reboot...'.format(t=str(wait_time_sec)))
time.sleep(wait_time_sec)
command = ['shutdown', '-r', 'now']
log.info('Shutting down with command: [{c}]'.format(c=' '.join(command)))
time.sleep(2)
log.info('Shutting down...')
try:
result = run_command(command=command, timeout_sec=60)
except CommandError as ex:
msg = 'There was a problem running shutdown command: [{c}]\n{e}'.format(c=' '.join(command), e=str(ex))
raise SystemRebootError(msg) from ex
if result['code'] != 0:
msg = 'Shutdown command exited with a non-zero code: [{c}], and produced output:\n{o}'.format(
c=str(result['code']), o=result['output'])
raise SystemRebootError(msg)
log.info('Waiting 60 seconds to ensure the reboot completes...')
time.sleep(60)
msg = 'Reboot has not completed after 60 seconds'
log.error(msg)
raise SystemRebootTimeoutError(msg) |
python | def uniq(args):
"""
%prog uniq gffile > uniq.gff
Remove redundant gene models. For overlapping gene models, take the longest
gene. A second scan takes only the genes selected.
--mode controls whether you want larger feature, or higher scoring feature.
--best controls how many redundant features to keep, e.g. 10 for est2genome.
"""
supported_modes = ("span", "score")
p = OptionParser(uniq.__doc__)
p.add_option("--type", default="gene",
help="Types of features to non-redundify [default: %default]")
p.add_option("--mode", default="span", choices=supported_modes,
help="Pile mode [default: %default]")
p.add_option("--best", default=1, type="int",
help="Use best N features [default: %default]")
p.add_option("--name", default=False, action="store_true",
help="Non-redundify Name attribute [default: %default]")
p.add_option("--iter", default="2", choices=("1", "2"),
help="Number of iterations to grab children [default: %default]")
p.set_outfile()
opts, args = p.parse_args(args)
if len(args) != 1:
sys.exit(not p.print_help())
gffile, = args
mode = opts.mode
bestn = opts.best
allgenes = import_feats(gffile, opts.type)
g = get_piles(allgenes)
bestids = set()
for group in g:
if mode == "span":
scores_group = [(- x.span, x) for x in group]
else:
scores_group = [(- float(x.score), x) for x in group]
scores_group.sort()
seen = set()
for score, x in scores_group:
if len(seen) >= bestn:
break
name = x.attributes["Name"][0] if opts.name else x.accn
if name in seen:
continue
seen.add(name)
bestids.add(x.accn)
populate_children(opts.outfile, bestids, gffile, iter=opts.iter) |
python | def load(self, config):
"""Load the password from the configuration file."""
password_dict = {}
if config is None:
logger.warning("No configuration file available. Cannot load password list.")
elif not config.has_section(self._section):
logger.warning("No [%s] section in the configuration file. Cannot load password list." % self._section)
else:
logger.info("Start reading the [%s] section in the configuration file" % self._section)
password_dict = dict(config.items(self._section))
# Password list loaded
logger.info("%s password(s) loaded from the configuration file" % len(password_dict))
logger.debug("Password dictionary: %s" % password_dict)
return password_dict |
java | public void selectValues(String... expectedValues) {
String[] values = checkSelectValues(expectedValues, 0, 0);
String reason = NO_ELEMENT_FOUND;
if (values == null && getElement().is().present()) {
reason = ELEMENT_NOT_SELECT;
}
assertNotNull(reason, values);
assertEquals("Selected Values Mismatch", Arrays.asList(expectedValues), Arrays.asList(values));
} |
java | @Override
public void visitMethod(Method obj) {
methodName = obj.getName();
state = State.SAW_NOTHING;
} |
python | def nunpack(s, default=0):
"""Unpacks 1 to 4 byte integers (big endian)."""
l = len(s)
if not l:
return default
elif l == 1:
return ord(s)
elif l == 2:
return struct.unpack('>H', s)[0]
elif l == 3:
return struct.unpack('>L', b'\x00'+s)[0]
elif l == 4:
return struct.unpack('>L', s)[0]
else:
raise TypeError('invalid length: %d' % l) |
java | public static DatePickerDialog newInstance(OnDateSetListener callBack, int year, int monthOfYear, int dayOfMonth) {
DatePickerDialog ret = new DatePickerDialog();
ret.initialize(callBack, year, monthOfYear, dayOfMonth);
return ret;
} |
java | public boolean containsFile(SftpFile f) {
return unchangedFiles.contains(f) || newFiles.contains(f)
|| updatedFiles.contains(f) || deletedFiles.contains(f)
|| recursedDirectories.contains(f.getAbsolutePath())
|| failedTransfers.containsKey(f);
} |
java | @Override
public void run() {
population = createInitialPopulation() ;
evaluatePopulation(population) ;
evaluations = populationSize ;
while (evaluations < maxEvaluations) {
List<S> offspringPopulation = new ArrayList<>(populationSize);
for (int i = 0; i < populationSize; i += 2) {
List<S> parents = new ArrayList<>(2);
parents.add(selectionOperator.execute(population));
parents.add(selectionOperator.execute(population));
List<S> offspring = crossoverOperator.execute(parents);
mutationOperator.execute(offspring.get(0));
mutationOperator.execute(offspring.get(1));
offspringPopulation.add(offspring.get(0));
offspringPopulation.add(offspring.get(1));
}
evaluatePopulation(offspringPopulation) ;
List<S> jointPopulation = new ArrayList<>();
jointPopulation.addAll(population);
jointPopulation.addAll(offspringPopulation);
Ranking<S> ranking = computeRanking(jointPopulation);
population = crowdingDistanceSelection(ranking) ;
evaluations += populationSize ;
}
} |
java | public JobWithDetails getJob(String jobName) throws IOException {
return getJob(null, UrlUtils.toFullJobPath(jobName));
} |
java | @Override
public void eUnset(int featureID) {
switch (featureID) {
case XtextPackage.GENERATED_METAMODEL__NAME:
setName(NAME_EDEFAULT);
return;
}
super.eUnset(featureID);
} |
java | public static String getName(String x) {
String ret = null;
if (x.equals("280")) {
ret = "DE";
} else if (x.equals("040")) {
ret = "AT";
} else if (x.equals("250")) {
ret = "FR";
} else if (x.equals("056")) {
ret = "BE";
} else if (x.equals("100")) {
ret = "BG";
} else if (x.equals("208")) {
ret = "DK";
} else if (x.equals("246")) {
ret = "FI";
} else if (x.equals("300")) {
ret = "GR";
} else if (x.equals("826")) {
ret = "GB";
} else if (x.equals("372")) {
ret = "IE";
} else if (x.equals("352")) {
ret = "IS";
} else if (x.equals("380")) {
ret = "IT";
} else if (x.equals("392")) {
ret = "JP";
} else if (x.equals("124")) {
ret = "CA";
} else if (x.equals("191")) {
ret = "HR";
} else if (x.equals("438")) {
ret = "LI";
} else if (x.equals("442")) {
ret = "LU";
} else if (x.equals("528")) {
ret = "NL";
} else if (x.equals("578")) {
ret = "NO";
} else if (x.equals("616")) {
ret = "PL";
} else if (x.equals("620")) {
ret = "PT";
} else if (x.equals("642")) {
ret = "RO";
} else if (x.equals("643")) {
ret = "RU";
} else if (x.equals("752")) {
ret = "SE";
} else if (x.equals("756")) {
ret = "CH";
} else if (x.equals("703")) {
ret = "SK";
} else if (x.equals("705")) {
ret = "SI";
} else if (x.equals("724")) {
ret = "ES";
} else if (x.equals("203")) {
ret = "CZ";
} else if (x.equals("792")) {
ret = "TR";
} else if (x.equals("348")) {
ret = "HU";
} else if (x.equals("840")) {
ret = "US";
} else if (x.equals("978")) {
ret = "EU";
} else {
throw new InvalidArgumentException(HBCIUtils.getLocMsg("EXC_DT_UNNKOWN_CTR", x));
}
return ret;
} |
java | public static Vector3d[] calcInvertedAxes(Vector3d aAxis, Vector3d bAxis, Vector3d cAxis) {
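// Inverts the 3x3 matrix whose columns are aAxis, bAxis and cAxis using the
// classical adjugate: each row of the inverse is a cross product of two of the
// axes divided by the determinant (the scalar triple product computed below).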
double det = aAxis.x * bAxis.y * cAxis.z - aAxis.x * bAxis.z * cAxis.y - aAxis.y * bAxis.x * cAxis.z + aAxis.y
* bAxis.z * cAxis.x + aAxis.z * bAxis.x * cAxis.y - aAxis.z * bAxis.y * cAxis.x;
Vector3d[] invaxes = new Vector3d[3];
invaxes[0] = new Vector3d();
invaxes[0].x = (bAxis.y * cAxis.z - bAxis.z * cAxis.y) / det;
invaxes[0].y = (bAxis.z * cAxis.x - bAxis.x * cAxis.z) / det;
invaxes[0].z = (bAxis.x * cAxis.y - bAxis.y * cAxis.x) / det;
invaxes[1] = new Vector3d();
invaxes[1].x = (aAxis.z * cAxis.y - aAxis.y * cAxis.z) / det;
invaxes[1].y = (aAxis.x * cAxis.z - aAxis.z * cAxis.x) / det;
invaxes[1].z = (aAxis.y * cAxis.x - aAxis.x * cAxis.y) / det;
invaxes[2] = new Vector3d();
invaxes[2].x = (aAxis.y * bAxis.z - aAxis.z * bAxis.y) / det;
invaxes[2].y = (aAxis.z * bAxis.x - aAxis.x * bAxis.z) / det;
invaxes[2].z = (aAxis.x * bAxis.y - aAxis.y * bAxis.x) / det;
return invaxes;
} |
python | def next_file(self, close_previous=True):
'''
Gets the next file to be scanned (including pending extracted files, if applicable).
Also re/initializes self.status.
All modules should access the target file list through this method.
'''
fp = None
# Ensure files are closed to prevent IOError (too many open files)
if close_previous:
try:
self.previous_next_file_fp.close()
except KeyboardInterrupt as e:
raise e
except Exception:
pass
# Add any pending extracted files to the target_files list and reset
# the extractor's pending file list
self.target_file_list += self.extractor.pending
# Reset all dependencies prior to continuing with another file.
# This is particularly important for the extractor module, which must be reset
# in order to reset its base output directory path for each file, and the
# list of pending files.
self.reset_dependencies()
while self.target_file_list:
next_target_file = self.target_file_list.pop(0)
# Values in self.target_file_list are either already open files (BlockFile instances), or paths
# to files that need to be opened for scanning.
if isinstance(next_target_file, str):
fp = self.config.open_file(next_target_file)
else:
fp = next_target_file
if not fp:
break
else:
if (self.config.file_name_filter(fp) == False or
self._plugins_load_file(fp) == False):
fp.close()
fp = None
continue
else:
self.status.clear()
self.status.total = fp.length
break
if fp is not None:
self.current_target_file_name = fp.path
self.status.fp = fp
else:
self.current_target_file_name = None
self.status.fp = None
self.previous_next_file_fp = fp
self._plugins_new_file(fp)
return fp |
java | private void setAllowedDate(final long date) {
mAllowedDate = date;
if (mSharedPreferences == null) {
mSharedPreferences = mContext.getSharedPreferences(
PREFERENCES_GEOCODER, Context.MODE_PRIVATE);
}
final Editor e = mSharedPreferences.edit();
e.putLong(KEY_ALLOW, date);
e.apply();
} |
java | public static <Q> Object getPropertyValue(Q bean, String propertyName) {
return ObjectUtils.getPropertyValue(bean, propertyName, Object.class);
} |
python | def getsshkeys(self):
"""
Gets all the ssh keys for the current user
:return: a dictionary with the lists
"""
request = requests.get(
self.keys_url, headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
if request.status_code == 200:
return request.json()
else:
return False |
java | @Override
protected Set<String> extractSslRefs(Map<String, Object> properties, List<IIOPEndpoint> endpoints) {
Set<String> result = new HashSet<String>();
for (IIOPEndpoint endpoint : endpoints) {
for (Map<String, Object> iiopsOptions : endpoint.getIiopsOptions()) {
String sslAliasName = (String) iiopsOptions.get("sslRef");
if (sslAliasName == null)
sslAliasName = defaultAlias;
result.add(sslAliasName);
}
}
result.addAll(new ClientConfigHelper(null, null, defaultAlias).extractSslRefs(properties));
result.addAll(new ServerConfigHelper(null, null, null, null, defaultAlias).extractSslRefs(properties));
return result;
} |
python | def get_patient_expression(job, patient_dict):
"""
Convenience function to get the expression from the patient dict
:param dict patient_dict: dict of patient info
:return: Dict mapping the rsem gene and isoform expression result filenames to their FileIDs
:rtype: dict
"""
expression_archive = job.fileStore.readGlobalFile(patient_dict['expression_files'])
expression_archive = untargz(expression_archive, os.getcwd())
output_dict = {}
for filename in 'rsem.genes.results', 'rsem.isoforms.results':
output_dict[filename] = job.fileStore.writeGlobalFile(os.path.join(expression_archive,
filename))
return output_dict |
java | public void setScriptExtension(String scriptExtension) {
if (scriptExtension.startsWith("*.")) {
this.scriptExtension = scriptExtension;
} else if (scriptExtension.startsWith(".")) {
this.scriptExtension = "*" + scriptExtension;
} else {
this.scriptExtension = "*." + scriptExtension;
}
} |
java | public void seek(long pos) {
try {
raFile.seek(pos);
} catch (IOException e) {
logger.log(Level.SEVERE, null, e);
throw new RuntimeException(e);
}
} |
java | public void beginStart(String resourceGroupName, String jobName, StartStreamingJobParameters startJobParameters) {
beginStartWithServiceResponseAsync(resourceGroupName, jobName, startJobParameters).toBlocking().single().body();
} |
java | private void setValue(Collection<R> values) {
Collection<R> oldValues = this.values;
this.values = values;
maybeNotifyListeners(oldValues, values);
} |
python | def get_app(app=None, verbosity=0):
"""Uses django.db.djmodels.get_app and fuzzywuzzy to get the models module for a django app
Retrieve an app module from an app name string, even if misspelled (uses fuzzywuzzy to find the best match)
To get a list of all the apps use `get_app(None)` or `get_app([]) or get_app(())`
To get a single random app use `get_app('')`
>>> get_app('call').__class__.__name__ == 'module'
True
>>> get_app('model').__name__ == 'miner.models'
True
>>> isinstance(get_app('whatever'), ModuleType)
True
>>> isinstance(get_app(''), ModuleType)
True
isinstance(get_app(), ModuleType)
False
isinstance(get_app(), list)
True
"""
# print 'get_app(', app
if not app:
# for an empty list, tuple or None, just get all apps
if isinstance(app, (type(None), list, tuple)):
return [app_class.__package__ for app_class in djmodels.get_apps() if app_class and app_class.__package__]
# for a blank string, get the default app(s)
else:
if get_app.default:
return get_app(get_app.default)
else:
return djmodels.get_apps()[-1]
elif isinstance(app, ModuleType):
return app
elif isinstance(app, basestring):
if app.strip().endswith('.models'):
return get_app(app[:-len('.models')])
elif '.' in app:
return get_app('.'.join(app.split('.')[1:])) # django.db.models only looks at the module name in the INSTALLED_APPS list!
try:
if verbosity > 1:
print 'Attempting django.db.models.get_app(%r)' % app
return djmodels.get_app(app)
except ImproperlyConfigured:
if verbosity > 0:
print 'WARNING: unable to find app = %r' % app
if verbosity > 2:
print 'Trying a fuzzy match on app = %r' % app
app_names = [app_class.__package__ for app_class in djmodels.get_apps() if app_class and app_class.__package__]
fuzzy_app_name = fuzzy.extractOne(str(app), app_names)[0]
if verbosity > 0:
print 'WARNING: Best fuzzy match for app name %r is %s' % (app, fuzzy_app_name)
return djmodels.get_app(fuzzy_app_name.split('.')[-1]) |
java | public javax.servlet.RequestDispatcher getRequestDispatcher(String handler) {
MobicentsSipServlet sipServletImpl = (MobicentsSipServlet)
getSipSession().getSipApplicationSession().getSipContext().findSipServletByName(handler);
if(sipServletImpl == null) {
throw new IllegalArgumentException(handler + " is not a valid servlet name");
}
return new SipRequestDispatcher(sipServletImpl);
} |
python | def popen(fn, *args, **kwargs) -> subprocess.Popen:
"""
Please ensure you're not killing the process before it has started properly
:param fn:
:param args:
:param kwargs:
:return:
"""
args = popen_encode(fn, *args, **kwargs)
logging.getLogger(__name__).debug('Start %s', args)
p = subprocess.Popen(args)
return p |
python | def spare_disk(self, disk_xml=None):
""" Number of spare disk per type.
For example: storage.ontap.filer201.disk.SATA
"""
spare_disk = {}
disk_types = set()
for filer_disk in disk_xml:
disk_types.add(filer_disk.find('effective-disk-type').text)
if not filer_disk.find('raid-state').text == 'spare':
continue
disk_type = filer_disk.find('effective-disk-type').text
if disk_type in spare_disk:
spare_disk[disk_type] += 1
else:
spare_disk[disk_type] = 1
for disk_type in disk_types:
self.push('spare_' + disk_type, 'disk', spare_disk.get(disk_type, 0)) |
python | def add_rule(self, rule_class, target_class=_Nothing):
"""Adds an authorization rule.
:param rule_class: a class of authorization rule.
:param target_class: (optional) a class
or an iterable with classes to associate the rule with.
"""
if isinstance(target_class, Iterable):
for cls in target_class:
self._rules[cls] = rule_class
else:
self._rules[target_class] = rule_class |
python | def run_from_argv(self, argv):
"""
Pre-parse the command line to extract the value of the --testrunner
option. This allows a test runner to define additional command line
arguments.
"""
option = '--testrunner='
for arg in argv[2:]:
if arg.startswith(option):
self.test_runner = arg[len(option):]
break
super(Command, self).run_from_argv(argv) |
java | public double[][] getS() {
double[][] S = new double[n][n];
for(int i = 0; i < n; i++) {
S[i][i] = this.s[i];
}
return S;
} |
python | def find_one(self, cls, id):
"""Required functionality."""
db_result = None
for rec in read_rec(cls.get_table_name(), id):
db_result = rec
break # Only read the first returned - which should be all we get
if not db_result:
return None
obj = cls.from_data(db_result['value'])
return obj |
python | def score(count_bigram, count1, count2, n_words):
"""Collocation score"""
if n_words <= count1 or n_words <= count2:
# only one words appears in the whole document
return 0
N = n_words
c12 = count_bigram
c1 = count1
c2 = count2
p = c2 / N
p1 = c12 / c1
p2 = (c2 - c12) / (N - c1)
score = (l(c12, c1, p) + l(c2 - c12, N - c1, p)
- l(c12, c1, p1) - l(c2 - c12, N - c1, p2))
return -2 * score |
python | def listify(args):
"""Return args as a list.
If already a list - return as is.
>>> listify([1, 2, 3])
[1, 2, 3]
If a set - return as a list.
>>> listify(set([1, 2, 3]))
[1, 2, 3]
If a tuple - return as a list.
>>> listify(tuple([1, 2, 3]))
[1, 2, 3]
If a generator (also range / xrange) - return as a list.
>>> listify(x + 1 for x in range(3))
[1, 2, 3]
>>> from past.builtins import xrange
>>> from builtins import range
>>> listify(xrange(1, 4))
[1, 2, 3]
>>> listify(range(1, 4))
[1, 2, 3]
If a single instance of something that isn't any of the above - put as a
single element of the returned list.
>>> listify(1)
[1]
If "empty" (None or False or '' or anything else that evaluates to False),
return an empty list ([]).
>>> listify(None)
[]
>>> listify(False)
[]
>>> listify('')
[]
>>> listify(0)
[]
>>> listify([])
[]
"""
if args:
if isinstance(args, list):
return args
elif isinstance(args, (set, tuple, GeneratorType,
range, past.builtins.xrange)):
return list(args)
return [args]
return [] |
python | def queue_path(cls, project, location, queue):
"""Return a fully-qualified queue string."""
return google.api_core.path_template.expand(
"projects/{project}/locations/{location}/queues/{queue}",
project=project,
location=location,
queue=queue,
) |
java | boolean addOutputToken(Token idToken, Token paraToken) {
tokens.add(idToken);
tokens.add(paraToken);
previousTextToken = null;
return prepareNextScan(0);
} |
java | public static List<Map<String, Object>> getListMap(String sql, Object... arg) {
Connection connection = JDBCUtils.getConnection();
PreparedStatement ps = null;
ResultSet result = null;
List<Map<String, Object>> listMap = new ArrayList<Map<String, Object>>();
try {
ps = connection.prepareStatement(sql);
for (int i = 0; i < arg.length; i++) {
ps.setObject(i + 1, arg[i]);
}
result = ps.executeQuery();
listMap = DBUtils.handleResultSetToMapList(result, listMap);
} catch (SQLException e) {
e.printStackTrace();
} finally {
JDBCUtils.release(result, ps, connection);
}
return listMap;
} |
java | public void rebuildNode(String fullPath) throws Exception
{
Preconditions.checkArgument(ZKPaths.getPathAndNode(fullPath).getPath().equals(path), "Node is not part of this cache: " + fullPath);
Preconditions.checkState(!executorService.isShutdown(), "cache has been closed");
ensurePath();
internalRebuildNode(fullPath);
// this is necessary so that any updates that occurred while rebuilding are taken into account;
// we have to rebuild the entire tree in case this node got deleted in the interim
offerOperation(new RefreshOperation(this, RefreshMode.FORCE_GET_DATA_AND_STAT));
} |
java | @Override
public EEnum getIfcStairFlightTypeEnum() {
if (ifcStairFlightTypeEnumEEnum == null) {
ifcStairFlightTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(1070);
}
return ifcStairFlightTypeEnumEEnum;
} |
python | def Save(session, filename=None):
"""
Save your session to use it later.
Returns the filename of the written file.
If no filename is given, a file named `androguard_session_<DATE>.ag` will
be created in the current working directory.
`<DATE>` is a timestamp with the following format: `%Y-%m-%d_%H%M%S`.
This function will overwrite existing files without asking.
If the file could not be written, None is returned.
example::
s = session.Session()
session.Save(s, "msession.ag")
:param session: A Session object to save
:param filename: output filename to save the session
:type filename: string
"""
if not filename:
filename = "androguard_session_{:%Y-%m-%d_%H%M%S}.ag".format(datetime.datetime.now())
if os.path.isfile(filename):
log.warning("{} already exists, overwriting!")
# Setting the recursion limit according to the documentation:
# https://docs.python.org/3/library/pickle.html#what-can-be-pickled-and-unpickled
#
# Some larger APKs require a high recursion limit.
# Tested to be above 35000 for some files, setting to 50k to be sure.
# You might want to set this even higher if you encounter problems
reclimit = sys.getrecursionlimit()
sys.setrecursionlimit(50000)
saved = False
try:
with open(filename, "wb") as fd:
pickle.dump(session, fd)
saved = True
except RecursionError:
log.exception("Recursion Limit hit while saving. "
"Current Recursion limit: {}. "
"Please report this error!".format(sys.getrecursionlimit()))
# Remove partially written file
os.unlink(filename)
sys.setrecursionlimit(reclimit)
return filename if saved else None |
python | def get_instance(self, payload):
"""
Build an instance of BalanceInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.api.v2010.account.balance.BalanceInstance
:rtype: twilio.rest.api.v2010.account.balance.BalanceInstance
"""
return BalanceInstance(self._version, payload, account_sid=self._solution['account_sid'], ) |
python | def check_token(self, respond):
"""
Check if the user's token is valid
"""
if respond.status_code == 401:
self.credential.obtain_token(config=self.config)
return False
return True |
python | def simxSetUIButtonLabel(clientID, uiHandle, uiButtonID, upStateLabel, downStateLabel, operationMode):
'''
Please have a look at the function description/documentation in the V-REP user manual
'''
if sys.version_info[0] == 3:
if type(upStateLabel) is str:
upStateLabel=upStateLabel.encode('utf-8')
if type(downStateLabel) is str:
downStateLabel=downStateLabel.encode('utf-8')
return c_SetUIButtonLabel(clientID, uiHandle, uiButtonID, upStateLabel, downStateLabel, operationMode) |
python | def folder(self) -> typing.Union[str, None]:
"""
The folder, relative to the project source_directory, where the file
resides
:return:
"""
if 'folder' in self.data:
return self.data.get('folder')
elif self.project_folder:
if callable(self.project_folder):
return self.project_folder()
else:
return self.project_folder
return None |
java | public static FenceWait prepareWait(byte[] fenceId, TransactionSystemClient txClient)
throws TransactionFailureException, InterruptedException, TimeoutException {
return new DefaultFenceWait(new TransactionContext(txClient, new WriteFence(fenceId)));
} |
python | def msgbox(message='Shall I continue?', title='', buttonMessage='OK'):
"""Original doc: Display a messagebox
"""
return psidialogs.message(message=message, title=title, ok=buttonMessage) |
python | def sources(self):
"""
Returns a dictionary of source methods found on this object,
keyed on method name. Source methods are identified by
(self, context) arguments on this object. For example:
.. code-block:: python
def f(self, context):
...
is a source method, but
.. code-block:: python
def f(self, ctx):
...
is not.
"""
try:
return self._sources
except AttributeError:
self._sources = find_sources(self)
return self._sources |
java | @Override
public byte readDataType() {
// prevent the handling of an empty Object
if (buf.hasRemaining()) {
do {
// get the data type
currentDataType = buf.get();
log.trace("Data type: {}", currentDataType);
switch (currentDataType) {
case AMF.TYPE_NULL:
case AMF.TYPE_UNDEFINED:
return DataTypes.CORE_NULL;
case AMF.TYPE_NUMBER:
return DataTypes.CORE_NUMBER;
case AMF.TYPE_BOOLEAN:
return DataTypes.CORE_BOOLEAN;
case AMF.TYPE_STRING:
case AMF.TYPE_LONG_STRING:
return DataTypes.CORE_STRING;
case AMF.TYPE_CLASS_OBJECT:
case AMF.TYPE_OBJECT:
return DataTypes.CORE_OBJECT;
case AMF.TYPE_MIXED_ARRAY:
return DataTypes.CORE_MAP;
case AMF.TYPE_ARRAY:
return DataTypes.CORE_ARRAY;
case AMF.TYPE_DATE:
return DataTypes.CORE_DATE;
case AMF.TYPE_XML:
return DataTypes.CORE_XML;
case AMF.TYPE_REFERENCE:
return DataTypes.OPT_REFERENCE;
case AMF.TYPE_UNSUPPORTED:
case AMF.TYPE_MOVIECLIP:
case AMF.TYPE_RECORDSET:
// These types are not handled by core datatypes
// So add the amf mask to them, this way the deserializer
// will call back to readCustom, we can then handle or
// return null
return (byte) (currentDataType + DataTypes.CUSTOM_AMF_MASK);
case AMF.TYPE_AMF3_OBJECT:
log.debug("Switch to AMF3");
return DataTypes.CORE_SWITCH;
}
} while (hasMoreProperties());
log.trace("No more data types available");
return DataTypes.CORE_END_OBJECT;
}
// empty object, may as well be null
return DataTypes.CORE_NULL;
} |
python | def get_url(self, link):
"""
URL of service
"""
view_name = SupportedServices.get_detail_view_for_model(link.service)
return reverse(view_name, kwargs={'uuid': link.service.uuid.hex}, request=self.context['request']) |
python | def plot(self, minx=-1.5, maxx=1.2, miny=-0.2, maxy=2, **kwargs):
"""Helper function to plot the Muller potential
"""
import matplotlib.pyplot as pp
grid_width = max(maxx-minx, maxy-miny) / 200.0
ax = kwargs.pop('ax', None)
xx, yy = np.mgrid[minx:maxx:grid_width, miny:maxy:grid_width]
V = self.potential(xx, yy)
# clip off any values greater than 200, since they mess up
# the color scheme
if ax is None:
ax = pp
ax.contourf(xx, yy, V.clip(max=200), 40, **kwargs) |
java | protected boolean isQuoteDelimiter( String character ) {
String quoteDelimiters = "\"'";
return quoteDelimiters.indexOf(character) >= 0;
} |
python | def _get_firmware_embedded_health(self, data):
"""Parse the get_host_health_data() for server capabilities
:param data: the output returned by get_host_health_data()
:returns: a dictionary of firmware name and firmware version.
"""
firmware = self.get_value_as_list(data['GET_EMBEDDED_HEALTH_DATA'],
'FIRMWARE_INFORMATION')
if firmware is None:
return None
return dict((y['FIRMWARE_NAME']['VALUE'],
y['FIRMWARE_VERSION']['VALUE'])
for x in firmware for y in x.values()) |
python | def parse_veto_definer(veto_def_filename):
""" Parse a veto definer file from the filename and return a dictionary
indexed by ifo and veto definer category level.
Parameters
----------
veto_def_filename: str
The path to the veto definer file
Returns:
parsed_definition: dict
Returns a dictionary first indexed by ifo, then category level, and
finally a list of veto definitions.
"""
from glue.ligolw import table, lsctables, utils as ligolw_utils
from glue.ligolw.ligolw import LIGOLWContentHandler as h
lsctables.use_in(h)
indoc = ligolw_utils.load_filename(veto_def_filename, False,
contenthandler=h)
veto_table = table.get_table(indoc, 'veto_definer')
ifo = veto_table.getColumnByName('ifo')
name = veto_table.getColumnByName('name')
version = numpy.array(veto_table.getColumnByName('version'))
category = numpy.array(veto_table.getColumnByName('category'))
start = numpy.array(veto_table.getColumnByName('start_time'))
end = numpy.array(veto_table.getColumnByName('end_time'))
start_pad = numpy.array(veto_table.getColumnByName('start_pad'))
end_pad = numpy.array(veto_table.getColumnByName('end_pad'))
data = {}
for i in range(len(veto_table)):
if ifo[i] not in data:
data[ifo[i]] = {}
# The veto-definer categories are weird! Hardware injections are stored
# in "3" and numbers above that are bumped up by one (although not
# often used any more). So we remap 3 to H and anything above 3 to
# N-1. 2 and 1 correspond to 2 and 1 (YAY!)
if category[i] > 3:
curr_cat = "CAT_{}".format(category[i]-1)
elif category[i] == 3:
curr_cat = "CAT_H"
else:
curr_cat = "CAT_{}".format(category[i])
if curr_cat not in data[ifo[i]]:
data[ifo[i]][curr_cat] = []
veto_info = {'name': name[i],
'version': version[i],
'start': start[i],
'end': end[i],
'start_pad': start_pad[i],
'end_pad': end_pad[i],
}
data[ifo[i]][curr_cat].append(veto_info)
return data |
python | def flush(self, indices=None, refresh=None):
"""
Flushes one or more indices (clear memory)
If a bulk is full, it sends it.
(See :ref:`es-guide-reference-api-admin-indices-flush`)
:keyword indices: an index or a list of indices
:keyword refresh: set the refresh parameter
"""
self.conn.force_bulk()
path = self.conn._make_path(indices, '_flush')
args = {}
if refresh is not None:
args['refresh'] = refresh
return self.conn._send_request('POST', path, params=args) |
python | def _as_json(self, response):
"""Assuming this is not empty, return the content as JSON.
Result/exceptions is not determined if you call this method without testing _is_empty.
:raises: DeserializationError if response body contains invalid json data.
"""
# Assume ClientResponse has "body", and otherwise it's a requests.Response
content = response.text() if hasattr(response, "body") else response.text
try:
return json.loads(content)
except ValueError:
raise DeserializationError(
"Error occurred in deserializing the response body.") |
python | def crop(stream, x, y, width, height, **kwargs):
"""Crop the input video.
Args:
x: The horizontal position, in the input video, of the left edge of
the output video.
y: The vertical position, in the input video, of the top edge of the
output video.
width: The width of the output video. Must be greater than 0.
height: The height of the output video. Must be greater than 0.
Official documentation: `crop <https://ffmpeg.org/ffmpeg-filters.html#crop>`__
"""
return FilterNode(
stream,
crop.__name__,
args=[width, height, x, y],
kwargs=kwargs
).stream() |
java | public List<String> getValuesForFieldInPolicy(String sec, String ptype, int fieldIndex) {
List<String> values = new ArrayList<>();
for (List<String> rule : model.get(sec).get(ptype).policy) {
values.add(rule.get(fieldIndex));
}
Util.arrayRemoveDuplicates(values);
return values;
} |
python | def solve_dv_dt_v1(self):
"""Solve the differential equation of HydPy-L.
At the moment, HydPy-L only implements a simple numerical solution of
its underlying ordinary differential equation. To increase the accuracy
(or sometimes even to prevent instability) of this approximation, one
can set the value of parameter |MaxDT| to a value smaller than the actual
simulation step size. Method |solve_dv_dt_v1| then applies the methods
related to the numerical approximation multiple times and aggregates
the results.
Note that the order of convergence is one only. It is hard to tell how
short the internal simulation step needs to be to ensure a certain degree
of accuracy. In most cases one hour or very often even one day should be
sufficient to gain acceptable results. However, this strongly depends on
the given water stage-volume-discharge relationship. Hence it seems
advisable to always define a few test waves and apply the llake model with
different |MaxDT| values. Afterwards, select a |MaxDT| value lower than
one which results in acceptable approximations for all test waves. The
computation time of the llake model per substep is rather small, so always
include a safety factor.
Of course, an adaptive step size determination would be much more
convenient...
Required derived parameter:
|NmbSubsteps|
Used aide sequence:
|llake_aides.V|
|llake_aides.QA|
Updated state sequence:
|llake_states.V|
Calculated flux sequence:
|llake_fluxes.QA|
Note that method |solve_dv_dt_v1| calls the versions of `calc_vq`,
`interp_qa` and `calc_v_qa` selected by the respective application model.
Hence, also their parameter and sequence specifications need to be
considered.
Basic equation:
:math:`\\frac{dV}{dt}= QZ - QA(V)`
"""
der = self.parameters.derived.fastaccess
flu = self.sequences.fluxes.fastaccess
old = self.sequences.states.fastaccess_old
new = self.sequences.states.fastaccess_new
aid = self.sequences.aides.fastaccess
flu.qa = 0.
aid.v = old.v
for _ in range(der.nmbsubsteps):
self.calc_vq()
self.interp_qa()
self.calc_v_qa()
flu.qa += aid.qa
flu.qa /= der.nmbsubsteps
new.v = aid.v |
java | public void updateItem (T item)
{
if (_items == null) {
return;
}
int idx = _items.indexOf(item);
if (idx == -1) {
_items.add(0, item);
} else {
_items.set(idx, item);
}
} |
python | def update_user(self, user_is_artist="", artist_level="", artist_specialty="", real_name="", tagline="", countryid="", website="", bio=""):
"""Update the users profile information
:param user_is_artist: Is the user an artist?
:param artist_level: If the user is an artist, what level are they
:param artist_specialty: If the user is an artist, what is their specialty
:param real_name: The users real name
:param tagline: The users tagline
:param countryid: The users location
:param website: The users personal website
:param bio: The users bio
"""
if self.standard_grant_type is not "authorization_code":
raise DeviantartError("Authentication through Authorization Code (Grant Type) is required in order to connect to this endpoint.")
post_data = {}
if user_is_artist:
post_data["user_is_artist"] = user_is_artist
if artist_level:
post_data["artist_level"] = artist_level
if artist_specialty:
post_data["artist_specialty"] = artist_specialty
if real_name:
post_data["real_name"] = real_name
if tagline:
post_data["tagline"] = tagline
if countryid:
post_data["countryid"] = countryid
if website:
post_data["website"] = website
if bio:
post_data["bio"] = bio
response = self._req('/user/profile/update', post_data=post_data)
return response['success'] |
python | def generate_lines(self, infile):
""" Split file into lines
return dict with line=input, depth=n
"""
pound = '#'
for line in infile:
heading = 0
if line.startswith(pound):
heading = self.hash_count(line)
yield dict(line=line, heading=heading) |
java | public static boolean coreExpandArchive(String sourcePath, String targetPath) throws IOException {
ZipArchiveInputStream in = null;
OutputStream out = null;
try {
// make sure we're working with absolute canonical paths
File source = new File(sourcePath).getCanonicalFile();
File target = new File(targetPath).getCanonicalFile();
// open the archive
in = new ZipArchiveInputStream(getInputStream(source));
// expand all entries of the archive
for (ZipArchiveEntry entry = in.getNextZipEntry(); entry != null; entry = in.getNextZipEntry()) {
// get the pathname of the entry (this will be a relative path)
String outFilename = entry.getName();
// construct the absolute path of where this entry will be expanded
String targetPlusOutFile = target.getPath() + File.separator + outFilename;
File targetFile = new File(targetPlusOutFile);
char ending = outFilename.charAt(outFilename.length() - 1);
if (ending == '/' || ending == '\\') {
fileMkDirs(targetFile);
if (helper != null) {
helper.setPermissions(targetFile, entry.getUnixMode());
}
continue;
} else {
fileMkDirs(targetFile.getParentFile());
}
// expand the entry
out = getOutputStream(targetFile);
byte[] buf = new byte[1024];
int len;
while ((len = in.read(buf)) > 0) {
out.write(buf, 0, len);
}
// Close the streams
out.close();
if (helper != null) {
helper.setPermissions(targetFile, entry.getUnixMode());
}
out = null;
}
in.close();
in = null;
return true;
} catch (IOException e) {
throw e;
} finally {
// try to close any open files
tryToClose(out);
tryToClose(in);
}
} |
java | synchronized public void recover() {
if (tc.isEntryEnabled())
Tr.entry(tc, "recover", this);
final int state = _status.getState();
if (_subordinate) {
// For a subordinate, first check whether the global outcome is known locally.
switch (state) {
// Due to the possibility of recovery being attempted asynchronously to
// an incoming superior request, we must cover the case where the
// transaction has now actually committed already.
case TransactionState.STATE_HEURISTIC_ON_COMMIT:
case TransactionState.STATE_COMMITTED:
case TransactionState.STATE_COMMITTING:
recoverCommit(true);
break;
// Due to the possibility of recovery being attempted asynchronously to
// an incoming superior request, we must cover the case where the
// transaction has now actually rolled back already.
case TransactionState.STATE_HEURISTIC_ON_ROLLBACK:
case TransactionState.STATE_ROLLED_BACK:
case TransactionState.STATE_ROLLING_BACK:
recoverRollback(true);
break;
// For a subordinate, the replay_completion method is invoked on the superior.
// If the number of times the replay_completion has been retried is greater
// than the value specified by COMMITRETRY, then HEURISTICDIRECTION is used
// to determine the transaction outcome.
default:
// If we were imported from a JCA provider, check whether it's still installed.
// If so, we need do nothing here since we expect the RA to complete the transaction.
// Otherwise, we will complete using the configured direction.
if (_JCARecoveryData != null) {
final String id = _JCARecoveryData.getWrapper().getProviderId();
if (TMHelper.isProviderInstalled(id)) {
if (tc.isDebugEnabled())
Tr.debug(tc, "recover", "Do nothing. Expect provider " + id + " will complete.");
// Do nothing. RA is responsible for completing.
} else {
switch (_configProvider.getHeuristicCompletionDirection()) {
case ConfigurationProvider.HEURISTIC_COMPLETION_DIRECTION_COMMIT:
Tr.error(tc, "WTRN0098_COMMIT_RA_UNINSTALLED", new Object[] { getTranName(), id });
recoverCommit(false);
break;
case ConfigurationProvider.HEURISTIC_COMPLETION_DIRECTION_MANUAL:
// do nothing, administrative completion is required
_needsManualCompletion = true;
Tr.info(tc, "WTRN0101_MANUAL_RA_UNINSTALLED", new Object[] { getTranName(), id });
break;
default:
Tr.error(tc, "WTRN0099_ROLLBACK_RA_UNINSTALLED", new Object[] { getTranName(), id });
recoverRollback(false);
}
}
} else {
retryCompletion();
}
break;
}
} else {
// For a top-level Transaction, we will only recover in the case
// where we have successfully prepared. If the state is not committing,
// then assume it is rollback.
if (state == TransactionState.STATE_LAST_PARTICIPANT) {
// LIDB1673-13 lps heuristic completion.
// The transaction was attempting to complete its
// 1PC resource when the server went down.
// Use the lpsHeuristicCompletion flag to determine
// how to complete the tx.
switch (ConfigurationProviderManager.getConfigurationProvider().getHeuristicCompletionDirection()) {
case ConfigurationProvider.HEURISTIC_COMPLETION_DIRECTION_COMMIT:
Tr.error(tc, "WTRN0096_HEURISTIC_MAY_HAVE_OCCURED", getTranName());
recoverCommit(false);
break;
case ConfigurationProvider.HEURISTIC_COMPLETION_DIRECTION_MANUAL:
// do nothing!?
_needsManualCompletion = true;
Tr.info(tc, "WTRN0097_HEURISTIC_MANUAL_COMPLETION", getTranName());
break;
default:
Tr.error(tc, "WTRN0102_HEURISTIC_MAY_HAVE_OCCURED", getTranName());
recoverRollback(false);
}
} else if (state == TransactionState.STATE_COMMITTING)
recoverCommit(false);
else
recoverRollback(false);
}
if (tc.isEntryEnabled())
Tr.exit(tc, "recover");
} |
java | public static PropDefConceptId getInstance(String propId,
String propertyName, Value value, Metadata metadata) {
List<Object> key = new ArrayList<>(4);
key.add(propId);
key.add(propertyName);
key.add(value);
key.add(Boolean.TRUE); //distinguishes these from properties represented as a modifier.
PropDefConceptId conceptId = (PropDefConceptId) metadata.getFromConceptIdCache(key);
if (conceptId == null) {
conceptId = new PropDefConceptId(propId, propertyName, value, metadata);
metadata.putInConceptIdCache(key, conceptId);
}
return conceptId;
} |
python | def copy_data(self, project, logstore, from_time, to_time=None,
to_client=None, to_project=None, to_logstore=None,
shard_list=None,
batch_size=None, compress=None, new_topic=None, new_source=None):
"""
copy data from one logstore to another (the target may be the same logstore or one in a different region); the times refer to when logs were received on the server side.
:type project: string
:param project: project name
:type logstore: string
:param logstore: logstore name
:type from_time: string/int
:param from_time: cursor value; could be "begin", a timestamp, or a readable time like "%Y-%m-%d %H:%M:%S<time_zone>" e.g. "2018-01-02 12:12:10+8:00"; human readable strings are also supported, e.g. "1 hour ago", "now", "yesterday 0:0:0", refer to https://aliyun-log-cli.readthedocs.io/en/latest/tutorials/tutorial_human_readable_datetime.html
:type to_time: string/int
:param to_time: cursor value, default is "end"; could be "begin", a timestamp, or a readable time like "%Y-%m-%d %H:%M:%S<time_zone>" e.g. "2018-01-02 12:12:10+8:00"; human readable strings are also supported, e.g. "1 hour ago", "now", "yesterday 0:0:0", refer to https://aliyun-log-cli.readthedocs.io/en/latest/tutorials/tutorial_human_readable_datetime.html
:type to_client: LogClient
:param to_client: logclient instance, if empty will use source client
:type to_project: string
:param to_project: project name, if empty will use source project
:type to_logstore: string
:param to_logstore: logstore name, if empty will use source logstore
:type shard_list: string
:param shard_list: shard number list, could be a comma separated list or range: 1,20,31-40
:type batch_size: int
:param batch_size: batch size to fetch the data in each iteration. by default it's 500
:type compress: bool
:param compress: if use compression, by default it's True
:type new_topic: string
:param new_topic: overwrite the copied topic with the passed one
:type new_source: string
:param new_source: overwrite the copied source with the passed one
:return: LogResponse {"total_count": 30, "shards": {0: 10, 1: 20} })
"""
return copy_data(self, project, logstore, from_time, to_time=to_time,
to_client=to_client, to_project=to_project, to_logstore=to_logstore,
shard_list=shard_list,
batch_size=batch_size, compress=compress, new_topic=new_topic, new_source=new_source) |
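A hedged usage sketch; `client` is assumed to be an already-configured LogClient, and the project/logstore names are placeholders:

# copy the last hour of logs into another logstore in the same project
res = client.copy_data(
    "my-project", "source-logstore",
    from_time="1 hour ago", to_time="now",
    to_logstore="target-logstore",
    batch_size=500, compress=True,
)
print(res)  # e.g. {"total_count": 30, "shards": {0: 10, 1: 20}}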
java | @Override
public StopApplicationResult stopApplication(StopApplicationRequest request) {
request = beforeClientExecution(request);
return executeStopApplication(request);
} |
java | public static <Item extends IItem> void attachToView(final EventHook<Item> event, final RecyclerView.ViewHolder viewHolder, View view) {
if (event instanceof ClickEventHook) {
view.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
//get the adapter for this view
Object tagAdapter = viewHolder.itemView.getTag(R.id.fastadapter_item_adapter);
if (tagAdapter instanceof FastAdapter) {
FastAdapter<Item> adapter = (FastAdapter<Item>) tagAdapter;
//we get the adapterPosition from the viewHolder
int pos = adapter.getHolderAdapterPosition(viewHolder);
//make sure the click was done on a valid item
if (pos != RecyclerView.NO_POSITION) {
Item item = adapter.getItem(pos);
if (item != null) {
//delegate the click to the hook
((ClickEventHook<Item>) event).onClick(v, pos, adapter, item);
}
}
}
}
});
} else if (event instanceof LongClickEventHook) {
view.setOnLongClickListener(new View.OnLongClickListener() {
@Override
public boolean onLongClick(View v) {
//get the adapter for this view
Object tagAdapter = viewHolder.itemView.getTag(R.id.fastadapter_item_adapter);
if (tagAdapter instanceof FastAdapter) {
FastAdapter<Item> adapter = (FastAdapter<Item>) tagAdapter;
//we get the adapterPosition from the viewHolder
int pos = adapter.getHolderAdapterPosition(viewHolder);
//make sure the click was done on a valid item
if (pos != RecyclerView.NO_POSITION) {
Item item = adapter.getItem(pos);
if (item != null) {
//delegate the long click to the hook
return ((LongClickEventHook<Item>) event).onLongClick(v, pos, adapter, item);
}
}
}
return false;
}
});
} else if (event instanceof TouchEventHook) {
view.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View v, MotionEvent e) {
//get the adapter for this view
Object tagAdapter = viewHolder.itemView.getTag(R.id.fastadapter_item_adapter);
if (tagAdapter instanceof FastAdapter) {
FastAdapter<Item> adapter = (FastAdapter<Item>) tagAdapter;
//we get the adapterPosition from the viewHolder
int pos = adapter.getHolderAdapterPosition(viewHolder);
//make sure the click was done on a valid item
if (pos != RecyclerView.NO_POSITION) {
Item item = adapter.getItem(pos);
if (item != null) {
//delegate the touch event to the hook
return ((TouchEventHook<Item>) event).onTouch(v, e, pos, adapter, item);
}
}
}
return false;
}
});
} else if (event instanceof CustomEventHook) {
//we trigger the event binding
((CustomEventHook<Item>) event).attachEvent(view, viewHolder);
}
} |
java | @Override
public boolean eIsSet(int featureID) {
switch (featureID) {
case AfplibPackage.GSLJ__LINEJOIN:
return LINEJOIN_EDEFAULT == null ? linejoin != null : !LINEJOIN_EDEFAULT.equals(linejoin);
}
return super.eIsSet(featureID);
} |
python | def _execute_handling(self, *eopatches, **kwargs):
""" Handles measuring execution time and error propagation
"""
self.private_task_config.start_time = datetime.datetime.now()
caught_exception = None
try:
return_value = self.execute(*eopatches, **kwargs)
except BaseException as exception:
caught_exception = exception, sys.exc_info()[2]
if caught_exception is not None: # Exception is not raised in except statement to prevent duplicated traceback
exception, traceback = caught_exception
raise type(exception)('During execution of task {}: {}'.format(self.__class__.__name__,
exception)).with_traceback(traceback)
self.private_task_config.end_time = datetime.datetime.now()
return return_value |
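The record above re-raises outside the `except` block so the chained traceback is not duplicated; a minimal standalone sketch of the same pattern (all names hypothetical, and assuming the exception type is constructible from a single message, as in the record):

import sys

def run_with_context(task_name, func, *args):
    """Run func(*args), wrapping any exception with the task name."""
    caught = None
    try:
        return func(*args)
    except BaseException as exc:
        caught = exc, sys.exc_info()[2]
    # re-raise outside the except block to avoid a duplicated traceback
    exc, tb = caught
    raise type(exc)('During execution of task {}: {}'.format(task_name, exc)).with_traceback(tb)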
java | @SuppressWarnings("unchecked")
public void fromDmr(Object entity, String javaName, ModelType dmrType, Class<?> propertyType, ModelNode dmrPayload) throws Exception {
Method target = entity.getClass().getMethod(javaName, propertyType);
List<ModelNode> items = dmrPayload.isDefined() ? dmrPayload.asList() : Collections.EMPTY_LIST;
if(items.isEmpty()) {
target.invoke(entity, Collections.EMPTY_LIST);
}
else
{
List list = new ArrayList(items.size());
// java type is derived from list item dmr type
ModelType listValueType = items.get(0).getType();
for (ModelNode item : items) {
addJavaValueTo(list, listValueType, item);
}
target.invoke(entity, list);
}
} |
java | private List<Object> _jdoExecuteQuery(
final String oql,
final Object[] params
)
{
List<Object> results = null;
try {
results = getExtendedCastorTemplate().findByQuery( oql, params );
} catch (DataAccessException ex) {
throw new PersistenceException( ex.getMostSpecificCause() );
}
return results;
} |
java | public HostAvailabilityListener withHostUnavailableExceptions(Class<Throwable>... exceptionTypes) {
hostUnavailableExceptions = new ArrayList<>();
for ( Class<Throwable> exception : exceptionTypes ) {
hostUnavailableExceptions.add(exception);
}
return this;
} |
python | def unescape(s):
r"""Inverse of `escape`.
>>> unescape(r'\x41\n\x42\n\x43')
'A\nB\nC'
>>> unescape(r'\u86c7')
u'\u86c7'
>>> unescape(u'ah')
u'ah'
"""
if re.search(r'(?<!\\)\\(\\\\)*[uU]', s) or isinstance(s, unicode):
return unescapeUnicode(s)
else:
return unescapeAscii(s) |
python | def ipv6_generate_random(total=100):
"""
The generator to produce random, unique IPv6 addresses that are not
defined (can be looked up using ipwhois).
Args:
total (:obj:`int`): The total number of IPv6 addresses to generate.
Yields:
str: The next IPv6 address.
"""
count = 0
yielded = set()
while count < total:
address = str(IPv6Address(random.randint(0, 2**128-1)))
if not ipv6_is_defined(address)[0] and address not in yielded:
count += 1
yielded.add(address)
yield address |
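A hedged usage sketch for the generator above, assuming `ipv6_is_defined` from the same module is available on the import path:

for address in ipv6_generate_random(total=3):
    print(address)  # three unique IPv6 addresses that are not in reserved/defined ranges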
java | @Override
public void characters(char[] ch, int start, int length) throws SAXException {
// get the value of the cell contents
lastContent = lastContent.concat(new String(ch, start, length));
} |
java | @Override
public void start(Xid xid, int flags) throws XAException {
synchronized (cpoXaStateMap) {
// see if we are already associated with a global transaction
if (cpoXaStateMap.getXaResourceMap().get(this) != null)
throw CpoXaError.createXAException(CpoXaError.XAER_PROTO, "Start can not be called on an associated XID");
// see if we are not in the middle of doing something on the local transaction
if (isLocalResourceBusy())
throw CpoXaError.createXAException(CpoXaError.XAER_OUTSIDE, "Local Transaction is busy");
CpoXaState<T> cpoXaState = cpoXaStateMap.getXidStateMap().get(xid);
switch (flags) {
case TMNOFLAGS: // Starting a new transaction ID
// if it is already in use then throw a dupe id error
if (cpoXaState != null)
throw CpoXaError.createXAException(CpoXaError.XAER_DUPID, "Duplicate XID");
cpoXaState = new CpoXaState<>(xid, createNewResource(), CpoXaState.XA_ASSOCIATED, this, true);
cpoXaStateMap.getXidStateMap().put(xid, cpoXaState);
cpoXaStateMap.getXaResourceMap().put(this, xid);
break;
case TMJOIN:
if (cpoXaState == null)
throw CpoXaError.createXAException(CpoXaError.XAER_NOTA, "Unknown XID");
if (cpoXaState.getAssociation()==CpoXaState.XA_UNASSOCIATED) {
cpoXaState.setAssociation(CpoXaState.XA_ASSOCIATED);
cpoXaState.setAssignedResourceManager(this);
cpoXaStateMap.getXaResourceMap().put(this, xid);
} else {
throw CpoXaError.createXAException(CpoXaError.XAER_PROTO, "TMJOIN can only be used with an unassociated XID");
}
break;
case TMRESUME:
if (cpoXaState == null)
throw CpoXaError.createXAException(CpoXaError.XAER_NOTA, "Unknown XID");
// you can only resume a suspended transaction
if (cpoXaState.getAssociation() == CpoXaState.XA_SUSPENDED) {
cpoXaState.setAssociation(CpoXaState.XA_ASSOCIATED);
cpoXaState.setAssignedResourceManager(this);
cpoXaStateMap.getXaResourceMap().put(this, xid);
} else {
throw CpoXaError.createXAException(CpoXaError.XAER_PROTO, "TMRESUME can only be used with a suspended XID");
}
break;
default: // invalid arguments
throw CpoXaError.createXAException(CpoXaError.XAER_INVAL, "Invalid start() flag");
}
}
} |
python | def launch_keyword_wizard(self):
"""Launch keyword creation wizard."""
# make sure selected layer is the output layer
if self.iface.activeLayer() != self.output_layer:
return
# launch wizard dialog
keyword_wizard = WizardDialog(
self.iface.mainWindow(), self.iface, self.dock_widget)
keyword_wizard.set_keywords_creation_mode(self.output_layer)
keyword_wizard.exec_() |
python | def commit(self, wait=False, additionalParams={}):
"""
Commit is called once all parts are uploaded during a multipart Add
Item or Update Item operation. The parts are combined into a file,
and the original file is overwritten during an Update Item
operation. This is an asynchronous call and returns immediately.
Status can be used to check the status of the operation until it is
completed.
The item to commit is identified by this object's root URL.
Inputs:
   wait - if True, blocks and polls status until the commit
          finishes or fails; otherwise returns immediately.
   additionalParams - optional key/value pairs like
                      type : "File Geodatabase". This is mainly used
                      when multipart uploads occur.
"""
url = "%s/commit" % self.root
params = {
"f" : "json",
}
for key, value in additionalParams.items():
params[key] = value
if wait:
res = self._post(url=url,
param_dict=params,
securityHandler=self._securityHandler,
proxy_port=self._proxy_port,
proxy_url=self._proxy_url)
res = self.status()
import time
while res['status'].lower() in ["partial", "processing"]:
time.sleep(2)
res = self.status()
return res
else:
return self._post(url=url,
param_dict=params,
securityHandler=self._securityHandler,
proxy_port=self._proxy_port,
proxy_url=self._proxy_url) |
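A hedged usage sketch, assuming `item` is an instance of the surrounding class obtained after all parts of a multipart upload have been sent:

res = item.commit(wait=True, additionalParams={"type": "File Geodatabase"})
print(res["status"])  # polls until the status leaves "partial"/"processing"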
python | def json(self):
"""
convert webhook data to json
:return: webhook data as json
"""
data = dict()
embeds = self.embeds
self.embeds = list()
# convert DiscordEmbed to dict
for embed in embeds:
self.add_embed(embed)
for key, value in self.__dict__.items():
if value and key not in ['url', 'files', 'filename']:
data[key] = value
embeds_empty = all(not embed for embed in data["embeds"]) if 'embeds' in data else True
if embeds_empty and 'content' not in data and bool(self.files) is False:
logger.error('webhook message is empty! set content or embed data')
return data |
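A hedged usage sketch, assuming the surrounding class is a Discord-webhook helper constructed with a `url` and optional `content` (constructor name hypothetical):

webhook = DiscordWebhook(url="https://discord.com/api/webhooks/...", content="hello")
payload = webhook.json()
print(payload)  # e.g. {'content': 'hello'}; falsy fields and url/files/filename are omitted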
java | private Set<String> getStringSet(I_CmsXmlContentLocation val, String path) {
Set<String> valueSet = new HashSet<String>();
if (val != null) {
List<I_CmsXmlContentValueLocation> singleValueLocs = val.getSubValues(path);
for (I_CmsXmlContentValueLocation singleValueLoc : singleValueLocs) {
String value = singleValueLoc.getValue().getStringValue(m_cms).trim();
valueSet.add(value);
}
}
return valueSet;
} |