language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
---|---|
python
|
def _set_add_(self, v, load=False):
    """
    Setter method for add_, mapped from YANG variable /telemetry/profile/mpls_traffic_fec/add (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_add_ is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_add_() directly.
    """
    # Generated pyangbind setter: if the incoming value carries its own
    # declared YANG type, coerce through it first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Wrap the value in the dynamically-typed YANG list class; this
        # raises TypeError/ValueError when the value is not list-compatible.
        t = YANGDynClass(v,base=YANGListType("object",add_.add_, yang_name="add", rest_name="add", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='object', extensions={u'tailf-common': {u'callpoint': u'MplstrafficfecProfileObject', u'cli-suppress-list-no': None, u'cli-suppress-mode': None, u'info': u'Add MPLS traffic telemetry object'}}), is_container='list', yang_name="add", rest_name="add", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'MplstrafficfecProfileObject', u'cli-suppress-list-no': None, u'cli-suppress-mode': None, u'info': u'Add MPLS traffic telemetry object'}}, namespace='urn:brocade.com:mgmt:brocade-telemetry', defining_module='brocade-telemetry', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        # Re-raise with a structured payload describing the expected type.
        raise ValueError({
            'error-string': """add_ must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("object",add_.add_, yang_name="add", rest_name="add", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='object', extensions={u'tailf-common': {u'callpoint': u'MplstrafficfecProfileObject', u'cli-suppress-list-no': None, u'cli-suppress-mode': None, u'info': u'Add MPLS traffic telemetry object'}}), is_container='list', yang_name="add", rest_name="add", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'MplstrafficfecProfileObject', u'cli-suppress-list-no': None, u'cli-suppress-mode': None, u'info': u'Add MPLS traffic telemetry object'}}, namespace='urn:brocade.com:mgmt:brocade-telemetry', defining_module='brocade-telemetry', yang_type='list', is_config=True)""",
        })
    self.__add_ = t
    # Notify the containing object of the change, when supported.
    if hasattr(self, '_set'):
        self._set()
|
python
|
def is_admin(self):
    """
    Return True when any of the user's role links carries the 'admin' code.
    """
    return any(link.role.code == 'admin' for link in self.roleusers)
|
python
|
def convert_notebook(self, name):
    """Converts a notebook into a python file."""
    exporter = nbconvert.exporters.python.PythonExporter()
    relative_path = self.convert_path(name)
    source_file = self.get_path("%s.ipynb" % relative_path)
    # from_filename returns (body, resources); only the body is needed.
    code, _resources = exporter.from_filename(source_file)
    self.write_code(name, code)
    self.clean_code(name, [])
|
python
|
def activities(self, *args, **kwargs):
    """Retrieve activities belonging to this scope.
    See :class:`pykechain.Client.activities` for available parameters.
    """
    # Older WIM apps (< 2.0.0) take the scope id as `scope`; newer ones
    # expect `scope_id`.
    uses_legacy_wim = self._client.match_app_version(label='wim', version='<2.0.0', default=True)
    if uses_legacy_wim:
        return self._client.activities(*args, scope=self.id, **kwargs)
    return self._client.activities(*args, scope_id=self.id, **kwargs)
|
python
|
def get_null_proj(self,nsing=None):
    """ get a null-space projection matrix of XTQX

    Parameters
    ----------
    nsing: int
        optional number of singular components to use
        If None, then nsing is determined from
        call to MonteCarlo.get_nsing()

    Returns
    -------
    v2_proj : pyemu.Matrix
        the null-space projection matrix (V2V2^T)

    """
    if nsing is None:
        nsing = self.get_nsing()
    if nsing is None:
        raise Exception("nsing is None")
    print("using {0} singular components".format(nsing))
    # NOTE(review): self.log is called with the same message before and
    # after the computation -- presumably a start/stop timing idiom of the
    # logger; confirm against the Logger implementation.
    self.log("forming null space projection matrix with " +\
             "{0} of {1} singular components".format(nsing,self.jco.shape[1]))
    # V2 * V2^T, where V2 holds the right singular vectors beyond nsing.
    v2_proj = (self.xtqx.v[:,nsing:] * self.xtqx.v[:,nsing:].T)
    self.log("forming null space projection matrix with " +\
             "{0} of {1} singular components".format(nsing,self.jco.shape[1]))
    return v2_proj
|
java
|
/**
 * Loads the specifications targeted at the given SUT that live in the given
 * repository, ordered by name, with lazy collections initialized before
 * returning.
 */
public List<Specification> getSpecifications(SystemUnderTest sut, Repository repository)
{
    final Criteria crit = sessionService.getSession().createCriteria(Specification.class);
    // Restrict to the requested repository by uid.
    crit.createAlias("repository", "repo");
    crit.add(Restrictions.eq("repo.uid", repository.getUid()));
    // Restrict to specifications targeting the SUT by name and project.
    crit.createAlias("targetedSystemUnderTests", "suts");
    crit.add(Restrictions.eq("suts.name", sut.getName()));
    crit.createAlias("suts.project", "sp");
    crit.add(Restrictions.eq("sp.name", sut.getProject().getName()));
    crit.addOrder(Order.asc("name"));
    @SuppressWarnings("unchecked")
    List<Specification> specifications = crit.list();
    // Force-load lazy collections while the session is still open.
    HibernateLazyInitializer.initCollection(specifications);
    return specifications;
}
|
python
|
def remove_authentication(self, auth_name=None, organization=None):
    """
    Remove the current authentication or the one given by `auth_name`
    """
    # Pick the endpoint: all authentications, one by name, or one scoped
    # to an organization.
    if not auth_name:
        url = '%s/authentications' % (self.domain,)
    elif organization:
        url = '%s/authentications/org/%s/name/%s' % (self.domain, organization, auth_name)
    else:
        url = '%s/authentications/name/%s' % (self.domain, auth_name)
    res = self.session.delete(url)
    self._check_response(res, [201])
|
python
|
async def prompt(self, text=None):
    '''
    Prompt for user input from stdin.
    '''
    # Create the prompt session lazily on first use.
    if self.sess is None:
        history = FileHistory(s_common.getSynPath('cmdr_history'))
        self.sess = PromptSession(history=history)

    prompt_text = self.cmdprompt if text is None else text

    with patch_stdout():
        return await self.sess.prompt(prompt_text, async_=True, vi_mode=self.vi_mode, enable_open_in_editor=True)
|
python
|
def update_channels(self):
    """Update the GUI to reflect channels and image listing.

    No-op while the GUI is not yet built. Raises ValueError when no
    channel can be resolved at all.
    """
    if not self.gui_up:
        return
    self.logger.debug("channel configuration has changed--updating gui")
    try:
        channel = self.fv.get_channel(self.chname)
    except KeyError:
        # Our channel went away; fall back to the currently active one.
        channel = self.fv.get_channel_info()
    if channel is None:
        raise ValueError('No channel available')

    self.chname = channel.name

    # Repopulate the channel selector widget.
    w = self.w.channel_name
    w.clear()
    self.chnames = list(self.fv.get_channel_names())
    for chname in self.chnames:
        w.append_text(chname)

    # Select the channel that is the current one.
    try:
        i = self.chnames.index(channel.name)
    except ValueError:
        # BUG FIX: list.index() raises ValueError, not IndexError -- the
        # original handler could never fire, so a missing name crashed here.
        i = 0
    self.w.channel_name.set_index(i)

    # Update the image listing.
    self.redo()
|
java
|
/**
 * Builds an HTTP client configuration from the given configuration holder.
 * The host name is mandatory; the port defaults to -1 when absent and SSL
 * defaults to false. Each fax action's HTTP method defaults to POST unless
 * overridden by its property.
 *
 * @throws FaxException when the host name property is missing
 */
public HTTPClientConfiguration createHTTPClientConfiguration(ConfigurationHolder configurationHolder)
{
    //get server values
    String hostName=configurationHolder.getConfigurationValue(HTTPClientConfigurationConstants.HOST_NAME_PROPERTY_KEY);
    if(hostName==null)
    {
        throw new FaxException("Host name not defined in property: "+HTTPClientConfigurationConstants.HOST_NAME_PROPERTY_KEY);
    }
    String value=configurationHolder.getConfigurationValue(HTTPClientConfigurationConstants.PORT_PROPERTY_KEY);
    if(value==null)
    {
        //no port configured: -1 signals "use the protocol default"
        value=String.valueOf(-1);
    }
    int port=Integer.parseInt(value);
    boolean ssl=Boolean.parseBoolean(configurationHolder.getConfigurationValue(HTTPClientConfigurationConstants.SSL_PROPERTY_KEY));

    //create configuration
    CommonHTTPClientConfiguration configuration=new CommonHTTPClientConfiguration();

    //set values
    configuration.setHostName(hostName);
    configuration.setPort(port);
    configuration.setSSL(ssl);

    //set methods: the property-key array and the action-type array are
    //index-aligned, i.e. methodProperties[i] configures faxActionTypes[i]
    Enum<?>[] methodProperties=new Enum<?>[]{HTTPClientConfigurationConstants.SUBMIT_HTTP_METHOD_PROPERTY_KEY,
        HTTPClientConfigurationConstants.SUSPEND_HTTP_METHOD_PROPERTY_KEY,
        HTTPClientConfigurationConstants.RESUME_HTTP_METHOD_PROPERTY_KEY,
        HTTPClientConfigurationConstants.CANCEL_HTTP_METHOD_PROPERTY_KEY,
        HTTPClientConfigurationConstants.GET_STATUS_HTTP_METHOD_PROPERTY_KEY};
    FaxActionType[] faxActionTypes=new FaxActionType[]{FaxActionType.SUBMIT_FAX_JOB,
        FaxActionType.SUSPEND_FAX_JOB,
        FaxActionType.RESUME_FAX_JOB,
        FaxActionType.CANCEL_FAX_JOB,
        FaxActionType.GET_FAX_JOB_STATUS};
    HTTPMethod httpMethod=null;
    for(int index=0;index<methodProperties.length;index++)
    {
        //set next method, falling back to POST when not configured
        value=configurationHolder.getConfigurationValue(methodProperties[index]);
        httpMethod=HTTPMethod.POST;
        if(value!=null)
        {
            httpMethod=HTTPMethod.valueOf(value);
        }
        configuration.setMethod(faxActionTypes[index],httpMethod);
    }

    return configuration;
}
|
python
|
def status(self):
    """Return a text report of per-type message rates.

    Sums the per-bucket counts and averages them over the number of
    buckets, yielding one "<type>: <rate>/s" line per message type,
    sorted by type name.
    """
    counts = {}
    for bucket in self.buckets:
        for mtype in bucket:
            counts[mtype] = counts.get(mtype, 0) + bucket[mtype]
    ret = ""
    # BUG FIX: dict.keys() returns a view in Python 3, which has no
    # .sort(); use sorted() over the dict instead.
    for mtype in sorted(counts):
        ret += "%s: %0.1f/s\n" % (mtype,
                                  counts[mtype] / float(len(self.buckets)))
    return ret
|
java
|
/**
 * Resolves the alias registered for the given parameter name, falling back
 * to the name itself when no alias mapping exists.
 */
public String findParameterAliasByName(String name) {
    return parameterName2Alias.containsKey(name)
            ? parameterName2Alias.get(name)
            : name;
}
|
python
|
def remove_p(self):
    """ Like :meth:`remove`, but does not raise an exception if the
    file does not exist.

    Any OSError other than ENOENT (e.g. permission denied) is re-raised.
    Returns self to allow chaining.
    """
    try:
        self.unlink()
    except OSError as e:
        # Modernized from the Python 2-era `sys.exc_info()` unpacking:
        # bind the exception directly and only swallow "no such file".
        if e.errno != errno.ENOENT:
            raise
    return self
|
java
|
/**
 * Replaces the drawer header with the given view.
 *
 * @param view the header view
 * @param padding true to position the container with top padding, false for none
 * @param divider whether to show a divider below the header
 * @param height optional fixed height for the header container
 */
public void setHeader(@NonNull View view, boolean padding, boolean divider, DimenHolder height) {
    // Drop any previous header before adding the new one.
    mDrawerBuilder.getHeaderAdapter().clear();
    if (padding) {
        mDrawerBuilder.getHeaderAdapter().add(new ContainerDrawerItem().withView(view).withDivider(divider).withHeight(height).withViewPosition(ContainerDrawerItem.Position.TOP));
    } else {
        mDrawerBuilder.getHeaderAdapter().add(new ContainerDrawerItem().withView(view).withDivider(divider).withHeight(height).withViewPosition(ContainerDrawerItem.Position.NONE));
    }
    //we need to set the padding so the header starts on top
    mDrawerBuilder.mRecyclerView.setPadding(mDrawerBuilder.mRecyclerView.getPaddingLeft(), 0, mDrawerBuilder.mRecyclerView.getPaddingRight(), mDrawerBuilder.mRecyclerView.getPaddingBottom());
}
|
python
|
def parse_from_parent(
    self,
    parent,  # type: ET.Element
    state  # type: _ProcessorState
):
    # type: (...) -> Any
    """Parse the array data from the provided parent XML element."""
    # Collect every child matching the configured item path, then hand the
    # whole batch to the internal parser.
    matching_items = parent.findall(self._item_path)
    return self._parse(matching_items, state)
|
java
|
/**
 * Lists finding-type stats for the given scan run.
 * Convenience overload that builds the request from the parent resource
 * name and delegates to the request-based variant.
 */
public final ListFindingTypeStatsResponse listFindingTypeStats(ScanRunName parent) {
    ListFindingTypeStatsRequest request =
        ListFindingTypeStatsRequest.newBuilder()
            .setParent(parent == null ? null : parent.toString())
            .build();
    return listFindingTypeStats(request);
}
|
python
|
def random(self, shape, tf_fn, kwargs):
    """Call a random tf operation (e.g. random_uniform).

    Args:
      shape: a Shape
      tf_fn: a function such as tf.random.uniform
      kwargs: kwargs to pass to tf_fn, except for seed

    Returns:
      a LaidOutTensor
    """
    # TODO(noam): can we make things better with stateless_random?
    slice_shape = self.slice_shape(shape)
    x = tf_fn(slice_shape, **kwargs)
    # TPU does not have seeds enabled. Sync up the
    # random choices by zeroing out all but the first core per group of
    # identical slices, then allreducing by group.
    layout = self.tensor_layout(shape)
    # we need to sync across these axes.
    # NOTE(review): `xrange` is Python 2 syntax -- presumably imported from
    # six at module level; confirm against the file's imports.
    mesh_axes = [i for i in xrange(self.ndims)
                 if i not in layout.tensor_axis_to_mesh_axis]
    # Keep only the slice held by pcoord 0 along every unsynced mesh axis;
    # all other copies are zeroed out.
    multiplier = 1.0
    for axis in mesh_axes:
        multiplier *= tf.cast(
            tf.equal(self.laid_out_pcoord(axis).one_slice, 0), x.dtype)
    x *= multiplier
    x = self.LaidOutTensor([x])
    # SUM-allreduce broadcasts the surviving slice back to the whole group.
    x = self.allreduce(x, mesh_axes, "SUM")
    return x
|
java
|
/**
 * Stores a value under the subscript built from the given components.
 * Convenience overload delegating to the string-subscript variant.
 */
public void put(Object[] subscript, Object value) {
    put(BrokerUtil.buildSubscript(subscript), value);
}
|
java
|
/**
 * Asynchronously retrieves the storage account info, reporting the outcome
 * to the given callback and returning a future for the result.
 */
public ServiceFuture<StorageAccountInfoInner> getStorageAccountAsync(String resourceGroupName, String accountName, String storageAccountName, final ServiceCallback<StorageAccountInfoInner> serviceCallback) {
    return ServiceFuture.fromResponse(getStorageAccountWithServiceResponseAsync(resourceGroupName, accountName, storageAccountName), serviceCallback);
}
|
java
|
/**
 * Opens the FTP connection if not already connected, logs in when a user
 * is configured, and switches to local passive mode when requested.
 *
 * @throws IOException on low-level connection errors
 * @throws CitrusRuntimeException when the server refuses the connection
 *         or the login fails
 */
protected void connectAndLogin() throws IOException {
    if (!ftpClient.isConnected()) {
        ftpClient.connect(getEndpointConfiguration().getHost(), getEndpointConfiguration().getPort());
        if (log.isDebugEnabled()) {
            log.debug("Connected to FTP server: " + ftpClient.getReplyString());
        }
        // Verify the server accepted the connection before proceeding.
        int reply = ftpClient.getReplyCode();
        if (!FTPReply.isPositiveCompletion(reply)) {
            throw new CitrusRuntimeException("FTP server refused connection.");
        }
        log.info("Opened connection to FTP server");
        // Only attempt a login when credentials are configured.
        if (getEndpointConfiguration().getUser() != null) {
            if (log.isDebugEnabled()) {
                log.debug(String.format("Login as user: '%s'", getEndpointConfiguration().getUser()));
            }
            boolean login = ftpClient.login(getEndpointConfiguration().getUser(), getEndpointConfiguration().getPassword());
            if (!login) {
                throw new CitrusRuntimeException(String.format("Failed to login to FTP server using credentials: %s:%s", getEndpointConfiguration().getUser(), getEndpointConfiguration().getPassword()));
            }
        }
        if (getEndpointConfiguration().isLocalPassiveMode()) {
            ftpClient.enterLocalPassiveMode();
        }
    }
}
|
java
|
/**
 * Returns the covariance matrix expanded to cover all parameters.
 * The fit only produced covariances for parameters with dofit=true;
 * rows/columns belonging to fixed parameters are filled with zeros.
 */
public double[][] getCovmat() {
    // Since we worked only on params with dofit=true, we need to expand the
    // matrix to cover all parameters.
    double[][] fullcov = new double[numparams][numparams];
    // i/j walk all parameters; i2/j2 track the corresponding row/column in
    // the compact covmat and only advance past fitted parameters.
    for(int i = 0, i2 = 0; i < numparams; i++) {
        for(int j = 0, j2 = 0; j < numparams; j++) {
            fullcov[i][j] = (dofit[i] && dofit[j]) ? covmat[i2][j2] : 0;
            if(dofit[j]) {
                j2++;
            }
        }
        if(dofit[i]) {
            i2++;
        }
    }
    return fullcov;
}
|
java
|
/**
 * Parses the payload into the method's request type, invokes the server
 * method asynchronously, and wires a callback that reports the outcome for
 * the given request id over the channel. Payload parse failures are routed
 * to the same callback as invocation failures.
 */
private <I extends Message, O extends Message> void invoke(
    ServerMethod<I, O> method, ByteString payload, long requestId, Channel channel) {
    FutureCallback<O> callback = new ServerMethodCallback<>(method, requestId, channel);
    try {
        I request = method.inputParser().parseFrom(payload);
        ListenableFuture<O> result = method.invoke(request);
        // Track the in-flight invocation so it can be looked up by request id.
        pendingRequests.put(requestId, result);
        Futures.addCallback(result, callback, responseCallbackExecutor);
    } catch (InvalidProtocolBufferException ipbe) {
        callback.onFailure(ipbe);
    }
}
|
python
|
def check(self):
    """
    Returns #True if the timeout is exceeded.

    A value of None means "no timeout", so this always returns False then.
    """
    if self.value is None:
        return False
    # NOTE(review): time.clock() was deprecated in Python 3.3 and removed in
    # 3.8, so this breaks on modern interpreters. self.start is presumably
    # captured with the same clock elsewhere, so both sites must migrate
    # together (e.g. to time.perf_counter()) -- confirm before changing.
    return (time.clock() - self.start) >= self.value
|
python
|
def cross(environment, book, row, sheet_source, column_source, column_key):
    """
    Returns a single value from a column from a different dataset, matching by the key.
    """
    source_sheet = book.sheets[sheet_source]
    # Look up the row in the source sheet whose key column matches this row's key.
    matched_row = source_sheet.get(**{column_key: row[column_key]})
    return environment.copy(matched_row[column_source])
|
java
|
/**
 * Resets the movement intervals around the given localizable: intervals are
 * temporarily zeroed so the location can be applied directly, then restored.
 */
public void resetInterval(Localizable localizable)
{
    final int intervalHorizontalOld = intervalHorizontal;
    final int intervalVerticalOld = intervalVertical;
    final double oldX = getX();
    final double oldY = getY();

    setIntervals(0, 0);
    offset.setLocation(0.0, 0.0);
    setLocation(localizable.getX(), localizable.getY());

    final double newX = getX();
    final double newY = getY();
    // NOTE(review): these two moves are positionally symmetric and cancel
    // out -- presumably they force internal movement state to refresh;
    // confirm before simplifying.
    moveLocation(1.0, oldX - newX, oldY - newY);
    moveLocation(1.0, newX - oldX, newY - oldY);

    setIntervals(intervalHorizontalOld, intervalVerticalOld);
    offset.setLocation(0.0, 0.0);
}
|
python
|
def paw_header(filename, ppdesc):
    """
    Parse the PAW abinit header. Examples:
    Paw atomic data for element Ni - Generated by AtomPAW (N. Holzwarth) + AtomPAW2Abinit v3.0.5
    28.000 18.000 20061204 : zatom,zion,pspdat
    7 7 2 0 350 0. : pspcod,pspxc,lmax,lloc,mmax,r2well
    paw3 1305 : pspfmt,creatorID
    5 13 : basis_size,lmn_size
    0 0 1 1 2 : orbitals
    3 : number_of_meshes
    1 3 350 1.1803778368E-05 3.5000000000E-02 : mesh 1, type,size,rad_step[,log_step]
    2 1 921 2.500000000000E-03 : mesh 2, type,size,rad_step[,log_step]
    3 3 391 1.1803778368E-05 3.5000000000E-02 : mesh 3, type,size,rad_step[,log_step]
    2.3000000000 : r_cut(SPH)
    2 0.
    Another format:
    C (US d-loc) - PAW data extracted from US-psp (D.Vanderbilt) - generated by USpp2Abinit v2.3.0
    6.000 4.000 20090106 : zatom,zion,pspdat
    7 11 1 0 560 0. : pspcod,pspxc,lmax,lloc,mmax,r2well
    paw4 2230 : pspfmt,creatorID
    4 8 : basis_size,lmn_size
    0 0 1 1 : orbitals
    5 : number_of_meshes
    1 2 560 1.5198032759E-04 1.6666666667E-02 : mesh 1, type,size,rad_step[,log_step]
    2 2 556 1.5198032759E-04 1.6666666667E-02 : mesh 2, type,size,rad_step[,log_step]
    3 2 576 1.5198032759E-04 1.6666666667E-02 : mesh 3, type,size,rad_step[,log_step]
    4 2 666 1.5198032759E-04 1.6666666667E-02 : mesh 4, type,size,rad_step[,log_step]
    5 2 673 1.5198032759E-04 1.6666666667E-02 : mesh 5, type,size,rad_step[,log_step]
    1.5550009124 : r_cut(PAW)
    3 0. : shape_type,rshape
    Yet nnother one:
    Paw atomic data for element Si - Generated by atompaw v3.0.1.3 & AtomPAW2Abinit v3.3.1
    14.000 4.000 20120814 : zatom,zion,pspdat
    7 11 1 0 663 0. : pspcod,pspxc,lmax,lloc,mmax,r2well
    paw5 1331 : pspfmt,creatorID
    4 8 : basis_size,lmn_size
    0 0 1 1 : orbitals
    5 : number_of_meshes
    1 2 663 8.2129718540404674E-04 1.1498160595656655E-02 : mesh 1, type,size,rad_step[,log_step]
    2 2 658 8.2129718540404674E-04 1.1498160595656655E-02 : mesh 2, type,size,rad_step[,log_step]
    3 2 740 8.2129718540404674E-04 1.1498160595656655E-02 : mesh 3, type,size,rad_step[,log_step]
    4 2 819 8.2129718540404674E-04 1.1498160595656655E-02 : mesh 4, type,size,rad_step[,log_step]
    5 2 870 8.2129718540404674E-04 1.1498160595656655E-02 : mesh 5, type,size,rad_step[,log_step]
    1.5669671236 : r_cut(PAW)
    2 0. : shape_type,rshape
    """
    supported_formats = ["paw3", "paw4", "paw5"]
    if ppdesc.format not in supported_formats:
        raise NotImplementedError("format %s not in %s" % (ppdesc.format, supported_formats))

    # Read the whole file (-1 == no line limit).
    lines = _read_nlines(filename, -1)

    summary = lines[0]
    # First five lines: free-form summary plus the four fixed header records
    # (the token-count list [0, 3, 6, 2, 2] matches the layouts shown above).
    header = _dict_from_lines(lines[:5], [0, 3, 6, 2, 2], sep=":")
    lines = lines[5:]
    # TODO
    # Parse orbitals and number of meshes.
    header["orbitals"] = [int(t) for t in lines[0].split(":")[0].split()]
    header["number_of_meshes"] = num_meshes = int(lines[1].split(":")[0])
    #print filename, header

    # Skip the per-mesh description lines; only their count matters here.
    lines = lines[2+num_meshes:]
    #for midx in range(num_meshes):
    #    l = midx + 1

    #print lines[0]
    header["r_cut"] = float(lines[0].split(":")[0])
    #print lines[1]
    # shape_type and rshape come from the final two-token record.
    header.update(_dict_from_lines(lines[1], [2], sep=":"))

    #print("PAW header\n", header)
    return PawAbinitHeader(summary, **header)
|
java
|
/**
 * Pushes a new value into the text area, scheduling a resize only when the
 * number of lines changed. Does nothing when the value is unchanged.
 */
private void updateTextArea(String value) {
    String previous = m_textArea.getValue();
    if (previous.equals(value)) {
        return;
    }
    int previousLineCount = previous.split("\n").length;
    int newLineCount = value.split("\n").length;
    m_textArea.setValue(value);
    if (previousLineCount != newLineCount) {
        scheduleResize();
    }
}
|
python
|
def _checkContactType(self, contactType):
"""
Possibly emit some warnings about C{contactType}'s implementation of
L{IContactType}.
@type contactType: L{IContactType} provider
"""
if getattr(contactType, 'getEditFormForPerson', None) is None:
warn(
"IContactType now has the 'getEditFormForPerson'"
" method, but %s did not implement it." % (
contactType.__class__,),
category=PendingDeprecationWarning)
if getattr(contactType, 'getEditorialForm', None) is not None:
warn(
"The IContactType %s defines the 'getEditorialForm'"
" method, which is deprecated. 'getEditFormForPerson'"
" does something vaguely similar." % (contactType.__class__,),
category=DeprecationWarning)
|
python
|
def find_consensus(bases):
    """
    find consensus base based on nucleotide
    frequencies

    `bases` maps nucleotides ('A','T','G','C','N') to counts plus 'ref'
    (reference base). Adds 'consensus' -> (base, count), 'consensus
    frequency' and 'reference frequency' keys and returns the dict.
    """
    nucs = ['A', 'T', 'G', 'C', 'N']
    total = sum([bases[nuc] for nuc in nucs if nuc in bases])
    # save most common base as consensus (random nuc if there is a tie)
    try:
        top = max([bases[nuc] for nuc in nucs if nuc in bases])
    except ValueError:
        # BUG FIX: was a bare `except:`. max() on an empty list raises
        # ValueError (no nucleotide keys present at all); catching only
        # that no longer masks unrelated errors like KeyboardInterrupt.
        bases['consensus'] = ('N', 'n/a')
        bases['consensus frequency'] = 'n/a'
        bases['reference frequency'] = 'n/a'
        return bases
    top = [(nuc, bases[nuc]) for nuc in bases if bases[nuc] == top]
    if top[0][1] == 0:
        # All counts are zero: no consensus can be called.
        bases['consensus'] = ('n/a', 0)
    else:
        bases['consensus'] = random.choice(top)
    if total == 0:
        c_freq = 'n/a'
        ref_freq = 'n/a'
    else:
        c_freq = float(bases['consensus'][1]) / float(total)
        if bases['ref'] not in bases:
            ref_freq = 0
        else:
            ref_freq = float(bases[bases['ref']]) / float(total)
    bases['consensus frequency'] = c_freq
    bases['reference frequency'] = ref_freq
    return bases
|
java
|
/**
 * Creates a calendar-based EJB timer for this bean.
 *
 * @param schedule the calendar schedule; must not be null
 * @param timerConfig optional configuration supplying the info payload and
 *        persistence flag (defaults: null info, persistent)
 * @return the created timer
 * @throws IllegalStateException if the bean is not a timed object, is an
 *         entity bean, or is not in a state allowing timer service access
 * @throws IllegalArgumentException if schedule is null
 */
@Override
public Timer createCalendarTimer(ScheduleExpression schedule, TimerConfig timerConfig)
{
    Serializable info = timerConfig == null ? null : timerConfig.getInfo();
    boolean persistent = timerConfig == null || timerConfig.isPersistent();

    boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
    if (isTraceOn && tc.isEntryEnabled())
        Tr.entry(tc, "createCalendarTimer: " + persistent, this);

    // Bean must implement TimedObject interface or have a timeout method to create a timer.
    if (!home.beanMetaData.isTimedObject)
    {
        IllegalStateException ise = new IllegalStateException(
            "Timer Service: Bean does not implement TimedObject: " + beanId);
        if (isTraceOn && tc.isEntryEnabled())
            Tr.exit(tc, "createCalendarTimer: " + ise);
        throw ise;
    }

    // Calendar-based timers are not supported for entity beans.
    if (home.beanMetaData.isEntityBean()) // d595255
    {
        IllegalStateException ise = new IllegalStateException(
            "Timer Service: Entity beans cannot use calendar-based timers: " + beanId);
        if (isTraceOn && tc.isEntryEnabled())
            Tr.exit(tc, "createCalendarTimer: " + ise);
        throw ise;
    }

    // Determine if this bean is in a state that allows timer service
    // method access - throws IllegalStateException if not allowed.
    checkTimerServiceAccess();

    // Make sure the arguments are valid....
    if (schedule == null)
    {
        IllegalArgumentException ise = new IllegalArgumentException(
            "TimerService: schedule not a valid value: null");
        if (isTraceOn && tc.isEntryEnabled())
            Tr.exit(tc, "createCalendarTimer: " + ise);
        throw ise;
    }

    // Interval of -1 marks this as a schedule-driven (not interval) timer.
    Timer timer = container.getEJBRuntime().createTimer(this, null, -1, schedule, info, persistent); // F743-13022

    if (isTraceOn && tc.isEntryEnabled())
        Tr.exit(tc, "createCalendarTimer: " + timer);
    return timer;
}
|
java
|
/**
 * Drains the accumulated mutation exceptions and wraps them in a
 * RetriesExhaustedWithDetailsException, or returns null when none are
 * pending. Safe against concurrent producers via the synchronized block
 * on globalExceptions.
 */
private RetriesExhaustedWithDetailsException getExceptions()
    throws RetriesExhaustedWithDetailsException {
    // Cheap flag check first to avoid taking the lock on the common path.
    if (!hasExceptions.get()) {
        return null;
    }
    ArrayList<MutationException> mutationExceptions = null;
    synchronized (globalExceptions) {
        hasExceptions.set(false);
        if (globalExceptions.isEmpty()) {
            return null;
        }
        // Snapshot and clear under the lock so producers can keep appending.
        mutationExceptions = new ArrayList<>(globalExceptions);
        globalExceptions.clear();
    }
    List<Throwable> problems = new ArrayList<>(mutationExceptions.size());
    ArrayList<String> hostnames = new ArrayList<>(mutationExceptions.size());
    List<Row> failedMutations = new ArrayList<>(mutationExceptions.size());

    // Warn once for the first failure; the rest are logged at debug level.
    if (!mutationExceptions.isEmpty()) {
        LOG.warn("Exception occurred in BufferedMutator", mutationExceptions.get(0).throwable);
    }
    for (MutationException mutationException : mutationExceptions) {
        problems.add(mutationException.throwable);
        failedMutations.add(mutationException.mutation);
        hostnames.add(host);
        LOG.debug("Exception occurred in BufferedMutator", mutationException.throwable);
    }

    return new RetriesExhaustedWithDetailsException(problems, failedMutations, hostnames);
}
|
python
|
def choose_init(module):
    """
    Select a init system

    Returns the name of a init system (upstart, sysvinit ...).
    """
    # Pre-7 releases never use systemd.
    if module.normalized_release.int_major < 7:
        return 'sysvinit'
    # Without the ceph systemd unit installed, fall back to sysvinit.
    if not module.conn.remote_module.path_exists("/usr/lib/systemd/system/ceph.target"):
        return 'sysvinit'
    # BUG FIX: the original probed is_systemd(module.conn) here, but both
    # the taken and fall-through branches returned 'systemd', so the remote
    # check was dead code; it has been removed.
    return 'systemd'
|
java
|
/**
 * Creates the "new navigation level" entry for the creatable-items list,
 * wiring its drag-and-drop move handle to the sitemap tree.
 */
private CmsCreatableListItem makeNavigationLevelItem() {
    CmsNewResourceInfo typeInfo = getController().getData().getNewNavigationLevelElementInfo();
    CmsListItemWidget widget = new CmsListItemWidget(typeInfo);
    CmsCreatableListItem listItem = new CmsCreatableListItem(widget, typeInfo, NewEntryType.regular);
    listItem.initMoveHandle(CmsSitemapView.getInstance().getTree().getDnDHandler());
    return listItem;
}
|
java
|
/**
 * Persists the cache entry's binary key/value pair as a Riak object.
 * IOExceptions are rethrown unchecked since the overridden contract
 * declares no checked exceptions.
 */
@Override
public void store(BinaryEntry entry) {
    try {
        ByteString key = getRiakKey(entry);
        ByteString value = ByteString.copyFrom(entry.getBinaryValue().toByteArray());
        client.store(new RiakObject(bucket, key, value));
    }
    catch (IOException e) {
        throw new RuntimeException(e);
    }
}
|
python
|
def preferred_jvm_distribution(cls, platforms, strict=False):
    """Returns a jvm Distribution with a version that should work for all the platforms.

    Any one of those distributions whose version is >= all requested platforms' versions
    can be returned unless strict flag is set.

    :param iterable platforms: An iterable of platform settings.
    :param bool strict: If true, only distribution whose version matches the minimum
      required version can be returned, i.e, the max target_level of all the requested
      platforms.
    :returns: Distribution one of the selected distributions.
    """
    if not platforms:
        return DistributionLocator.cached()
    # The lowest acceptable JVM version is the highest level any platform targets.
    min_version = max(platform.target_level for platform in platforms)
    if strict:
        # Cap the range so only the exact minimum level can match.
        max_version = Revision(*(min_version.components + [9999]))
    else:
        max_version = None
    return DistributionLocator.cached(minimum_version=min_version, maximum_version=max_version)
|
java
|
/**
 * Collects the given entries into a HashMap; delegates to IterUtil.toMap.
 */
public static <K, V> HashMap<K, V> toMap(Iterable<Entry<K, V>> entryIter) {
    return IterUtil.toMap(entryIter);
}
|
java
|
/**
 * Adds (or merges) a row of column values at the given timestamp.
 * The whole update is rejected with UnknownFieldException if any column
 * name is not part of the known column set.
 */
public void add(long timestamp, Map<String, Object> data) {
    // Validate every column before touching the row state.
    for (String column : data.keySet()) {
        if (!columns.contains(column)) {
            throw new UnknownFieldException(column);
        }
    }
    Map<String, Object> existing = this.rows.get(timestamp);
    if (existing != null) {
        // Merge into the row already stored at this timestamp.
        existing.putAll(data);
    } else {
        // First data for this timestamp: store a defensive copy.
        this.rows.put(timestamp, new HashMap<String, Object>(data));
    }
}
|
python
|
def xsDateTime_parse(xdt_str, local_tz=None):
    """
    Parses xsDateTime strings of form 2017-01-27T14:58:00+0600, etc.
    Returns a *naive* datetime in local time according to local_tz.

    BUG FIX: the original compared strings and ints to literals with
    `is`/`is not` (e.g. ``offset_str is 'Z'``, ``offset_len is 6``).
    That tests object identity, which only works by accident of CPython
    interning -- a sliced 1-character offset string is not guaranteed to
    be the same object as the literal, so valid inputs could be rejected.
    All such comparisons now use ==/!=. Bare ``except:`` clauses were also
    narrowed to ValueError (what strptime/int raise on bad input).
    """
    if not isinstance(xdt_str, basestring):
        raise InvalidXSDateTime(
            "Expecting str or unicode, got {}.".format(type(xdt_str))
        )
    try:
        # This won't parse the offset (or other tzinfo)
        naive_dt = datetime.strptime(xdt_str[0:XSDT_TZ_OFFSET], XSDT_FMT)
    except ValueError:
        raise InvalidXSDateTime("Malformed date/time ('%s')." % (xdt_str,))
    naive_len = XSDT_TZ_OFFSET
    offset_len = len(xdt_str) - naive_len
    offset_str = xdt_str[-offset_len:]
    offset_hours = None
    offset_minutes = None
    offset_sign = 1
    parsed = None
    # Parse fractional seconds if present
    fsec_i = 0
    if not offset_len:
        parsed = naive_dt
    elif offset_str[0] == '.':
        if offset_len > 1:
            fsec_i = 1
            fsec_chr = offset_str[fsec_i]
            fsec = ''
            while fsec_chr.isdigit():
                fsec += fsec_chr
                fsec_i += 1
                if fsec_i >= offset_len:
                    break
                fsec_chr = offset_str[fsec_i]
            fsec = float('.' + fsec)
            naive_dt += timedelta(milliseconds=fsec * 1000)
        else:
            raise InvalidXSDateTime('Malformed fractional seconds.')
    # Reset offset length and set offset string to actual offset,
    # if we found fractional seconds -- otherwise this is all a noop
    offset_len -= fsec_i
    if offset_len:
        offset_str = offset_str[fsec_i:fsec_i + offset_len + 1]
    else:
        offset_str = ''
    # Get local timezone info using local_tz (tzinfo)
    # throws pytz.exceptions.UnknownTimezoneError
    # on bad timezone name
    if local_tz is None:
        local_tz = DEFAULT_LOCAL_TZ
    # Parse offset
    if not offset_len:
        # If there is no offset, assume local time
        # and return the naive datetime we have
        parsed = naive_dt
        return parsed
    # +00:00
    elif offset_len == 6:
        if offset_str[0] not in "+-":
            raise InvalidXSDateTime("Malformed offset (missing sign).")
        elif offset_str[0] == '-':
            offset_sign = -1
        try:
            offset_hours = int(offset_str[1:3])
        except ValueError:
            raise InvalidXSDateTime("Malformed offset (invalid hours '%s')"
                                    % (offset_str[1:3],))
        if offset_str[3] != ':':
            raise InvalidXSDateTime("Colon missing in offset (no colon).")
        try:
            offset_minutes = int(offset_str[4:6])
        except ValueError:
            raise InvalidXSDateTime("Malformed offset (invalid minutes '%s')"
                                    % (offset_str[4:6],))
        offset = offset_hours * 60 + offset_minutes
        offset *= offset_sign
        faux_timezone = XSDateTimezone(offset_hours, offset_minutes, offset_sign)
        parsed = naive_dt.replace(tzinfo=faux_timezone)
    # Z
    elif offset_len == 1:
        if offset_str == 'Z':
            parsed = naive_dt.replace(tzinfo=XSDateTimezone())
        else:
            raise InvalidXSDateTime("Unrecognized timezone identifier '%s'." %
                                    (offset_str,))
    else:
        raise InvalidXSDateTime("Malformed offset '%s'." % (offset_str,))
    # We've parsed the offset str. now,
    # Flatten datetime w/ tzinfo into a
    # Naive datetime, utc
    offset = parsed.utcoffset()
    parsed = parsed.replace(tzinfo=None)
    if offset is not None:
        parsed -= offset
    # Add utc timezone info
    parsed = utc_tz.localize(parsed)
    # Convert to local timezone and make naive again
    parsed = parsed.astimezone(local_tz).replace(tzinfo=None)
    return parsed
|
java
|
/**
 * Converts this duration to a total number of nanoseconds; the safe math
 * helpers throw ArithmeticException on long overflow.
 */
public long toNanos() {
    long result = Jdk8Methods.safeMultiply(seconds, NANOS_PER_SECOND);
    result = Jdk8Methods.safeAdd(result, nanos);
    return result;
}
|
java
|
/**
 * Runs text recognition on the image at the given URL, blocking until the
 * service call completes. The response body is discarded; failures surface
 * as exceptions from the underlying call.
 */
public void recognizeText(String url, TextRecognitionMode mode) {
    recognizeTextWithServiceResponseAsync(url, mode).toBlocking().single().body();
}
|
java
|
/**
 * Registers the destination handler in the appropriate index:
 * links and ordinary destinations are tracked separately.
 */
protected void activateDestination(DestinationHandler dh)
{
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.entry(tc, "activateDestination", dh);

    if (dh.isLink())
    {
        linkIndex.create(dh);
    }
    else
    {
        destinationIndex.create(dh);
    }

    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.exit(tc, "activateDestination");
}
|
java
|
/**
 * Determines the value(s) to submit for a form field, delegating the final
 * choice of each value to the configured ValueGenerator. Submit controls
 * produce one generated value per predefined value; all other controls
 * produce a single generated value.
 */
private List<String> getDefaultTextValue(FormField field) {
    // Get the Id
    String fieldId = field.getName();

    // Create new HashMap 'fieldAttributes' and new list 'definedValues'
    Map<String, String> fieldAttributes = new HashMap<String, String>();
    List<String> definedValues = new ArrayList<String>();

    //Store all values in the FormFiled field into the Map 'fieldAttributes'
    fieldAttributes.putAll(field.getFormControl().getAttributesMap());

    // Places a key, Control Type, for each FormControlType
    fieldAttributes.put("Control Type", field.getFormControl().getFormControlType().name());

    //Handles Submit Fields: one generated value per predefined value.
    if (field.getFormControl().getFormControlType().isSubmit()) {
        List<String> submitFields = new ArrayList<String>();
        for (String value : field.getPredefinedValues()){
            String finalValue = this.valueGenerator.getValue(uri, url, fieldId, value, definedValues, envAttributes, fieldAttributes);
            submitFields.add(finalValue);
        }
        return submitFields;
    }

    // Get its value(s)
    List<String> values = field.getValues();
    String defaultValue;

    //If the field has a value attribute present(Predefined value)
    //Should store the value being submitted to be passed to the ValueGenerator
    // NOTE(review): this assignment is overwritten by every branch below,
    // so the "value" attribute never actually reaches the generator --
    // confirm whether that is the intended behavior.
    if(field.getFormControl().getAttributesMap().containsKey("value")){
        defaultValue = field.getFormControl().getAttributesMap().get("value");
    }

    if (log.isDebugEnabled()) {
        log.debug("Existing values: " + values);
    }

    // If there are no values at all or only an empty value
    if (values.isEmpty() || (values.size() == 1 && values.get(0).isEmpty())) {
        defaultValue = DEFAULT_EMPTY_VALUE;

        // Check if we can use predefined values
        Collection<String> predefValues = field.getPredefinedValues();
        if (!predefValues.isEmpty()) {
            //Store those predefined values in a list for the DefaultValueGenerator
            definedValues.addAll(predefValues);
            // Try first elements
            Iterator<String> iterator = predefValues.iterator();
            defaultValue = iterator.next();

            // If there are more values, don't use the first, as it usually is a "No select"
            // item
            if (iterator.hasNext()) {
                defaultValue = iterator.next();
            }
        }
    } else {
        defaultValue = values.get(0);
    }

    //Get the default value used in DefaultValueGenerator
    String finalValue = this.valueGenerator.getValue(uri, url, fieldId, defaultValue, definedValues, envAttributes, fieldAttributes);

    log.debug("Generated: " + finalValue + "For field " + field.getName());

    values = new ArrayList<>(1);
    values.add(finalValue);
    return values;
}
|
python
|
def _route(self):
    """Register the REST API and Web UI routes on the Bottle app."""
    # REST API: every endpoint shares the same prefix and the GET method.
    prefix = '/api/%s' % self.API_VERSION
    rest_routes = (
        ('/config', self._api_config),
        ('/config/<item>', self._api_config_item),
        ('/args', self._api_args),
        ('/args/<item>', self._api_args_item),
        ('/help', self._api_help),
        ('/pluginslist', self._api_plugins),
        ('/all', self._api_all),
        ('/all/limits', self._api_all_limits),
        ('/all/views', self._api_all_views),
        ('/<plugin>', self._api),
        ('/<plugin>/history', self._api_history),
        ('/<plugin>/history/<nb:int>', self._api_history),
        ('/<plugin>/limits', self._api_limits),
        ('/<plugin>/views', self._api_views),
        ('/<plugin>/<item>', self._api_item),
        ('/<plugin>/<item>/history', self._api_item_history),
        ('/<plugin>/<item>/history/<nb:int>', self._api_item_history),
        ('/<plugin>/<item>/<value>', self._api_value),
    )
    for path, callback in rest_routes:
        self._app.route(prefix + path, method="GET", callback=callback)
    bindmsg = 'Glances RESTful API Server started on {}api/{}/'.format(self.bind_url,
                                                                       self.API_VERSION)
    logger.info(bindmsg)
    # WEB UI
    if not self.args.disable_webui:
        self._app.route('/', method="GET", callback=self._index)
        self._app.route('/<refresh_time:int>', method=["GET"], callback=self._index)
        self._app.route('/<filepath:path>', method="GET", callback=self._resource)
        bindmsg = 'Glances Web User Interface started on {}'.format(self.bind_url)
        logger.info(bindmsg)
    else:
        logger.info('The WebUI is disable (--disable-webui)')
    # Echo the last bind message to stdout as well
    print(bindmsg)
|
java
|
// Convert a boxed expression (e.g. Integer) into its primitive value by
// appending the matching xxxValue() call (e.g. intValue()).
private void unbox(Expression expr, PrimitiveType primitiveType) {
  TypeElement boxedClass = findBoxedSuperclass(expr.getTypeMirror());
  // Infer the target primitive from the boxed type when none was given.
  if (primitiveType == null && boxedClass != null) {
    primitiveType = typeUtil.unboxedType(boxedClass.asType());
  }
  // Neither an explicit target nor a boxed type: nothing to unbox.
  if (primitiveType == null) {
    return;
  }
  ExecutableElement valueMethod = ElementUtil.findMethod(
      boxedClass, TypeUtil.getName(primitiveType) + VALUE_METHOD);
  assert valueMethod != null : "could not find value method for " + boxedClass;
  MethodInvocation invocation = new MethodInvocation(new ExecutablePair(valueMethod), null);
  // Order matters: first replace expr in the tree, then make the (now
  // detached) expr the receiver of the new valueMethod invocation.
  expr.replaceWith(invocation);
  invocation.setExpression(expr);
}
|
java
|
/**
 * Parse a block tag (e.g. {@code @param}) beginning at the current position.
 * Unknown tags and parse failures fall back to reading plain block content.
 */
protected void blockTag() {
    int p = bp; // remember tag start position for the tag parser
    try {
        nextChar();
        if (isIdentifierStart(ch)) {
            String name = readTagName();
            TagParser tp = tagParsers.get(name);
            if (tp == null) {
                // Unknown tag name: treat the remainder as plain content.
                blockContent();
            } else {
                switch (tp.getKind()) {
                    case BLOCK:
                        tp.parse(p);
                        return;
                    case INLINE:
                        // Inline tag used in block position: skip it.
                        return;
                }
            }
        }
        blockContent();
    } catch (ParseException e) {
        // Recover from malformed tags by consuming the content.
        blockContent();
    }
}
|
python
|
def _load_pil_image(self, filename):
    """
    Load image using PIL.

    Stores a grayscale preview in ``self._image`` and, for multi-channel
    images, fills ``self._channel_data`` (working copies) and
    ``self._original_channel_data`` (pristine copies) with normalized
    per-channel arrays.

    :param filename: path to the image file readable by PIL
    """
    self._channel_data = []
    self._original_channel_data = []
    im = Image.open(filename)
    self._image = ImageOps.grayscale(im)
    im.load()
    file_data = np.asarray(im, float)
    # Normalize to [0, 1]; guard against an all-zero image to avoid NaNs.
    max_value = file_data.max()
    if max_value > 0:
        file_data = file_data / max_value
    # if the image has more than one channel, load them
    if len(file_data.shape) == 3:
        num_channels = file_data.shape[2]
        for i in range(num_channels):
            self._channel_data.append(file_data[:, :, i])
            # BUG FIX: store an independent copy so later in-place edits of
            # _channel_data cannot silently mutate the "original" data.
            self._original_channel_data.append(file_data[:, :, i].copy())
|
java
|
@Override
public int read (@Nonnull final byte [] aBuf,
                 @Nonnegative final int nOfs,
                 @Nonnegative final int nLen) throws IOException
{
  // Validate the (buffer, offset, length) triple before delegating
  ValueEnforcer.isArrayOfsLen (aBuf, nOfs, nLen);
  final int nBytesRead = super.read (aBuf, nOfs, nLen);
  if (nBytesRead <= 0)
    return nBytesRead;
  // Track the number of consumed bytes and enforce the configured limit
  m_nCount += nBytesRead;
  _checkLimit ();
  return nBytesRead;
}
|
java
|
/**
 * Release this table: detach from the database, free all key areas and
 * notify all owners. Safe to call once; fields are nulled as they are freed.
 */
public void free()
{
    if (m_PDatabase != null)
        m_PDatabase.removePTable(this);
    m_PDatabase = null;
    if (m_VKeyList != null)
    {
        // Free key areas in reverse order, then clear the list.
        for (int i = m_VKeyList.size() - 1 ; i >= 0 ;i--) {
            PKeyArea vKeyArea = (PKeyArea)m_VKeyList.elementAt(i);
            vKeyArea.free();
        }
        m_VKeyList.removeAllElements();
    }
    m_VKeyList = null;
    if (m_setPTableOwners != null)
    {
        // BUG FIX: iterate over a snapshot — removePTableOwner presumably
        // mutates m_setPTableOwners, which would otherwise throw a
        // ConcurrentModificationException mid-iteration.
        for (ThinPhysicalTableOwner owner : m_setPTableOwners.toArray(new ThinPhysicalTableOwner[0]))
        {
            this.removePTableOwner(owner, false);
        }
    }
}
|
python
|
def get_skill_entry(name, skills_data) -> dict:
    """Return the skill entry matching ``name`` from ``skills_data``, or {}."""
    entries = skills_data.get('skills', [])
    # First entry whose 'name' matches wins; {} when nothing matches.
    return next((entry for entry in entries if entry.get('name') == name), {})
|
python
|
def stk_description_metadata(description):
    """Return metadata from MetaMorph image description as list of dict.

    The MetaMorph image description format is unspecified. Expect failures.
    """
    description = description.strip()
    if not description:
        return []
    try:
        description = bytes2str(description)
    except UnicodeDecodeError as exc:
        log.warning('stk_description_metadata: %s: %s',
                    exc.__class__.__name__, exc)
        return []
    planes = []
    # Planes are NUL-separated; each plane is a CRLF-separated key:value list.
    for plane in description.split('\x00'):
        meta = {}
        for raw_line in plane.split('\r\n'):
            parts = raw_line.split(':', 1)
            if len(parts) > 1:
                key, value = parts
                meta[key.strip()] = astype(value.strip())
            else:
                # Lines without a colon are collected under the '' key.
                value = parts[0].strip()
                if value:
                    meta.setdefault('', []).append(value)
        planes.append(meta)
    return planes
|
python
|
def determine_hostname(display_name=None):
    """
    Find fqdn if we can
    """
    # An explicitly provided display name always wins.
    if display_name:
        return display_name
    hostname = socket.gethostname()
    fqdn = socket.getfqdn()
    try:
        ex_name = socket.gethostbyname_ex(hostname)[0]
    except (LookupError, socket.gaierror):
        ex_name = ''
    # Prefer a longer (more qualified) name when one exists and is usable.
    if len(fqdn) > len(hostname) or len(ex_name) > len(hostname):
        if ex_name and "localhost" not in ex_name:
            return ex_name
        if "localhost" not in fqdn:
            return fqdn
    return hostname
|
java
|
/**
 * Record an included directory and, in fast mode, recurse into it.
 *
 * @param name path of the directory relative to the base directory
 * @param file the directory itself
 * @param fast whether a fast scan is in progress (recurse immediately)
 */
private void accountForIncludedDir(String name, File file, boolean fast) {
    processIncluded(name, file, dirsIncluded, dirsExcluded, dirsDeselected);
    // Only descend when the dir could still match an include pattern and
    // its contents are not wholly excluded.
    if (fast && couldHoldIncluded(name) && !contentsExcluded(name)) {
        scandir(file, name + File.separator, fast);
    }
}
|
python
|
def get_spark_session(enable_hive=False, app_name='marvin-engine', configs=None):
    """Return a Spark Session object.

    :param enable_hive: enable Hive support on the session when True
    :param app_name: Spark application name
    :param configs: optional iterable of config objects, each applied via
        ``SparkSession.Builder.config``
    :return: a ``SparkSession`` instance
    """
    # Prepare spark context to be used
    import findspark
    findspark.init()
    from pyspark.sql import SparkSession
    # BUG FIX: avoid the mutable default argument `configs=[]`;
    # treat None as "no extra configs".
    if configs is None:
        configs = []
    # prepare spark session to be returned
    spark = SparkSession.builder
    spark = spark.appName(app_name)
    spark = spark.enableHiveSupport() if enable_hive else spark
    for config in configs:
        spark = spark.config(config)
    return spark.getOrCreate()
|
python
|
def get_consumption(self, service_location_id, start, end, aggregation, raw=False):
    """
    Request Elektricity consumption and Solar production
    for a given service location.

    Parameters
    ----------
    service_location_id : int
    start : int | dt.datetime | pd.Timestamp
    end : int | dt.datetime | pd.Timestamp
        start and end support epoch (in milliseconds),
        datetime and Pandas Timestamp
    aggregation : int
        1 = 5 min values (only available for the last 14 days)
        2 = hourly values
        3 = daily values
        4 = monthly values
        5 = quarterly values
    raw : bool
        default False
        if True: Return the data "as is" from the server
        if False: convert the 'alwaysOn' value to Wh.
        (the server returns this value as the sum of the power,
        measured in 5 minute blocks. This means that it is 12 times
        higher than the consumption in Wh.
        See https://github.com/EnergieID/smappy/issues/24)

    Returns
    -------
    dict
    """
    endpoint = urljoin(URLS['servicelocation'], service_location_id,
                       "consumption")
    data = self._get_consumption(url=endpoint, start=start, end=end,
                                 aggregation=aggregation)
    if raw:
        return data
    # Convert the server's summed-power 'alwaysOn' figure to Wh.
    for block in data['consumptions']:
        if 'alwaysOn' not in block:
            # Field absent: stop converting (server omitted it).
            break
        block['alwaysOn'] = block['alwaysOn'] / 12
    return data
|
java
|
/**
 * Map a resource-ref auth constant to the corresponding enum value.
 * CONTAINER is the default; APPLICATION only when explicitly requested.
 */
static AuthenticationType convertAuthToEnum(int resAuthType)
{
    final AuthenticationType authType =
        (resAuthType == ResourceRef.AUTH_APPLICATION)
            ? AuthenticationType.APPLICATION
            : AuthenticationType.CONTAINER;
    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
        Tr.debug(tc, "convertAuthToEnum : " + resAuthType + " -> " + authType);
    return authType;
}
|
java
|
/** Return a copy of {@code ar} grown by one, with {@code e} in the last slot. */
public static <T> T[] add(final T[] ar, final T e) {
    final T[] grown = Arrays.copyOf(ar, ar.length + 1);
    grown[grown.length - 1] = e;
    return grown;
}
|
python
|
def calculate_pore_volume(self):
    """
    Return the intrinsic pore volume.

    Returns
    -------
    :class:`float`
        The intrinsic pore volume.
    """
    # Volume of the sphere whose diameter is the computed pore diameter.
    radius = self.calculate_pore_diameter() / 2
    volume = sphere_volume(radius)
    # Cache on the instance and in the properties mapping.
    self.pore_volume = volume
    self.properties['pore_volume'] = volume
    return volume
|
java
|
/**
 * Marshall the given parameter object to the protocol marshaller.
 *
 * @param resourceConfiguration the object to marshall; must not be null
 * @param protocolMarshaller the marshaller receiving each bound field
 * @throws SdkClientException if the input is null or marshalling fails
 */
public void marshall(ResourceConfiguration resourceConfiguration, ProtocolMarshaller protocolMarshaller) {
    if (resourceConfiguration == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(resourceConfiguration.getComputeType(), COMPUTETYPE_BINDING);
        protocolMarshaller.marshall(resourceConfiguration.getVolumeSizeInGB(), VOLUMESIZEINGB_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure in the SDK's client exception type.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
java
|
/**
 * Unmarshal the XML content of the given resource, using the servlet request
 * as a per-request cache keyed by the resource's root path.
 *
 * @param cms the current CMS context
 * @param resource the resource to unmarshal; must be of type XML content
 * @param req the servlet request used as a cache
 * @return the unmarshalled XML content
 * @throws CmsXmlException if the resource is not an XML content or parsing fails
 * @throws CmsLoaderException if the resource type cannot be resolved
 * @throws CmsException if reading the file fails
 */
public static CmsXmlContent unmarshal(CmsObject cms, CmsResource resource, ServletRequest req)
throws CmsXmlException, CmsLoaderException, CmsException {
    String rootPath = resource.getRootPath();
    if (!CmsResourceTypeXmlContent.isXmlContent(resource)) {
        // sanity check: resource must be of type XML content
        throw new CmsXmlException(
            Messages.get().container(Messages.ERR_XMLCONTENT_INVALID_TYPE_1, cms.getSitePath(resource)));
    }
    // try to get the requested content from the current request attribute
    // this is also necessary for historic versions that have been loaded
    CmsXmlContent content = (CmsXmlContent)req.getAttribute(rootPath);
    if (content == null) {
        // unmarshal XML structure from the file content
        CmsFile file = resource instanceof CmsFile ? (CmsFile)resource : cms.readFile(resource);
        content = unmarshal(cms, file);
        // store the content as request attribute for future read requests
        req.setAttribute(rootPath, content);
    }
    // return the result
    return content;
}
|
java
|
/**
 * Connect to the device's GATT server and start delivering events.
 * The context and listener are stored first so callbacks triggered by the
 * connection attempt can reach them.
 *
 * @param context Android context used for the GATT connection
 * @param listener receiver for subsequent bean events
 */
public void connect(Context context, BeanListener listener) {
    lastKnownContext = context;
    beanListener = listener;
    gattClient.connect(context, device);
}
|
python
|
def parser(self):
    """Create the argparser_ for this configuration by adding all
    settings via the :meth:`Setting.add_argument` method.

    :rtype: an instance of :class:`ArgumentParser`.
    """
    base = argparse.ArgumentParser(description=self.description,
                                   epilog=self.epilog)
    # Standard --version flag backed by this configuration's version string.
    base.add_argument('--version',
                      action='version',
                      version=self.version)
    # Delegate the per-setting arguments to the configuration itself.
    return self.add_to_parser(base)
|
python
|
def get_parser():
    """Return :py:class:`argparse.ArgumentParser` instance for CLI.

    Defines the ``-c/--config``, ``-p/--push``, ``-d/--dirmatch``,
    ``--log-level``, ``-e/--expand-env``, ``-f/--force`` and ``-j/--jobs``
    options plus the positional ``command`` argument.
    """
    main_parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter)
    # Attach a file completer (argcomplete) restricted to config extensions.
    main_parser.add_argument(
        '-c', '--config',
        dest='config',
        type=str,
        nargs='?',
        help='Pull the latest repositories from config(s)'
    ).completer = argcomplete.completers.FilesCompleter(
        allowednames=('.yaml', '.yml', '.json'), directories=False
    )
    main_parser.add_argument(
        '-p', '--push',
        dest='do_push',
        action='store_true', default=False,
        help='Push result to target',
    )
    main_parser.add_argument(
        '-d', '--dirmatch',
        dest='dirmatch',
        type=str,
        nargs='?',
        # BUG FIX: the two implicitly-concatenated literals were missing a
        # separating space ("fnmatch(1)by commands").
        help='Pull only from the directories. Accepts fnmatch(1) '
             'by commands'
    )
    main_parser.add_argument(
        '--log-level',
        default='INFO',
        dest='log_level',
        type=_log_level_string_to_int,
        nargs='?',
        help='Set the logging output level. {0}'.format(_LOG_LEVEL_STRINGS))
    main_parser.add_argument(
        '-e', '--expand-env',
        dest='expand_env',
        default=False,
        action='store_true',
        help='Expand environment variables in configuration file',
    )
    main_parser.add_argument(
        '-f', '--force',
        dest='force',
        default=False,
        action='store_true',
        help='Force cleanup and aggregation on dirty repositories.',
    )
    main_parser.add_argument(
        '-j', '--jobs',
        dest='jobs',
        default=1,
        type=int,
        help='Amount of processes to use when aggregating repos. '
             'This is useful when there are a lot of large repos. '
             'Set `1` or less to disable multiprocessing (default).',
    )
    main_parser.add_argument(
        'command',
        nargs='?',
        default='aggregate',
        help='aggregate (default): run the aggregation process.\n'
             'show-all-prs: show GitHub pull requests in merge sections\n'
             '    such pull requests are indentified as having\n'
             '    a github.com remote and a\n'
             '    refs/pull/NNN/head ref in the merge section.\n'
             'show-closed-prs: show pull requests that are not open anymore.\n'
    )
    return main_parser
|
java
|
/**
 * Decide whether the principal may subscribe to the given portlet, using the
 * subscribe permission that matches the portlet's current lifecycle state.
 *
 * @param principal the principal being checked
 * @param portletDefinitionId id of the portlet definition
 * @return true when the principal holds the state-specific subscribe permission
 * @throws AuthorizationException for an unrecognized lifecycle state
 */
@Override
@RequestCache
public boolean canPrincipalSubscribe(
        IAuthorizationPrincipal principal, String portletDefinitionId) {
    String owner = IPermission.PORTAL_SUBSCRIBE;
    // retrieve the indicated channel from the channel registry store and
    // determine its current lifecycle state
    IPortletDefinition portlet =
            this.portletDefinitionRegistry.getPortletDefinition(portletDefinitionId);
    if (portlet == null) {
        // Unknown portlet: never subscribable.
        return false;
    }
    String target = PermissionHelper.permissionTargetIdForPortletDefinition(portlet);
    PortletLifecycleState state = portlet.getLifecycleState();
    /*
     * Each channel lifecycle state now has its own subscribe permission. The
     * following logic checks the appropriate permission for the lifecycle.
     */
    String permission;
    if (state.equals(PortletLifecycleState.PUBLISHED)
            || state.equals(PortletLifecycleState.MAINTENANCE)) {
        // NB: There is no separate SUBSCRIBE permission for MAINTENANCE
        // mode; everyone simply sees the 'out of service' message
        permission = IPermission.PORTLET_SUBSCRIBER_ACTIVITY;
    } else if (state.equals(PortletLifecycleState.APPROVED)) {
        permission = IPermission.PORTLET_SUBSCRIBER_APPROVED_ACTIVITY;
    } else if (state.equals(PortletLifecycleState.CREATED)) {
        permission = IPermission.PORTLET_SUBSCRIBER_CREATED_ACTIVITY;
    } else if (state.equals(PortletLifecycleState.EXPIRED)) {
        permission = IPermission.PORTLET_SUBSCRIBER_EXPIRED_ACTIVITY;
    } else {
        throw new AuthorizationException(
                "Unrecognized lifecycle state for channel " + portletDefinitionId);
    }
    // Test the appropriate permission.
    return doesPrincipalHavePermission(principal, owner, permission, target);
}
|
python
|
def importaccount(ctx, account, role):
    """Import an account's key for the given role using the account password.

    Derives the role key from the interactively-prompted password, verifies
    the derived public key against the on-chain account authorities, and
    stores the matching private key in the local wallet.

    :param ctx: CLI context carrying the peerplays instance
    :param account: account name to import
    :param role: one of "owner", "active" or "memo"
    """
    from peerplaysbase.account import PasswordKey
    password = click.prompt("Account Passphrase", hide_input=True)
    account = Account(account, peerplays_instance=ctx.peerplays)
    imported = False
    if role == "owner":
        owner_key = PasswordKey(account["name"], password, role="owner")
        owner_pubkey = format(
            owner_key.get_public_key(), ctx.peerplays.rpc.chain_params["prefix"]
        )
        # Only import when the derived key is one of the owner authorities.
        if owner_pubkey in [x[0] for x in account["owner"]["key_auths"]]:
            click.echo("Importing owner key!")
            owner_privkey = owner_key.get_private_key()
            ctx.peerplays.wallet.addPrivateKey(owner_privkey)
            imported = True
    if role == "active":
        active_key = PasswordKey(account["name"], password, role="active")
        active_pubkey = format(
            active_key.get_public_key(), ctx.peerplays.rpc.chain_params["prefix"]
        )
        # Only import when the derived key is one of the active authorities.
        if active_pubkey in [x[0] for x in account["active"]["key_auths"]]:
            click.echo("Importing active key!")
            active_privkey = active_key.get_private_key()
            ctx.peerplays.wallet.addPrivateKey(active_privkey)
            imported = True
    if role == "memo":
        memo_key = PasswordKey(account["name"], password, role=role)
        memo_pubkey = format(
            memo_key.get_public_key(), ctx.peerplays.rpc.chain_params["prefix"]
        )
        # The memo key is a single key, not a weighted authority list.
        if memo_pubkey == account["memo_key"]:
            click.echo("Importing memo key!")
            memo_privkey = memo_key.get_private_key()
            ctx.peerplays.wallet.addPrivateKey(memo_privkey)
            imported = True
    if not imported:
        click.echo("No matching key(s) found. Password correct?")
|
java
|
/**
 * Resolve every accumulator's (possibly lazily-computed) value and return
 * the results keyed by accumulator name.
 */
public Map<String, Object> getAllAccumulatorResults() {
    return accumulatorResults.entrySet()
            .stream()
            .collect(Collectors.toMap(
                    entry -> entry.getKey(),
                    entry -> entry.getValue().getUnchecked()));
}
|
python
|
def join_url_params(dic):
    """Build a URL query string such as ``?key1=value1&key2=value2``.

    :param:
        * dic: (dict) mapping of parameter names to values
    :return:
        * result: (string) the assembled query string

    Example::

        print('--- join_url_params demo ---')
        dic1 = {'key1': 'value1', 'key2': 'value2'}
        print(join_url_params(dic1))
        print('---')

    Output::

        --- join_url_params demo ---
        ?key1=value1&key2=value2
        ---
    """
    # Sort keys so output is deterministic regardless of dict insertion order.
    ordered = OrderedDict(sorted(dic.items()))
    # urlencode handles escaping; '?' marks the start of the query string.
    return '?' + urlencode(ordered)
|
java
|
/**
 * Style the given element. When the DYNAMICDATA setting is true, the
 * per-call data is converted and stored first; the element itself is
 * returned unmodified.
 *
 * @param element the element to style
 * @param data per-call styling data
 * @return the element, unchanged
 * @throws VectorPrintException propagated from conversion/storage
 */
@Override
public <E> E style(E element, Object data) throws VectorPrintException {
    if (getValue(DYNAMICDATA, Boolean.class)) {
        setData(convert(data));
    }
    return element;
}
|
python
|
def upload_file(self, file):
    """Post *file* to the remote server and return the parsed upload response."""
    # Resolve the upload endpoint on the remote API.
    url = self._get_url('/api/1.0/upload/post')
    fcontent = FileContent(file)
    binary_data = fcontent.get_binary()
    headers = self._get_request_headers()
    req = urllib.request.Request(url, binary_data, headers)
    # Describe the raw payload; content type comes from the file wrapper.
    req.add_header('Content-type', fcontent.get_content_type())
    req.add_header('Content-length', len(binary_data))
    resp = urllib.request.urlopen(req)
    return definition.UploadPostResponse(_response_to_json(resp))
|
python
|
def get_alert(self, id, **kwargs):  # noqa: E501
    """Get a specific alert  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_alert(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: (required)
    :return: ResponseContainerAlert
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Async callers get the request thread; sync callers get the data itself.
    if kwargs.get('async_req'):
        return self.get_alert_with_http_info(id, **kwargs)  # noqa: E501
    return self.get_alert_with_http_info(id, **kwargs)  # noqa: E501
|
java
|
/**
 * Unregister this MBean from the platform MBean server, if it was ever
 * registered. Idempotent: subsequent calls are no-ops.
 */
public void deregister() {
    // Nothing to do when no MBean name was ever assigned.
    if (objectName == null) {
        return;
    }
    try {
        final MBeanServer server = ManagementFactory.getPlatformMBeanServer();
        if (server.isRegistered(objectName)) {
            server.unregisterMBean(objectName);
        }
        // Clear only after successful unregistration so a failure can be retried.
        objectName = null;
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
|
java
|
/**
 * Marshall the given request object to the protocol marshaller.
 *
 * @param registerDeviceRequest the request to marshall; must not be null
 * @param protocolMarshaller the marshaller receiving each bound field
 * @throws SdkClientException if the input is null or marshalling fails
 */
public void marshall(RegisterDeviceRequest registerDeviceRequest, ProtocolMarshaller protocolMarshaller) {
    if (registerDeviceRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(registerDeviceRequest.getIdentityPoolId(), IDENTITYPOOLID_BINDING);
        protocolMarshaller.marshall(registerDeviceRequest.getIdentityId(), IDENTITYID_BINDING);
        protocolMarshaller.marshall(registerDeviceRequest.getPlatform(), PLATFORM_BINDING);
        protocolMarshaller.marshall(registerDeviceRequest.getToken(), TOKEN_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure in the SDK's client exception type.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
java
|
/** Return the median of three values under the given comparator. */
private static int med3(int a, int b, int c, IntComparator comp) {
    final int ab = comp.compare(a, b);
    final int ac = comp.compare(a, c);
    final int bc = comp.compare(b, c);
    if (ab < 0) {
        // a < b: median is b unless c sits between them (or below a).
        if (bc < 0)
            return b;
        return (ac < 0) ? c : a;
    }
    // a >= b: mirror case.
    if (bc > 0)
        return b;
    return (ac > 0) ? c : a;
}
|
java
|
@Override
public InstallPolicy getInstallPolicy() {
    // An asset without WLP information has no install policy to report.
    if (_asset.getWlpInformation() != null) {
        return _asset.getWlpInformation().getInstallPolicy();
    }
    return null;
}
|
java
|
/**
 * Fetch the rewrite aliases for the given site root via the security manager.
 */
public List<CmsRewriteAlias> getRewriteAliases(CmsObject cms, String siteRoot) throws CmsException {
    // Filter restricted to the requested site root.
    return m_securityManager.getRewriteAliases(
        cms.getRequestContext(),
        new CmsRewriteAliasFilter().setSiteRoot(siteRoot));
}
|
java
|
/**
 * Initialize this task and start the background thread that drains the
 * receive queue.
 *
 * @param server the remote server task
 * @param messageQueue the message queue backing this connection
 * @throws RemoteException propagated from the superclass initialization
 */
public void init(RemoteTask server, BaseMessageQueue messageQueue)
    throws RemoteException
{
    super.init(server, messageQueue);
    // Worker thread continuously processes incoming queue messages.
    m_receiveQueueThread = new Thread(new ReceiveQueueWorker(), "ReceiveQueueWorker");
    m_receiveQueueThread.start();
}
|
python
|
def deploy(self, args, **extra_args):
    """Deploy a docker container to a specific container ship (host)

    :param args: parsed CLI arguments (data_center, environment, service, tag, env)
    :type args: argparse.Namespace
    :raises TypeError: if ``args`` is not an ``argparse.Namespace``
    """
    if not isinstance(args, argparse.Namespace):
        # BUG FIX: logger.error() returns None, so the original code raised
        # TypeError(None). Log the message, then raise it.
        message = "args should of an instance of argparse.Namespace"
        logger.error(message)
        raise TypeError(message)
    # create new freight forwarder
    freight_forwarder = FreightForwarder()
    # create commercial invoice this is the contact given to freight forwarder to dispatch containers and images
    commercial_invoice = freight_forwarder.commercial_invoice(
        'deploy',
        args.data_center,
        args.environment,
        args.service
    )
    # deploy containers.
    bill_of_lading = freight_forwarder.deploy_containers(commercial_invoice, args.tag, args.env)
    # Non-zero exit on failed deployment; success falls through without exiting.
    exit_code = 0 if bill_of_lading else 1
    if exit_code != 0:
        exit(exit_code)
|
java
|
/**
 * Print matrix {@code m} with the given control settings, adapting the
 * {@link NumberFormat} to the formatter-based overload.
 */
public static void print(Matrix m, Control control, NumberFormat format) {
    print(m, control, new NumberFormatNumberFormatter(format));
}
|
java
|
/** Add each address as a BCC recipient; returns {@code this} for chaining. */
public Email bcc(EmailAddress... bcc) {
    for (int i = 0; i < bcc.length; i++) {
        recipient(bcc[i], RecipientType.BCC);
    }
    return this;
}
|
python
|
def is_multifile_object_without_children(self, location: str) -> bool:
    """
    Return True if an item at *location* is a multifile object without children.

    For this implementation, this means that there is a file with the
    appropriate name but without extension.

    :param location: path to check
    :return: True when the location is a childless multifile object
    """
    # Special case: the location is itself the root folder containing the files.
    if isdir(location):
        return len(self.find_multifile_object_children(location)) == 0
    # TODO same comment than in find_multifile_object_children
    # A bare file without extension is accepted as a childless multifile object.
    return exists(location)
|
java
|
/**
 * Build the response payload for an OAuth2 device-token request.
 *
 * @param result access-token response result carrying settings and the generated token
 * @return model with verification URI, expiry, optional user/device codes and polling interval
 */
protected Map getDeviceTokenResponseModel(final OAuth20AccessTokenResponseResult result) {
    val model = new LinkedHashMap<String, Object>();
    // Verification URI the end user must visit: <server prefix>/oauth2.0/<device authz url>
    val uri = result.getCasProperties().getServer().getPrefix()
        .concat(OAuth20Constants.BASE_OAUTH20_URL)
        .concat("/")
        .concat(OAuth20Constants.DEVICE_AUTHZ_URL);
    model.put(OAuth20Constants.DEVICE_VERIFICATION_URI, uri);
    model.put(OAuth20Constants.EXPIRES_IN, result.getDeviceTokenTimeout());
    val generatedToken = result.getGeneratedToken();
    // Codes are optional; only emit those the generated token actually carries.
    generatedToken.getUserCode().ifPresent(c -> model.put(OAuth20Constants.DEVICE_USER_CODE, c));
    generatedToken.getDeviceCode().ifPresent(c -> model.put(OAuth20Constants.DEVICE_CODE, c));
    model.put(OAuth20Constants.DEVICE_INTERVAL, result.getDeviceRefreshInterval());
    return model;
}
|
python
|
def create_examples_train(candidate_dialog_paths, rng, positive_probability=0.5, max_context_length=20):
    """
    Create one training example per candidate dialog.

    :param candidate_dialog_paths: list of dialog file paths
    :param rng: random number generator used for sampling
    :param positive_probability: probability of selecting a positive training example
    :param max_context_length: maximum number of turns kept as context
    :return: list of training examples, one per dialog path
    """
    examples = []
    for i, dialog_path in enumerate(candidate_dialog_paths):
        # Progress indicator for long-running dataset generation.
        if i % 1000 == 0:
            print(i)
        examples.append(create_single_dialog_train_example(
            dialog_path, candidate_dialog_paths, rng, positive_probability,
            max_context_length=max_context_length))
    # BUG FIX: the original built `examples` but never returned it.
    return examples
|
java
|
/**
 * Add the given nssimpleacl6 resources in one bulk request.
 *
 * @param client the nitro service used to issue the request
 * @param resources the ACL resources to add; no-op when null or empty
 * @return the bulk responses, or null when there was nothing to add
 * @throws Exception propagated from the bulk request
 */
public static base_responses add(nitro_service client, nssimpleacl6 resources[]) throws Exception {
    base_responses result = null;
    if (resources != null && resources.length > 0) {
        // Copy only the writable fields into fresh request objects.
        nssimpleacl6 addresources[] = new nssimpleacl6[resources.length];
        for (int i=0;i<resources.length;i++){
            addresources[i] = new nssimpleacl6();
            addresources[i].aclname = resources[i].aclname;
            addresources[i].td = resources[i].td;
            addresources[i].aclaction = resources[i].aclaction;
            addresources[i].srcipv6 = resources[i].srcipv6;
            addresources[i].destport = resources[i].destport;
            addresources[i].protocol = resources[i].protocol;
            addresources[i].ttl = resources[i].ttl;
        }
        result = add_bulk_request(client, addresources);
    }
    return result;
}
|
python
|
def stringify_summary(summary):
    """Stringify summary in place so it can be dumped to JSON and rendered
    into an HTML report.
    """
    for index, suite_summary in enumerate(summary["details"]):
        # Fall back to a positional name when the suite has none.
        if not suite_summary.get("name"):
            suite_summary["name"] = "testcase {}".format(index)
        for record in suite_summary.get("records"):
            meta_datas = record['meta_datas']
            __stringify_meta_datas(meta_datas)
            expanded = []
            __expand_meta_datas(meta_datas, expanded)
            record["meta_datas_expanded"] = expanded
            record["response_time"] = __get_total_response_time(expanded)
|
java
|
@Override
public ListOTAUpdatesResult listOTAUpdates(ListOTAUpdatesRequest request) {
    // Run the pre-execution hooks, then dispatch the prepared request.
    final ListOTAUpdatesRequest prepared = beforeClientExecution(request);
    return executeListOTAUpdates(prepared);
}
|
python
|
def is_answer_valid(self, ans):
    """Validate user's answer against available choices."""
    # Valid answers are the 1-based choice indices as strings.
    valid_answers = {str(index + 1) for index in range(len(self.choices))}
    return ans in valid_answers
|
python
|
def get_api_url(self, lambda_name, stage_name):
    """
    Given a lambda_name and stage_name, return a valid API URL.
    """
    api_id = self.get_api_id(lambda_name)
    # No API gateway registered for this lambda: nothing to build.
    if not api_id:
        return None
    region = self.boto_session.region_name
    return "https://{}.execute-api.{}.amazonaws.com/{}".format(api_id, region, stage_name)
|
python
|
def disable_contact_host_notifications(self, contact):
    """Disable host notifications for a contact

    Format of the line that triggers function call::

        DISABLE_CONTACT_HOST_NOTIFICATIONS;<contact_name>

    :param contact: contact to disable
    :type contact: alignak.objects.contact.Contact
    :return: None
    """
    # Only act (and broadcast an update brok) when currently enabled.
    if contact.host_notifications_enabled:
        # Record which attribute changed for MODATTR bookkeeping.
        contact.modified_attributes |= DICT_MODATTR["MODATTR_NOTIFICATIONS_ENABLED"].value
        contact.host_notifications_enabled = False
        self.send_an_element(contact.get_update_status_brok())
|
java
|
/**
 * Return the shared immutable empty result, cast to the requested type
 * parameters (safe because EMPTY carries no values of those types).
 */
@SuppressWarnings("unchecked")
public static <S, I, O> ExtensionResult<S, I, O> empty() {
    return (ExtensionResult<S, I, O>) EMPTY;
}
|
python
|
def padded_accuracy(predictions,
                    labels,
                    weights_fn=common_layers.weights_nonzero):
  """Percentage of times that predictions matches labels on non-0s.

  Returns a (per-position correctness, weights) pair of tensors; the caller
  combines them into a weighted accuracy.
  """
  # If the last dimension is 1 then we're using L1/L2 loss.
  if common_layers.shape_list(predictions)[-1] == 1:
    return rounding_accuracy(predictions, labels, weights_fn=weights_fn)
  with tf.variable_scope("padded_accuracy", values=[predictions, labels]):
    # Pad both tensors to a common shape before comparing.
    padded_predictions, padded_labels = common_layers.pad_with_zeros(
        predictions, labels)
    # weights_fn masks out padding (zeros by default) from the accuracy.
    weights = weights_fn(padded_labels)
    outputs = tf.to_int32(tf.argmax(padded_predictions, axis=-1))
    padded_labels = tf.to_int32(padded_labels)
    return tf.to_float(tf.equal(outputs, padded_labels)), weights
|
java
|
/**
 * Normalize a type for descendant lookup: parameterized types are looked up
 * by their raw class; all other types pass through unchanged.
 */
private static Type processTypeForDescendantLookup(Type type) {
    return (type instanceof ParameterizedType)
            ? ((ParameterizedType) type).getRawType()
            : type;
}
|
java
|
/**
 * Create a launch for the given program class with no extra configuration.
 *
 * @param programClass the program implementation to launch
 * @return a new launch handle
 * @throws LaunchException if the launch cannot be created
 */
public <T> Launch<T> newLaunch(Class<? extends T> programClass)
    throws LaunchException {
    return newLaunch(programClass, null);
}
|
python
|
def get_prep_value(self, value):
    """Return the field's value prepared for saving into a database."""
    cleaned = self.get_clean_value(value)
    if self.multiple:
        # Multi-valued fields are persisted as a comma-separated string.
        cleaned = ",".join(cleaned) if cleaned else ""
    return super(CharField, self).get_prep_value(cleaned)
|
python
|
def iterate_sheets(self, *args, **kwargs):
    """Open ``self.filename`` and yield its rows parsed with :mod:`csv`.

    When the filename ends with ``.gz`` (or ``self.is_gzipped`` is set) the
    file is transparently decompressed with :mod:`gzip` before being passed
    to ``csv.reader``. A non-string ``self.filename`` is assumed to be a
    file-like object and is handed directly to ``csv.reader``.
    """
    if not isinstance(self.filename, str):
        # File-like object (or iterable of lines): parse it as-is.
        yield list(csv.reader(self.filename, *args, **kwargs))
        return
    compressed = self.filename.endswith(".gz") or self.is_gzipped
    opener, mode = (gzip.open, "rt") if compressed else (open, "r")
    with opener(self.filename, mode) as handle:
        yield list(csv.reader(handle, *args, **kwargs))
|
python
|
def enumerateURL(urlDict, outputFolder, startIndex= 0, maxErrors = 100):
    """
    Enumerate profile pages by substituting an increasing index into each URL.

    :param urlDict: mapping of URL templates (containing "<INDEX>") to an
        optional marker string that must appear in the downloaded data.
    :param outputFolder: folder where downloaded resources are stored.
    :param startIndex: first index value to try for each URL template.
    :param maxErrors: stop a URL after this many consecutive download errors.
    """
    for i, url in enumerate(urlDict.keys()):
        # Grabbing domain name:
        domain = re.findall("://(.*)/", url)[0]
        # Defining the starting index
        index = startIndex
        # The app will stop when this value reaches maxErrors
        consecutiveErrors = 0
        i3Browser = browser.Browser()
        # Main loop that checks if the maximum number of errors has been reached
        while consecutiveErrors <= maxErrors:
            # creating the new URL to download
            newQuery = url.replace("<INDEX>", str(index))
            print(newQuery)
            # Downloading the file
            try:
                data = i3Browser.recoverURL(newQuery)
                filename = domain.replace("/", "|") + "_" + "-profile_" + str(index).rjust(10, "0") +".html"
                if urlDict[url] != None:
                    if urlDict[url] in data:
                        print(general.info("Storing resource as:\t" + filename + "..."))
                        # The profile was found so we will store it:
                        with open( outputFolder + "/" + filename, "w") as oF:
                            oF.write(data)
                else:
                    # The profile was found so we will store it:
                    print(general.info("Storing resource as:\t" + filename + "..."))
                    with open( outputFolder + "/" + filename, "w") as oF:
                        oF.write(data)
                # A successful download resets the consecutive-error streak.
                consecutiveErrors = 0
            except Exception:
                # BUG FIX: the error counter was never incremented, so the
                # loop could never terminate on repeated failures.
                consecutiveErrors += 1
                #logger.error("The resource could not be downloaded.")
            index+=1
|
python
|
def inserir(self, name):
    """Inserts a new Division Dc and returns its identifier.

    :param name: Division Dc name. String with a minimum 2 and maximum of 80 characters

    :return: Dictionary with the following structure:

    ::

        {'division_dc': {'id': < id_division_dc >}}

    :raise InvalidParameterError: Name is null and invalid.
    :raise NomeDivisaoDcDuplicadoError: There is already a registered Division Dc with the value of name.
    :raise DataBaseError: Networkapi failed to access the database.
    :raise XMLError: Networkapi failed to generate the XML response.
    """
    # Wrap the name in the payload shape the API expects.
    payload = {'division_dc': {'name': name}}
    code, xml = self.submit(payload, 'POST', 'divisiondc/')
    return self.response(code, xml)
|
java
|
/** Return the numerical rank of the decomposed matrix, using {@code tol}. */
public int rank() {
    // Dispatch on the element width chosen at construction time.
    if (!is64) {
        return SingularOps_FDRM.rank((SingularValueDecomposition_F32)svd, (float)tol);
    }
    return SingularOps_DDRM.rank((SingularValueDecomposition_F64)svd, tol);
}
|
python
|
def eval_table(tbl, expression, vm='python', blen=None, storage=None,
               create='array', vm_kwargs=None, **kwargs):
    """Evaluate `expression` against columns of a table.

    The expression refers to column names as variables and is evaluated
    block by block; the output container is created lazily from the first
    block's result.

    :param tbl: table-like columnar data accepted by ``_util.check_table_like``
    :param expression: expression string referencing column names
    :param vm: 'python' or 'numexpr' evaluation backend
    :param blen: block length; chosen automatically when None
    :param storage: output storage backend; resolved by ``_util.get_storage``
    :param create: name of the storage factory method for the first block
    :param vm_kwargs: extra keyword arguments for the vm's evaluate function
    :param kwargs: passed through to the storage factory
    """
    # setup
    storage = _util.get_storage(storage)
    names, columns = _util.check_table_like(tbl)
    length = len(columns[0])
    if vm_kwargs is None:
        vm_kwargs = dict()
    # setup vm
    if vm == 'numexpr':
        import numexpr
        evaluate = numexpr.evaluate
    elif vm == 'python':
        # noinspection PyUnusedLocal
        def evaluate(expr, local_dict=None, **kw):
            # takes no keyword arguments
            # NOTE: eval() is intentional here; `expression` is assumed to be
            # trusted caller-supplied code, not external input.
            return eval(expr, dict(), local_dict)
    else:
        raise ValueError('expected vm either "numexpr" or "python"')
    # compile expression and get required columns
    variables = _get_expression_variables(expression, vm)
    required_columns = {v: columns[names.index(v)] for v in variables}
    # determine block size for evaluation
    blen = _util.get_blen_table(required_columns, blen=blen)
    # build output block by block; container created from the first block
    out = None
    for i in range(0, length, blen):
        j = min(i+blen, length)
        blocals = {v: c[i:j] for v, c in required_columns.items()}
        res = evaluate(expression, local_dict=blocals, **vm_kwargs)
        if out is None:
            out = getattr(storage, create)(res, expectedlen=length, **kwargs)
        else:
            out.append(res)
    return out
|
python
|
def closed(self, code, reason=None):
    """Called when the connection is closed.

    :param code: numeric close code from the socket layer
    :param reason: optional human-readable close reason
    """
    # Notify listeners first, then attempt a transparent reconnect.
    self.emit('socket_closed', code, reason)
    self._recover_network_failure()
|
java
|
/**
 * Loads an application project from a directory and aggregates errors into the result.
 * <p>
 * Loads, in order: the graph, the instances, the commands and the autonomic rules.
 * Files located in the wrong directory or never referenced by any entry point are
 * reported, and the whole application is validated at the end unless critical
 * errors were already found.
 * </p>
 *
 * @param projectDirectory the project's root directory
 * @param appDescriptor the application's descriptor (may be null)
 * @param result the load result to complete (errors and parsed files are accumulated in it)
 * @return the same result instance, completed
 */
private static ApplicationLoadResult loadApplication(
		File projectDirectory,
		ApplicationTemplateDescriptor appDescriptor,
		ApplicationLoadResult result ) {

	ApplicationTemplate app = result.applicationTemplate;
	result.applicationTemplate.setDirectory( projectDirectory );

	// Load the graph
	// The GRAPH label lets us abort this step early (missing entry point)
	// without skipping the steps that follow.
	File graphDirectory = new File( projectDirectory, Constants.PROJECT_DIR_GRAPH );
	GRAPH: if( ! graphDirectory.exists()) {
		RoboconfError error = new RoboconfError( ErrorCode.PROJ_NO_GRAPH_DIR, directory( projectDirectory ));
		result.loadErrors.add( error );

	} else if( appDescriptor != null
			&& ! Utils.isEmptyOrWhitespaces( appDescriptor.getGraphEntryPoint())) {

		File mainGraphFile = new File( graphDirectory, appDescriptor.getGraphEntryPoint());
		if( ! mainGraphFile.exists()) {
			RoboconfError error = new RoboconfError( ErrorCode.PROJ_MISSING_GRAPH_EP, expected( mainGraphFile.getAbsolutePath()));
			result.loadErrors.add( error );
			break GRAPH;
		}

		Graphs graphs = loadGraph( mainGraphFile, graphDirectory, result );
		app.setGraphs( graphs );
	}

	// Load the instances
	// Instances require a graph: without one, record an error and skip the step.
	File instDirectory = new File( projectDirectory, Constants.PROJECT_DIR_INSTANCES );
	INST: if( appDescriptor != null && instDirectory.exists()) {

		if( app.getGraphs() == null ) {
			result.loadErrors.add( new RoboconfError( ErrorCode.CO_GRAPH_COULD_NOT_BE_BUILT ));
			break INST;
		}

		// No instance entry point is a valid configuration: nothing to load.
		if( Utils.isEmptyOrWhitespaces( appDescriptor.getInstanceEntryPoint()))
			break INST;

		File mainInstFile = new File( instDirectory, appDescriptor.getInstanceEntryPoint());
		InstancesLoadResult ilr = loadInstances( mainInstFile, instDirectory, app.getGraphs(), app.getName());

		// Merge the instance load result into the application result.
		result.getParsedFiles().addAll( ilr.getParsedFiles());
		result.objectToSource.putAll( ilr.getObjectToSource());
		result.loadErrors.addAll( ilr.getLoadErrors());
		app.getRootInstances().addAll( ilr.getRootInstances());
	}

	// Commands
	// Collect command names so that autonomic rules can be checked against them below.
	File commandsDirectory = new File( projectDirectory, Constants.PROJECT_DIR_COMMANDS );
	List<String> commandNames = new ArrayList<> ();
	if( app.getGraphs() != null && commandsDirectory.exists()) {
		for( File f : Utils.listAllFiles( commandsDirectory )) {

			if( ! f.getName().endsWith( Constants.FILE_EXT_COMMANDS )) {
				result.loadErrors.add( new RoboconfError( ErrorCode.PROJ_INVALID_COMMAND_EXT ));

			} else {
				CommandsParser parser = new CommandsParser( app, f );
				result.loadErrors.addAll( parser.getParsingErrors());
				commandNames.add( f.getName().replace( Constants.FILE_EXT_COMMANDS, "" ));
			}
		}
	}

	// Autonomic
	File autonomicRulesDirectory = new File( projectDirectory, Constants.PROJECT_DIR_RULES_AUTONOMIC );
	if( app.getGraphs() != null && autonomicRulesDirectory.exists()) {
		for( File f : Utils.listAllFiles( autonomicRulesDirectory )) {

			if( ! f.getName().endsWith( Constants.FILE_EXT_RULE )) {
				result.loadErrors.add( new RoboconfError( ErrorCode.PROJ_INVALID_RULE_EXT ));

			} else {
				// Parsing errors
				RuleParser parser = new RuleParser( f );
				result.loadErrors.addAll( parser.getParsingErrors());

				// Invalid references to commands?
				// Whatever remains after removing known command names is unknown.
				List<String> coll = new ArrayList<>( parser.getRule().getCommandsToInvoke());
				coll.removeAll( commandNames );

				for( String commandName : coll )
					result.loadErrors.add( new RoboconfError( ErrorCode.RULE_UNKNOWN_COMMAND, name( commandName )));
			}
		}
	}

	// Check for files that are not reachable or not in the right directories
	// 'exts' and 'directories' are parallel arrays: each extension must live
	// under its matching directory.
	if( projectDirectory.isDirectory()) {
		String[] exts = { Constants.FILE_EXT_GRAPH, Constants.FILE_EXT_INSTANCES };
		File[] directories = { graphDirectory, instDirectory };
		for( int i=0; i<exts.length; i++ ) {

			List<File> files = Utils.listAllFiles( projectDirectory, exts[ i ]);
			List<File> filesWithInvalidLocation = new ArrayList<> ();
			for( File f : files ) {
				if( ! Utils.isAncestor( directories[ i ], f )) {
					result.loadErrors.add( new ParsingError( ErrorCode.PROJ_INVALID_FILE_LOCATION, f, 1 ));
					filesWithInvalidLocation.add( f );
				}
			}

			// Well-placed files that were never parsed are unreachable.
			files.removeAll( result.getParsedFiles());
			files.removeAll( filesWithInvalidLocation );
			for( File f : files )
				result.loadErrors.add( new ParsingError( ErrorCode.PROJ_UNREACHABLE_FILE, f, 1 ));
		}
	}

	// Validate the entire application
	// Skip validation if critical errors were found: the model may be unusable.
	if( ! RoboconfErrorHelpers.containsCriticalErrors( result.loadErrors )) {
		Collection<ModelError> errors = RuntimeModelValidator.validate( app );
		result.loadErrors.addAll( errors );
	}

	return result;
}
|
python
|
def wndifd(a, b):
    """
    Place the difference of two double precision windows into
    a third window.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wndifd_c.html

    :param a: Input window A.
    :type a: spiceypy.utils.support_types.SpiceCell
    :param b: Input window B.
    :type b: spiceypy.utils.support_types.SpiceCell
    :return: Difference of a and b.
    :rtype: spiceypy.utils.support_types.SpiceCell
    """
    # Both inputs must be double precision (dtype 1) SPICE cells.
    for window in (a, b):
        assert isinstance(window, stypes.SpiceCell)
        assert window.dtype == 1
    # The difference can hold at most every interval of both windows.
    out = stypes.SpiceCell.double(a.size + b.size)
    libspice.wndifd_c(ctypes.byref(a), ctypes.byref(b), ctypes.byref(out))
    return out
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.