language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
---|---|
python
|
def dump(self, o_name=None, details=False, raw=False):
    """Dump a host (or all hosts) from the arbiter.

    The arbiter will get the host (or all hosts) information from all its
    schedulers.

    This gets the main host information from the scheduler. If ``details`` is
    set, some more information are provided. This will not get all the host
    known attributes, only a reduced set that informs about the host and its
    services status.

    If ``raw`` is set, the information are provided as two lists of
    CSV-formatted strings: the first list element contains the hosts
    information and the second one contains the services information, e.g.::

        scheduler-master: [
            [
                "type;host;name;last_check;state_id;state;state_type;is_problem;is_impact;output",
                "localhost;host;localhost;1532451740;0;UP;HARD;False;False;Host assumed to be UP"
            ],
            [
                "type;host;name;last_check;state_id;state;state_type;is_problem;is_impact;output",
                "host_2;service;dummy_ok;1532452002;0;OK;HARD;False;True;host_2"
            ]
        ]

    More information are available in the scheduler corresponding API endpoint.

    :param o_name: searched host name (or uuid); if None, all the scheduler
        hosts are returned
    :type o_name: str
    :param details: if truthy, include extra host information
    :type details: bool
    :param raw: if truthy, return CSV-formatted string lists instead of dicts
    :type raw: bool
    :return: dictionary mapping each scheduler name to its dumped information
    :rtype: dict
    """
    # Normalize to plain booleans whatever truthy/falsy value was provided.
    details = bool(details)
    raw = bool(raw)

    res = {}
    for scheduler_link in self.app.conf.schedulers:
        sched_res = scheduler_link.con.get('dump',
                                           {'o_name': o_name,
                                            'details': '1' if details else '',
                                            'raw': '1' if raw else ''},
                                           wait=True)
        # Skip schedulers that reported an error status.
        if isinstance(sched_res, dict) and sched_res.get('_status') == 'ERR':
            continue
        res[scheduler_link.name] = sched_res
    return res
|
java
|
/**
 * Copies the organism metadata (scientific name, NCBI taxonomy id, common
 * name) from the entity source synonym record onto the entity info.
 */
private void addInfoFromESS(EntitySrcSyn ess, int eId, EntityInfo c) {
    // Plain setter calls; the three fields are copied independently.
    c.setOrganismScientific(ess.getOrganism_scientific());
    c.setOrganismTaxId(ess.getNcbi_taxonomy_id());
    c.setOrganismCommon(ess.getOrganism_common_name());
}
|
java
|
/**
 * Adds the given entity ARNs to this filter, lazily creating the backing
 * list on first use, and returns this filter for call chaining.
 */
public EventFilter withEntityArns(String... entityArns) {
    if (this.entityArns == null) {
        // Pre-size the list to the number of ARNs being added.
        setEntityArns(new java.util.ArrayList<String>(entityArns.length));
    }
    java.util.Collections.addAll(this.entityArns, entityArns);
    return this;
}
|
python
|
def handle(self, key, value):
    '''
    Processes a valid action info request.

    @param key: The key that matched the request, formatted as
        ``<action>:<spiderid>:<appid>[:<crawlid>]``
    @param value: The value associated with the key (the request uuid)
    '''
    # the master dict to return
    master = {
        'uuid': value,
        'total_pending': 0,
        'server_time': int(self.get_current_time()),
    }
    # break down the key into its components
    elements = key.split(":")
    # renamed from `dict` to avoid shadowing the builtin
    details = {
        'spiderid': elements[1],
        'appid': elements[2],
    }
    # log we received the info message
    extras = self.get_log_dict('info', details['appid'],
                               details['spiderid'], master['uuid'])
    if len(elements) == 4:
        # a crawl id was provided; rebuild the log context including it
        details['crawlid'] = elements[3]
        extras = self.get_log_dict('info', details['appid'],
                                   details['spiderid'], master['uuid'],
                                   elements[3])
    self.logger.info('Received info request', extra=extras)
    # generate the information requested
    if 'crawlid' in details:
        master = self._build_crawlid_info(master, details)
    else:
        master = self._build_appid_info(master, details)
    if self._send_to_kafka(master):
        extras['success'] = True
        self.logger.info('Sent info to kafka', extra=extras)
    else:
        extras['success'] = False
        self.logger.error('Failed to send info to kafka',
                          extra=extras)
|
java
|
/** Turns the given feature on for this collection; returns this for chaining. */
public JacksonDBCollection<T, K> enable(Feature feature) {
    // Autoboxing of `true` yields Boolean.TRUE, so this is identical to put(feature, true).
    features.put(feature, Boolean.TRUE);
    return this;
}
|
java
|
/**
 * Sets the FNNRG length attribute and, when notifications are required,
 * emits an EMF SET notification carrying the old and new values.
 * (EMF-generated accessor; keep in sync with the model.)
 *
 * @param newFNNRGLen the new length value (may be null)
 */
public void setFNNRGLen(Integer newFNNRGLen) {
    Integer oldFNNRGLen = fnnrgLen;
    fnnrgLen = newFNNRGLen;
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.FNC__FNNRG_LEN, oldFNNRGLen, fnnrgLen));
}
|
python
|
def check_lazy_load_wegsegment(f):
    '''
    Decorator function to lazy load a :class:`Wegsegment`.

    If any of the lazily-populated attributes is still missing on the
    wrapped object, the full object is fetched from the gateway before
    ``f`` is invoked.
    '''
    def wrapper(*args):
        segment = args[0]
        lazy_attrs = ('_methode_id', '_geometrie', '_metadata')
        if any(getattr(segment, name) is None for name in lazy_attrs):
            log.debug('Lazy loading Wegsegment %d', segment.id)
            segment.check_gateway()
            loaded = segment.gateway.get_wegsegment_by_id(segment.id)
            segment._methode_id = loaded._methode_id
            segment._geometrie = loaded._geometrie
            segment._metadata = loaded._metadata
        return f(*args)
    return wrapper
|
python
|
def get_event(self,
              request,
              tag='',
              matcher=prefix_matcher.__func__,
              callback=None,
              timeout=None
              ):
    '''
    Get an event (asynchronous of course) return a future that will get it later

    :param request: the HTTP request the event is fetched on behalf of; a
        request that already finished gets an immediately-failed future
    :param tag: event tag to wait for
    :param matcher: predicate used to match incoming tags against ``tag``
        (defaults to prefix matching)
    :param callback: optional callable scheduled on the IOLoop once the
        future resolves
    :param timeout: seconds after which ``_timeout_future`` fails the
        future; no timeout when falsy
    '''
    # if the request finished, no reason to allow event fetching, since we
    # can't send back to the client
    if request._finished:
        future = Future()
        future.set_exception(TimeoutException())
        return future
    future = Future()
    if callback is not None:
        def handle_future(future):
            # Hop onto the IOLoop before invoking the user callback.
            tornado.ioloop.IOLoop.current().add_callback(callback, future)
        future.add_done_callback(handle_future)
    # add this tag and future to the callbacks
    self.tag_map[(tag, matcher)].append(future)
    self.request_map[request].append((tag, matcher, future))
    if timeout:
        # Schedule the timeout and remember its handle so it can be cancelled.
        timeout_future = tornado.ioloop.IOLoop.current().call_later(timeout, self._timeout_future, tag, matcher, future)
        self.timeout_map[future] = timeout_future
    return future
|
java
|
/**
 * Returns the boolean stored at {@code index}, or {@code def} when the
 * element is absent or not a {@link Boolean}.
 *
 * @param index position in the backing array
 * @param def   fallback value
 * @return the stored boolean, or {@code def}
 */
public boolean getBoolean(int index, boolean def) {
    Object tmp = mArray.get(index);
    // `instanceof` is false for null, so the former explicit null check was redundant.
    return tmp instanceof Boolean ? ((Boolean) tmp).booleanValue() : def;
}
|
python
|
def feedback_summaries(self):
    """
    Access the feedback_summaries

    :returns: twilio.rest.api.v2010.account.call.feedback_summary.FeedbackSummaryList
    :rtype: twilio.rest.api.v2010.account.call.feedback_summary.FeedbackSummaryList
    """
    # Lazily build the list resource exactly once and reuse it afterwards.
    if self._feedback_summaries is None:
        account_sid = self._solution['account_sid']
        self._feedback_summaries = FeedbackSummaryList(self._version, account_sid=account_sid)
    return self._feedback_summaries
|
java
|
/**
 * Folds the other entry's data into this one, passing the time elapsed
 * since this entry's last update together with {@code negLambda}
 * (presumably a negative decay rate — confirm in {@code data.aggregate}),
 * then advances this entry's timestamp to {@code currentTime}.
 *
 * @param other       entry whose data is merged in
 * @param currentTime time of this aggregation; becomes the new timestamp
 * @param negLambda   rate parameter forwarded to the aggregation
 */
protected void aggregateEntry(Entry other, long currentTime,
        double negLambda) {
    this.data.aggregate(other.data, currentTime - this.timestamp,
            negLambda);
    this.timestamp = currentTime;
}
|
java
|
/**
 * Sets the element view name used by this component (stored as-is, may be
 * null) and logs the change at debug level.
 *
 * @param elementView the new element view name
 */
public void setElementView(String elementView) {
    m_elementView = elementView;
    if (LOG.isDebugEnabled()) {
        LOG.debug("Setting element view to " + elementView);
    }
}
|
java
|
/**
 * Appends the subsequence {@code [start, end)} of {@code csq} to this
 * writer.  A null sequence is treated as the four characters
 * {@code "null"}, matching the {@link Appendable} contract.
 */
public StringWriter append(CharSequence csq, int start, int end) {
    if (csq == null) {
        csq = "null";
    }
    write(csq.subSequence(start, end).toString());
    return this;
}
|
java
|
/**
 * Creates an operator that, when applied, sets this element to the given
 * value on a {@code PlainTime}.
 *
 * @param value the new element value
 * @return a value-setting time operator
 */
@Override
public ElementOperator<PlainTime> newValue(V value) {
    return new TimeOperator(this, ElementOperator.OP_NEW_VALUE, value);
}
|
java
|
/**
 * Fetches the next page of jobs asynchronously, unwrapping the service
 * response envelope so subscribers receive the page itself.
 *
 * @param nextPageLink the NextLink from a previous successful list call
 * @return an observable emitting the next {@code Page} of {@code JobInner}
 */
public Observable<Page<JobInner>> listByAgentNextAsync(final String nextPageLink) {
    return listByAgentNextWithServiceResponseAsync(nextPageLink)
        .map(new Func1<ServiceResponse<Page<JobInner>>, Page<JobInner>>() {
            @Override
            public Page<JobInner> call(ServiceResponse<Page<JobInner>> response) {
                // Strip the ServiceResponse wrapper; callers only need the body.
                return response.body();
            }
        });
}
|
java
|
/**
 * Marshalls the given {@code CoreDefinitionVersion} into the protocol
 * marshaller.
 *
 * @param coreDefinitionVersion the object to marshall, must not be null
 * @param protocolMarshaller    the destination marshaller
 * @throws SdkClientException if the argument is null or marshalling fails
 */
public void marshall(CoreDefinitionVersion coreDefinitionVersion, ProtocolMarshaller protocolMarshaller) {
    if (coreDefinitionVersion == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        // Only the cores list is marshalled for this shape.
        protocolMarshaller.marshall(coreDefinitionVersion.getCores(), CORES_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
java
|
/**
 * Checks annotation placement relative to modifiers and builds a suggested
 * fix: misplaced declaration annotations are moved before the first
 * modifier (preceded by any relocated dangling Javadoc) while type
 * annotations found between modifiers are moved after the last modifier.
 *
 * @param tree             the annotated declaration
 * @param treePos          start position of the declaration (used when
 *                         relocating the dangling Javadoc)
 * @param annotations      annotations present on the declaration
 * @param danglingJavadoc  Javadoc trapped among modifiers/annotations, or null
 * @param firstModifierPos position of the first modifier
 * @param lastModifierPos  position just past the last modifier
 * @param state            current visitor state
 * @return a description with the suggested fix, or NO_MATCH when all
 *         annotations are already well placed
 */
private Description checkAnnotations(
    Tree tree,
    int treePos,
    List<? extends AnnotationTree> annotations,
    Comment danglingJavadoc,
    int firstModifierPos,
    int lastModifierPos,
    VisitorState state) {
  SuggestedFix.Builder builder = SuggestedFix.builder();
  List<AnnotationTree> moveBefore = new ArrayList<>();
  List<AnnotationTree> moveAfter = new ArrayList<>();
  boolean annotationProblem = false;
  for (AnnotationTree annotation : annotations) {
    int annotationPos = ((JCTree) annotation).getStartPosition();
    // Annotations that already precede the modifiers are fine.
    if (annotationPos <= firstModifierPos) {
      continue;
    }
    AnnotationType annotationType =
        ASTHelpers.getAnnotationType(annotation, getSymbol(tree), state);
    if (annotationPos >= lastModifierPos) {
      // After all modifiers: only a problem for declaration annotations
      // (and for anything on a class, where type annotations do not apply).
      if (tree instanceof ClassTree || annotationType == AnnotationType.DECLARATION) {
        annotationProblem = true;
        moveBefore.add(annotation);
      }
    } else {
      // Between modifiers: always wrong; decide which side it moves to.
      annotationProblem = true;
      if (tree instanceof ClassTree
          || annotationType == AnnotationType.DECLARATION
          || annotationType == null) {
        moveBefore.add(annotation);
      } else {
        moveAfter.add(annotation);
      }
    }
  }
  if (annotationProblem) {
    // Delete the misplaced annotations, then re-insert their source text
    // at the proper positions, prefixed by any relocated Javadoc.
    for (AnnotationTree annotation : moveBefore) {
      builder.delete(annotation);
    }
    for (AnnotationTree annotation : moveAfter) {
      builder.delete(annotation);
    }
    String javadoc =
        danglingJavadoc == null ? "" : removeJavadoc(state, treePos, danglingJavadoc, builder);
    builder
        .replace(
            firstModifierPos,
            firstModifierPos,
            String.format("%s%s ", javadoc, joinSource(state, moveBefore)))
        .replace(
            lastModifierPos, lastModifierPos, String.format("%s ", joinSource(state, moveAfter)));
    ImmutableList<String> names =
        annotations.stream()
            .map(ASTHelpers::getSymbol)
            .filter(Objects::nonNull)
            .map(Symbol::getSimpleName)
            .map(a -> "@" + a)
            .collect(toImmutableList());
    String flattened = names.stream().collect(joining(", "));
    String isAre = names.size() > 1 ? "are not type annotations" : "is not a type annotation";
    String message =
        String.format(
            "%s %s, so should appear before any modifiers and after Javadocs.", flattened, isAre);
    return buildDescription(tree).setMessage(message).addFix(builder.build()).build();
  }
  return NO_MATCH;
}
|
python
|
def update_cached_response(self, request, response):
    """On a 304 we will get a new set of headers that we want to
    update our cached value with, assuming we have one.

    This should only ever be called when we've sent an ETag and
    gotten a 304 as the response.
    """
    cache_url = self.cache_url(request.url)
    cached_response = self.serializer.loads(request, self.cache.get(cache_url))
    if not cached_response:
        # Nothing cached for this URL; hand the 304 back untouched.
        return response
    # Refresh the stored headers from the 304, per
    # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1
    #
    # The server isn't supposed to send headers that would invalidate the
    # cached body, but strip the known-problematic ones just in case.
    excluded_headers = ["content-length"]
    fresh_headers = {
        name: val
        for name, val in response.headers.items()
        if name.lower() not in excluded_headers
    }
    cached_response.headers.update(fresh_headers)
    # Report 200 because we are serving content out of the cache.
    cached_response.status = 200
    # Persist the refreshed entry.
    self.cache.set(cache_url, self.serializer.dumps(request, cached_response))
    return cached_response
|
java
|
/**
 * Detaches the given listener: finds the transaction identifier mapped to
 * this exact listener instance and removes the mapping, if any.
 */
public void deattach(JainMgcpListener listener) {
    int identifier = -1;
    // Identity comparison: only the exact registered instance is removed.
    for (java.util.Map.Entry<Integer, JainMgcpListener> entry : txListeners.entrySet()) {
        if (entry.getValue() == listener) {
            identifier = entry.getKey();
            break;
        }
    }
    // -1 is the "not found" sentinel, matching the original contract.
    if (identifier != -1) {
        txListeners.remove(identifier);
    }
}
|
python
|
def get_dependencies(self):
    """
    Retrieve all dependencies for a given configuration.

    Returns:
        list(str): Paths of all dependencies for the tracked
            configuration, made relative to the current working
            directory.
    """
    suffix = 'sources'
    all_deps = OrderedSet()
    for key in list(self.__config):
        if key in self.__cli:
            # Command-line values override the config file; handled below.
            continue
        if key.endswith(suffix):
            # Strip the trailing 'sources' marker plus its separator
            # character (equivalent to the former len('sources') * -1 - 1).
            all_deps |= self.get_sources(key[:-len(suffix) - 1])
    for key in list(self.__cli):
        if key.endswith(suffix):
            all_deps |= self.get_sources(key[:-len(suffix) - 1])
    if self.conf_file is not None:
        all_deps.add(self.conf_file)
    all_deps.add(self.get_path("sitemap", rel_to_cwd=True))
    cwd = os.getcwd()
    # Falsy entries (e.g. a missing sitemap path) are dropped.
    return [os.path.relpath(fname, cwd) for fname in all_deps if fname]
|
java
|
/**
 * Builds an SSLContext initialized with the configured TRUST_MANAGERS
 * (an "issue-ignoring" context, per the error message — intended for
 * trust-everything diagnostic use, not production verification).
 *
 * NOTE(review): requests the legacy "SSL" protocol string; "TLS" is the
 * usual modern choice — confirm against the supported runtimes before
 * changing.
 *
 * @return an initialized SSLContext
 * @throws RuntimeException if the algorithm is unavailable or init fails
 */
public static SSLContext acceptingSslContext() {
    try {
        SSLContext sslContext = SSLContext.getInstance("SSL");
        sslContext.init(null, TRUST_MANAGERS, new SecureRandom());
        return sslContext;
    } catch (NoSuchAlgorithmException | KeyManagementException ex) {
        throw new RuntimeException("Unable to create issue-ignoring SSLContext: " + ex.getMessage(), ex);
    }
}
|
java
|
/**
 * Transfers the source skeleton's pose onto the destination skeleton:
 * copies the root position, maps each source bone's local rotation through
 * the bone map onto the destination pose, then applies that pose
 * (rotations only).
 *
 * @return false when either skeleton is missing, true on success
 */
public boolean mapLocalToTarget()
{
    GVRSkeleton srcskel = mSourceSkeleton;
    GVRSkeleton dstskel = mDestSkeleton;
    Vector3f v = new Vector3f();
    if ((dstskel == null) || (srcskel == null))
    {
        return false;
    }
    if (mBoneMap == null)
    {
        // Build the source -> destination bone index map lazily.
        mBoneMap = makeBoneMap(srcskel, dstskel);
    }
    GVRPose srcpose = srcskel.getPose();
    Quaternionf q = new Quaternionf();
    int numsrcbones = srcskel.getNumBones();
    mDestPose.clearRotations();
    srcskel.getPosition(v);
    dstskel.setPosition(v);
    for (int i = 0; i < numsrcbones; ++i)
    {
        int boneindex = mBoneMap[i];
        // Negative map entries mean the source bone has no destination match.
        if (boneindex >= 0)
        {
            srcpose.getLocalRotation(i, q);
            mDestPose.setLocalRotation(boneindex, q.x, q.y, q.z, q.w);
        }
    }
    dstskel.applyPose(mDestPose, GVRSkeleton.ROTATION_ONLY);
    return true;
}
|
java
|
/**
 * Decides whether this property is read-only: id/version properties never
 * are; an {@code @Indexed} annotation decides via its readonly flag; a
 * property with neither {@code @Indexed} nor a field annotation defaults
 * to read-only.
 */
@Override
public boolean isReadonly() {
    // Identifier and version properties are always writable.
    if (isIdProperty() || isVersionProperty()) {
        return false;
    }
    Indexed indexedAnnotation = getIndexAnnotation();
    if (indexedAnnotation != null) {
        // The annotation's flag is authoritative when present.
        return indexedAnnotation.readonly();
    }
    // No @Indexed: read-only exactly when there is no field annotation either.
    return getFieldAnnotation() == null;
}
|
python
|
def _windcalc(self, first, totalmass, nsteps, niso, ypssurf, ypsinit, \
X_i, E_i, cycles):
"""
This function calculates the windyields and ejected masses as called from
windyields(). It uses a summation version of the formulae used in Hirschi
et al. 2005, "Yields of rotating stars at solar metallicity".
If it is the first file, the arrays need to be created and the initial
abundances set
"""
if first == True:
X_i = np.zeros([niso], float)
E_i = np.zeros([niso], float)
ypsinit = ypssurf[0]
for m in range(niso):
for n in range(nsteps):
X_i[m] = X_i[m] + ((totalmass[n] - totalmass[n+1]) * \
(0.5 * (ypssurf[n][m] + ypssurf[n+1][m]) - ypsinit[m]))
E_i[m] = E_i[m] + ((totalmass[n] - totalmass[n+1]) * \
(0.5 * (ypssurf[n][m] + ypssurf[n+1][m])))
else:
for m in range(niso):
for n in range(nsteps):
X_i[m] = X_i[m] + ((totalmass[n] - totalmass[n+1]) * \
(0.5 * (ypssurf[n][m] + ypssurf[n+1][m]) - ypsinit[m]))
E_i[m] = E_i[m] + ((totalmass[n] - totalmass[n+1]) * \
(0.5 * (ypssurf[n][m] + ypssurf[n+1][m])))
return X_i, E_i
|
java
|
/**
 * Lazily creates (once) and returns the utility for purchase invoice goods
 * lines, fully configured with its base utility, tax method, tax-line
 * class/factory and destination tax line class.
 *
 * @param pAddParam additional parameters forwarded to collaborating lazy getters
 * @return the initialized utility
 * @throws Exception propagated from the collaborating lazy getters
 */
protected final UtlInvLine<RS, PurchaseInvoice, PurchaseInvoiceLine,
  PurchaseInvoiceTaxLine, PurchaseInvoiceGoodsTaxLine> lazyGetUtlPurGdLn(
    final Map<String, Object> pAddParam) throws Exception {
  UtlInvLine<RS, PurchaseInvoice, PurchaseInvoiceLine,
    PurchaseInvoiceTaxLine, PurchaseInvoiceGoodsTaxLine> utlInvLn = this
      .utlPurInvGdLn;
  if (utlInvLn == null) {
    utlInvLn = new UtlInvLine<RS, PurchaseInvoice, PurchaseInvoiceLine,
      PurchaseInvoiceTaxLine, PurchaseInvoiceGoodsTaxLine>();
    utlInvLn.setUtlInvBase(lazyGetUtlInvBase(pAddParam));
    utlInvLn.setInvTxMeth(lazyGetPurInvTxMeth(pAddParam));
    utlInvLn.setIsMutable(false);
    utlInvLn.setNeedMkTxCat(true);
    utlInvLn.setLtlCl(PurchaseInvoiceGoodsTaxLine.class);
    utlInvLn.setDstTxItLnCl(DestTaxGoodsLn.class);
    FactoryPersistableBase<PurchaseInvoiceGoodsTaxLine> fctLtl =
      new FactoryPersistableBase<PurchaseInvoiceGoodsTaxLine>();
    fctLtl.setObjectClass(PurchaseInvoiceGoodsTaxLine.class);
    fctLtl.setDatabaseId(getSrvDatabase().getIdDatabase());
    utlInvLn.setFctLineTxLn(fctLtl);
    //assigning fully initialized object:
    this.utlPurInvGdLn = utlInvLn;
  }
  return utlInvLn;
}
|
java
|
/**
 * Evaluates the given FreeMarker criteria expression against the supplied
 * attributes.  A null criteria always matches.
 *
 * @param criteria   expression to evaluate, or null
 * @param attributes values available to the expression
 * @return the boolean value of the evaluated expression
 * @throws CriteriaParseException if the template evaluation fails
 */
public boolean parse(String criteria, Map<String, ? extends Object> attributes) throws CriteriaParseException {
    // No criteria means "always matches".
    if (criteria == null) {
        return true;
    }
    try {
        String evaluated = FREEMARKER_BUILTINS.eval(criteria, attributes);
        return Boolean.parseBoolean(evaluated);
    } catch (TemplateException ex) {
        throw new CriteriaParseException(ex);
    }
}
|
java
|
/**
 * Resolves {@code resourceId} relative to {@code siblings}: an id starting
 * with '/' or '\\' is treated as absolute and returned as-is, otherwise it
 * is prefixed with the parent directory of {@code siblings}.  Results that
 * escape the template root are rejected.
 *
 * @param siblings   path of the referring resource
 * @param resourceId resource id to resolve (must be non-empty)
 * @return the resolved resource id
 * @throws RuntimeException if the id is empty or escapes the root
 *         (messages intentionally left in the original Chinese)
 */
public static String getRelPath(String siblings, String resourceId)
{
    String relResourceId = null;
    if (resourceId != null && resourceId.length() != 0)
    {
        char cs = resourceId.charAt(0);
        if (!(cs == '\\' || cs == '/'))
        {
            // relative path: resolve against the parent directory of `siblings`
            int i = siblings.length() - 1;
            boolean find = false;
            for (; i > 0; i--)
            {
                char c = siblings.charAt(i);
                if (c == '\\' || c == '/')
                {
                    find = true;
                    break;
                }
            }
            if (find)
            {
                String parent = siblings.substring(0, i + 1);
                relResourceId = parent.concat(resourceId);
            }
            else
            {
                // `siblings` has no directory part; use the id as-is.
                relResourceId = resourceId;
            }
        }
        else
        {
            relResourceId = resourceId;
        }
        if (BeetlUtil.isOutsideOfRoot(relResourceId))
        {
            // "Cannot access files or templates outside the root"
            throw new RuntimeException("不能访问外部文件或者模板");
        }
        return relResourceId;
    }
    else
    {
        // "Resource id is empty: bad argument"
        throw new RuntimeException("资源ID为空,参数错");
    }
}
|
java
|
/**
 * Sets the collator, storing a defensive clone so later mutation of the
 * caller's instance cannot affect this object.
 *
 * @param collator the collator to store (cloned)
 * @return this, for call chaining
 * @throws UnsupportedOperationException if this object is frozen
 * @throws ICUCloneNotSupportedException if the collator cannot be cloned
 */
public GlobalizationPreferences setCollator(Collator collator) {
    if (isFrozen()) {
        throw new UnsupportedOperationException("Attempt to modify immutable object");
    }
    try {
        this.collator = (Collator) collator.clone(); // clone for safety
    } catch (CloneNotSupportedException e) {
        throw new ICUCloneNotSupportedException("Error in cloning collator", e);
    }
    return this;
}
|
java
|
/**
 * Invokes {@code future.get(timeout, unit)} uninterruptibly: interrupts
 * are recorded rather than propagated, the remaining wait is recomputed
 * after each one, and the thread's interrupt status is restored before
 * returning.
 */
@CanIgnoreReturnValue
@GwtIncompatible // TODO
public static <V> V getUninterruptibly(Future<V> future, long timeout, TimeUnit unit)
    throws ExecutionException, TimeoutException {
  boolean interrupted = false;
  try {
    long remainingNanos = unit.toNanos(timeout);
    long end = System.nanoTime() + remainingNanos;
    while (true) {
      try {
        // Future treats negative timeouts just like zero.
        return future.get(remainingNanos, NANOSECONDS);
      } catch (InterruptedException e) {
        // Remember the interrupt, shrink the remaining wait, retry.
        interrupted = true;
        remainingNanos = end - System.nanoTime();
      }
    }
  } finally {
    if (interrupted) {
      // Restore the interrupt status for the caller.
      Thread.currentThread().interrupt();
    }
  }
}
|
java
|
/**
 * EMF-generated inverse-reference removal: clears the LEFT or RIGHT
 * containment when the corresponding feature is removed, delegating any
 * other feature to the superclass.
 */
@Override
public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs)
{
    switch (featureID)
    {
        case SimpleAntlrPackage.AND_EXPRESSION__LEFT:
            return basicSetLeft(null, msgs);
        case SimpleAntlrPackage.AND_EXPRESSION__RIGHT:
            return basicSetRight(null, msgs);
    }
    return super.eInverseRemove(otherEnd, featureID, msgs);
}
|
java
|
/**
 * Collects the declared fields of {@code clazz} and of its direct
 * superclass, excluding any {@code serialVersionUID} fields.
 *
 * NOTE(review): assumes {@code clazz} has a non-null superclass; passing
 * {@code Object.class} or an interface would NPE — confirm callers.
 *
 * @param clazz class whose fields (plus direct superclass fields) are wanted
 * @return the combined fields without serialVersionUID entries
 */
public static Field[] getFieldArrayIncludeSupClassExcludeUID(Class<?> clazz) {
    Field[] currField = clazz.getDeclaredFields();
    Field[] supField = clazz.getSuperclass().getDeclaredFields();
    Field[] temp = new Field[currField.length + supField.length];
    int length = 0;
    for (Field curr : currField) {
        if ("serialVersionUID".equals(curr.getName())) {
            continue;
        }
        temp[length++] = curr;
    }
    for (Field sup : supField) {
        if ("serialVersionUID".equals(sup.getName())) {
            continue;
        }
        temp[length++] = sup;
    }
    // Trim the oversized scratch array down to the fields actually kept.
    return java.util.Arrays.copyOf(temp, length);
}
|
java
|
/**
 * Builds a predicate matching request handlers whose method carries the
 * given annotation type.
 */
public static Predicate<RequestHandler> withMethodAnnotation(final Class<? extends Annotation> annotation) {
    return input -> input.isAnnotatedWith(annotation);
}
|
java
|
/**
 * Streams the persisted attributes of the entity type that are stored as
 * table columns (filtered by {@code isTableAttribute}).
 */
static Stream<Attribute> getTableAttributes(EntityType entityType) {
    return getPersistedAttributes(entityType).filter(PostgreSqlQueryUtils::isTableAttribute);
}
|
java
|
/**
 * Tests whether {@code string} starts with any of the candidate prefixes.
 * An empty/null string or an empty/null candidate array yields false.
 */
public static boolean startsWithAny(String string, String[] searchStrings) {
    if (isEmpty(string) || ArrayUtils.isEmpty(searchStrings)) {
        return false;
    }
    for (String candidate : searchStrings) {
        if (StringUtils.startsWith(string, candidate)) {
            return true;
        }
    }
    return false;
}
|
python
|
def replace_by_etree(self, root_el, el_idx=0):
    """Replace element.

    Select the existing element that has the same name as ``root_el``,
    then replace its children and attributes with those of ``root_el``.
    ``root_el`` can be a single element or the root of an element tree.

    Args:
        root_el : element
            New element that will replace the existing element.
        el_idx : int
            Index of the matching element to replace (default: first).
    """
    target = self.get_element_by_name(root_el.tag, el_idx)
    # In-place slice assignment keeps the target's identity (and thus its
    # position in the parent tree) while swapping its children.
    target[:] = list(root_el)
    target.attrib = root_el.attrib
|
java
|
/**
 * Begins an update of the registry's policies (API version 2017-10-01).
 * All required identifiers are validated up front; the raw HTTP response
 * is converted into a typed {@code ServiceResponse}, with delegate errors
 * surfaced through the observable.
 *
 * @param resourceGroupName the resource group containing the registry
 * @param registryName      the container registry name
 * @param registryPoliciesUpdateParameters the policy update payload
 * @return an observable emitting the typed service response
 * @throws IllegalArgumentException if any required parameter is null
 */
public Observable<ServiceResponse<RegistryPoliciesInner>> beginUpdatePoliciesWithServiceResponseAsync(String resourceGroupName, String registryName, RegistryPoliciesInner registryPoliciesUpdateParameters) {
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (registryName == null) {
        throw new IllegalArgumentException("Parameter registryName is required and cannot be null.");
    }
    if (registryPoliciesUpdateParameters == null) {
        throw new IllegalArgumentException("Parameter registryPoliciesUpdateParameters is required and cannot be null.");
    }
    Validator.validate(registryPoliciesUpdateParameters);
    final String apiVersion = "2017-10-01";
    return service.beginUpdatePolicies(this.client.subscriptionId(), resourceGroupName, registryName, apiVersion, registryPoliciesUpdateParameters, this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<RegistryPoliciesInner>>>() {
            @Override
            public Observable<ServiceResponse<RegistryPoliciesInner>> call(Response<ResponseBody> response) {
                try {
                    // Deserialize into the typed envelope; surface failures as onError.
                    ServiceResponse<RegistryPoliciesInner> clientResponse = beginUpdatePoliciesDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}
|
python
|
def logs(channel):
    "Where can one find the logs?"
    # Fall back to this host's FQDN when no logs URL is configured.
    fallback = 'http://' + socket.getfqdn()
    base = pmxbot.config.get('logs URL', fallback)
    if channel in pmxbot.config.log_channels:
        path = '/channel/' + channel.lstrip('#')
    else:
        path = '/'
    return urllib.parse.urljoin(base, path)
|
python
|
def flexifunction_directory_ack_encode(self, target_system, target_component, directory_type, start_index, count, result):
    '''
    Acknowledge success or failure of a flexifunction command

    target_system             : System ID (uint8_t)
    target_component          : Component ID (uint8_t)
    directory_type            : 0=inputs, 1=outputs (uint8_t)
    start_index               : index of first directory entry to write (uint8_t)
    count                     : count of directory entries to write (uint8_t)
    result                    : result of acknowledge, 0=fail, 1=good (uint16_t)
    '''
    return MAVLink_flexifunction_directory_ack_message(target_system, target_component, directory_type, start_index, count, result)
|
java
|
/**
 * Stores the properties for the given class, delegating to the cache.
 *
 * @param name       the class name
 * @param properties mapping of property names to their values
 * @throws DevFailed propagated from the cache layer
 */
@Override
public void setClassProperties(final String name, final Map<String, String[]> properties) throws DevFailed {
    cache.setClassProperties(name, properties);
}
|
java
|
/**
 * Lazily creates (once, under this object's lock) and returns the VNF
 * package agent.  Service mode authenticates with the service name/key;
 * otherwise username/password credentials are used.
 *
 * @return the shared VNFPackageAgent instance
 */
public synchronized VNFPackageAgent getVNFPackageAgent() {
    if (this.vnfPackageAgent == null) {
        if (isService) {
            // Service-to-service authentication via serviceName/serviceKey.
            this.vnfPackageAgent =
                new VNFPackageAgent(
                    this.serviceName,
                    this.projectId,
                    this.sslEnabled,
                    this.nfvoIp,
                    this.nfvoPort,
                    this.version,
                    this.serviceKey);
        } else {
            // User authentication via username/password.
            this.vnfPackageAgent =
                new VNFPackageAgent(
                    this.username,
                    this.password,
                    this.projectId,
                    this.sslEnabled,
                    this.nfvoIp,
                    this.nfvoPort,
                    this.version);
        }
    }
    return this.vnfPackageAgent;
}
|
python
|
def init_queue():
    """Initialize indexing queue.

    Returns:
        callable: An action that, given a queue object, declares it on the
        broker, prints a green confirmation message, and returns the queue.
    """
    def action(queue):
        # Declare the queue so producers/consumers can bind to it.
        queue.declare()
        click.secho('Indexing queue has been initialized.', fg='green')
        return queue
    return action
|
java
|
/**
 * Returns the configured date range (may be null when unset).
 */
public com.google.api.ads.adwords.axis.v201809.cm.DateRange getDateRange() {
    return dateRange;
}
|
java
|
/**
 * Resolves the configured destination value to either an admin-object
 * resource, its JNDI name (WMQ with useJNDI=true), or the original value
 * when no matching administered object can be found.  Lookup order: the
 * admin object service supplied by the MDB runtime, then the configured
 * destination reference, then an OSGi service lookup by id and finally by
 * jndiName.
 *
 * @param value            configured destination value (id, jndiName, or name)
 * @param type             declared type of the activation property
 * @param destinationType  destination interface name, may be null
 * @param destinationRef   service reference for the configured destination
 * @param activationProps  activation config properties, may be null
 * @param adminObjSvc      admin object service located by the MDB runtime, may be null
 * @return the resolved destination object or name
 * @throws Exception from resource creation
 */
private Object getDestination(Object value,
                              Class<?> type,
                              String destinationType,
                              AtomicServiceReference<AdminObjectService> destinationRef,
                              @Sensitive Map<String, Object> activationProps,
                              AdminObjectService adminObjSvc) throws Exception {
    final String methodName = "getDestination";
    final boolean trace = TraceComponent.isAnyTracingEnabled();
    if (trace && tc.isEntryEnabled())
        Tr.entry(tc, methodName, value, type, destinationType, destinationRef, activationProps, adminObjSvc);
    if (trace && tc.isDebugEnabled())
        Tr.debug(tc, "Resource adapter id", resourceAdapterID);
    // Special case for WMQ: useJNDI=true activation config property
    boolean isString = String.class.equals(type);
    String savedValue = isString ? (String) value : null;
    boolean isJNDIName = resourceAdapterID.equals(WMQJMS)
                         && isString
                         && activationProps != null
                         && Boolean.parseBoolean((String) activationProps.get("useJNDI"));
    if (trace && tc.isDebugEnabled())
        Tr.debug(this, tc, "isString, isJNDIName, savedValue", isString, isJNDIName, savedValue);
    if (adminObjSvc != null) {
        if (trace && tc.isDebugEnabled())
            Tr.debug(this, tc, "use adminObjSvc found by MDB runtime");
        if (isJNDIName) {
            value = adminObjSvc.getJndiName();
            if (trace && tc.isDebugEnabled())
                Tr.debug(this, tc, "useJNDI name was specified, using jndiName from the admin obj svc from mdb", value);
        } else {
            value = adminObjSvc.createResource(null);
            if (trace && tc.isDebugEnabled())
                Tr.debug(this, tc, "created admin object resource using admin object service from mdb runtime", value);
        }
    } else {
        ServiceReference<AdminObjectService> reference = destinationRef != null ? destinationRef.getReference() : null;
        if (trace && tc.isDebugEnabled())
            Tr.debug(this, tc, "reference", reference);
        if (reference != null && !"com.ibm.ws.jca.destination.unspecified".equals(reference.getProperty("component.name")) &&
            (value == null
             || value.equals(reference.getProperty(ID)) // id takes precedence over jndiName
             || value.equals(reference.getProperty(ResourceFactory.JNDI_NAME)))) {
            if (isJNDIName) {
                value = reference.getProperty(ResourceFactory.JNDI_NAME);
                if (trace && tc.isDebugEnabled())
                    Tr.debug(this, tc, "return JNDI name", value);
            } else {
                value = destinationRef.getServiceWithException().createResource(null);
                if (trace && tc.isDebugEnabled())
                    Tr.debug(this, tc, "return the created resource based on destinationRef", value);
            }
        } else if (value != null && reference != null) {
            if (trace && tc.isDebugEnabled())
                Tr.debug(this, tc, "use bundle context");
            BundleContext bundleContext = Utils.priv.getBundleContext(componentContext);
            String filter = FilterUtils.createPropertyFilter(ID, (String) value);
            Collection<ServiceReference<AdminObjectService>> refs = Utils.priv.getServiceReferences(bundleContext, AdminObjectService.class, filter);
            if (refs.isEmpty()) {
                // See if it matches a jndiName if they didn't specify a valid id
                filter = FilterUtils.createPropertyFilter(AdminObjectService.JNDI_NAME, (String) value);
                refs = Utils.priv.getServiceReferences(bundleContext, AdminObjectService.class, filter);
                if (refs.isEmpty()) {
                    if (trace && tc.isDebugEnabled())
                        Tr.debug(this, tc, "An administered object for " + value + " was not found. This is ok if one was not provided.");
                    if (trace && tc.isEntryEnabled())
                        Tr.exit(tc, methodName);
                    return value;
                }
            }
            reference = refs.iterator().next();
            if (isJNDIName) {
                value = reference.getProperty(AdminObjectService.JNDI_NAME);
            } else {
                AdminObjectService destinationSvc = Utils.priv.getService(bundleContext, reference);
                value = destinationSvc.createResource(null);
                // Do not unget the service because we are not done using it.
                // This is similar to a JNDI lookup of an admin object which does not unget the service upon returning it to the app.
            }
        }
    }
    // Queue name or topic name
    if (trace && tc.isDebugEnabled())
        Tr.debug(this, tc, "value, savedValue", value, savedValue);
    // Skip this processing for third party resource adapters
    if (resourceAdapterID.equals(WASJMS) || resourceAdapterID.equals(WMQJMS)) {
        if (trace && tc.isDebugEnabled())
            Tr.debug(tc, "Extra processing");
        if (isString && !isJNDIName) {
            String activationPropsDestinationType = activationProps == null ? null : (String) activationProps.get("destinationType");
            destinationType = activationPropsDestinationType == null ? destinationType : activationPropsDestinationType;
            if (destinationType == null)
                destinationType = (String) properties.get("destinationType");
            if (destinationType != null)
                value = getDestinationName(destinationType, value);
        }
    } else {
        if (trace && tc.isDebugEnabled())
            Tr.debug(tc, "Extra processing skipped");
        if (savedValue != null) {
            // Third-party adapters get the original string value back untouched.
            value = savedValue;
            if (trace && tc.isDebugEnabled())
                Tr.debug(this, tc, "value, savedValue", value, savedValue);
        }
    }
    if (trace && tc.isEntryEnabled())
        Tr.exit(tc, methodName);
    return value;
}
|
java
|
/**
 * Creates the required LTPA collaborators and registers the
 * LTPAConfiguration service (unless already registered), then signals
 * configuration readiness.  Synchronized so key creation and registration
 * cannot interleave.
 */
@Override
public synchronized void run() {
    try {
        long start = System.nanoTime();
        createRequiredCollaborators();
        if (reg == null) {
            BundleContext context = config.getBundleContext();
            if (context != null) {
                reg = context.registerService(LTPAConfiguration.class,
                        config,
                        new Hashtable<String, String>());
                config.setRegistration(reg);
            } else {
                // A null bundle context means the component was deactivated
                // while the keys were being created; bail out quietly.
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "The bundle context was null, we must have been deactivated while we were creating the keys");
                }
                return;
            }
        }
        Tr.info(tc, "LTPA_CONFIG_READY", TimestampUtils.getElapsedTimeNanos(start), config.getKeyFile());
        config.configReady();
    } catch (Exception e) {
        // NOTE(review): the exception itself is not logged here — presumably
        // instrumentation (FFDC) captures it; confirm before adding logging.
        Tr.error(tc, "LTPA_CONFIG_ERROR", config.getKeyFile());
    } catch (Throwable t) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "Exception creating the LTPA key.", t);
        }
        Tr.error(tc, "LTPA_KEY_CREATE_ERROR");
    }
}
|
python
|
def _BuildEventData(self, record):
    """Builds an FseventsdData object from a parsed structure.

    Args:
        record (dls_record_v1|dls_record_v2): parsed record structure.

    Returns:
        FseventsdEventData: event data attribute container.
    """
    event_data = FseventsdEventData()
    event_data.event_identifier = record.event_identifier
    event_data.flags = record.event_flags
    event_data.path = record.path
    # Only DLS V2 records carry a node identifier; default to None otherwise.
    event_data.node_identifier = getattr(record, 'node_identifier', None)
    return event_data
|
python
|
def lookup_blob(hash_value):
    """
    Look up a stored blob by its SHA-256 digest.

    Args:
        hash_value: Hex digest identifying the blob.

    Returns:
        The blob's content, or the string "Blob not found" when no blob
        matches the given digest (or the lookup fails).
    """
    try:
        blob = BlobStorage.objects.get(sha256=hash_value)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # propagate; lookup failures still degrade to the sentinel string.
        return "Blob not found"
    return blob.content
|
python
|
def _pad_bytes(request_bytes):
    """Pad *request_bytes* up to a multiple of ``_BLOCK_SIZE``.

    Every added byte holds the pad length (PKCS#7-style), so a block-aligned
    input gains one full block of padding.

    :type request_bytes: bytes
    :rtype: bytes
    """
    remainder = len(request_bytes) % _BLOCK_SIZE
    pad_len = _BLOCK_SIZE - remainder
    return request_bytes + bytes(bytearray([pad_len])) * pad_len
|
python
|
def get_namespaced_custom_object(self, group, version, namespace, plural, name, **kwargs):
    """
    Returns a namespace scoped custom object
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_namespaced_custom_object(group, version, namespace, plural, name, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str group: the custom resource's group (required)
    :param str version: the custom resource's version (required)
    :param str namespace: The custom resource's namespace (required)
    :param str plural: the custom resource's plural name. For TPRs this would be lowercase plural kind. (required)
    :param str name: the custom object's name (required)
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always unwrap the response body; both the sync result and the async
    # request thread come straight from the _with_http_info variant, so a
    # single call site covers both cases.
    kwargs['_return_http_data_only'] = True
    return self.get_namespaced_custom_object_with_http_info(
        group, version, namespace, plural, name, **kwargs)
|
java
|
@Override
public Result authenticate(HttpExchange httpExchange) {
    Result lastResult = null;
    for (Authenticator authenticator : authenticators) {
        lastResult = authenticator.authenticate(httpExchange);
        boolean succeeded = lastResult instanceof Success;
        // Mode.ANY short-circuits on the first success; Mode.ALL
        // short-circuits on the first non-success.
        if ((succeeded && mode == Mode.ANY) || (!succeeded && mode == Mode.ALL)) {
            return lastResult;
        }
    }
    // Fell through every authenticator: SUCCESS for Mode.ALL, FAILURE for
    // Mode.ANY (or null when the authenticator list is empty).
    return lastResult;
}
|
java
|
/**
 * Update a group via PUT /me/identity/group/{group}.
 *
 * @param group       path parameter identifying the group
 * @param description new description sent in the request body
 * @param role        new role sent in the request body
 * @throws IOException if the underlying HTTP call fails
 */
public void identity_group_group_PUT(String group, String description, OvhRoleEnum role) throws IOException {
    String qPath = "/me/identity/group/{group}";
    StringBuilder sb = path(qPath, group);
    HashMap<String, Object> body = new HashMap<String, Object>();
    addBody(body, "description", description);
    addBody(body, "role", role);
    exec(qPath, "PUT", sb.toString(), body);
}
|
python
|
def _index_to_row_col(lines, index):
r"""
>>> lines = ['hello\n', 'world\n']
>>> _index_to_row_col(lines, 0)
(0, 0)
>>> _index_to_row_col(lines, 7)
(1, 1)
"""
if index < 0:
raise IndexError('negative index')
current_index = 0
for line_number, line in enumerate(lines):
line_length = len(line)
if current_index + line_length > index:
return line_number, index - current_index
current_index += line_length
raise IndexError('index %d out of range' % index)
|
java
|
@Override
protected TermsByQueryResponse newResponse(TermsByQueryRequest request, AtomicReferenceArray shardsResponses, ClusterState clusterState) {
    int successfulShards = 0;
    int failedShards = 0;
    int numTerms = 0;
    TermsSet[] termsSets = new TermsSet[shardsResponses.length()];
    List<ShardOperationFailedException> shardFailures = null;
    // we check each shard response
    for (int i = 0; i < shardsResponses.length(); i++) {
        Object shardResponse = shardsResponses.get(i);
        if (shardResponse == null) {
            // simply ignore non active shards
        } else if (shardResponse instanceof BroadcastShardOperationFailedException) {
            failedShards++;
            // shardFailures is allocated lazily: only when a failure occurs.
            if (shardFailures == null) {
                shardFailures = new ArrayList<>();
            }
            logger.error("Shard operation failed", (BroadcastShardOperationFailedException) shardResponse);
            shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse));
        } else {
            // on successful shard response, just add to the array or responses so we can process them below
            // we calculate the total number of terms gathered across each shard so we can use it during
            // initialization of the final TermsResponse below (to avoid rehashing during merging)
            TermsByQueryShardResponse shardResp = ((TermsByQueryShardResponse) shardResponse);
            TermsSet terms = shardResp.getTerms();
            termsSets[i] = terms;
            numTerms += terms.size();
            successfulShards++;
        }
    }
    // Merge the responses
    try {
        // NumericTermsSet is responsible for the merge, set size to avoid rehashing on certain implementations.
        // Prefer the caller-provided expected size; fall back to the counted total.
        long expectedElements = request.expectedTerms() != null ? request.expectedTerms() : numTerms;
        TermsSet termsSet = TermsSet.newTermsSet(expectedElements, request.termsEncoding(), breakerService.getBreaker(CircuitBreaker.REQUEST));
        TermsByQueryResponse rsp;
        try {
            for (int i = 0; i < termsSets.length; i++) {
                TermsSet terms = termsSets[i];
                if (terms != null) {
                    termsSet.merge(terms);
                    terms.release(); // release the shard terms set and adjust the circuit breaker
                    // Null the slot so the outer finally does not release this
                    // shard set a second time.
                    termsSets[i] = null;
                }
            }
            long tookInMillis = System.currentTimeMillis() - request.nowInMillis();
            rsp = new TermsByQueryResponse(termsSet, tookInMillis, shardsResponses.length(), successfulShards, failedShards, shardFailures);
        }
        finally {
            // we can now release the terms set and adjust the circuit breaker, since the TermsByQueryResponse holds an
            // encoded version of the terms set
            termsSet.release();
        }
        return rsp;
    }
    finally { // If something happens, release the terms sets and adjust the circuit breaker
        // Only slots not nulled above (i.e. not yet merged) are still live here.
        for (int i = 0; i < termsSets.length; i++) {
            TermsSet terms = termsSets[i];
            if (terms != null) {
                terms.release();
            }
        }
    }
}
|
python
|
def resource_op_defaults_to(name, op_default, value, extra_args=None, cibname=None):
    '''
    Ensure a resource operation default in the cluster is set to a given value

    Should be run on one cluster node only
    (there may be races)
    Can only be run on a node with a functional pacemaker/corosync

    name
        Irrelevant, not used (recommended: pcs_properties__resource_op_defaults_to_{{op_default}})
    op_default
        name of the operation default resource property
    value
        value of the operation default resource property
    extra_args
        additional options for the pcs command
    cibname
        use a cached CIB-file named like cibname instead of the live CIB

    Example:

    .. code-block:: yaml

        pcs_properties__resource_op_defaults_to_monitor-interval:
            pcs.resource_op_defaults_to:
                - op_default: monitor-interval
                - value: 60s
                - cibname: cib_for_cluster_settings
    '''
    # Thin wrapper: 'pcs resource op defaults' both lists and sets operation
    # defaults, so the same subcommand words are passed for show and create,
    # and the property is encoded as a single 'key=value' item.
    return _item_present(name=name,
                         item='resource',
                         item_id='{0}={1}'.format(op_default, value),
                         item_type=None,
                         show=['op', 'defaults'],
                         create=['op', 'defaults'],
                         extra_args=extra_args,
                         cibname=cibname)
|
python
|
def _find_install_targets(name=None,
                          version=None,
                          pkgs=None,
                          sources=None,
                          skip_suggestions=False,
                          pkg_verify=False,
                          normalize=True,
                          ignore_epoch=False,
                          reinstall=False,
                          refresh=False,
                          **kwargs):
    '''
    Inspect the arguments to pkg.installed and discover what packages need to
    be installed. Return a dict of desired packages

    On early exit (conflicting arguments, everything already installed, or a
    failure) a state-style dict (name/changes/result/comment) is returned
    instead; otherwise the result is the tuple
    (desired, targets, to_unpurge, to_reinstall, altered_files, warnings,
    was_refreshed).
    '''
    was_refreshed = False
    # 'pkgs' (named packages) and 'sources' (package files) are mutually
    # exclusive ways of describing the desired set.
    if all((pkgs, sources)):
        return {'name': name,
                'changes': {},
                'result': False,
                'comment': 'Only one of "pkgs" and "sources" is permitted.'}
    # dict for packages that fail pkg.verify and their altered files
    altered_files = {}
    # Get the ignore_types list if any from the pkg_verify argument
    if isinstance(pkg_verify, list) \
            and any(x.get('ignore_types') is not None
                    for x in pkg_verify
                    if isinstance(x, _OrderedDict)
                    and 'ignore_types' in x):
        ignore_types = next(x.get('ignore_types')
                            for x in pkg_verify
                            if 'ignore_types' in x)
    else:
        ignore_types = []
    # Get the verify_options list if any from the pkg_verify argument
    if isinstance(pkg_verify, list) \
            and any(x.get('verify_options') is not None
                    for x in pkg_verify
                    if isinstance(x, _OrderedDict)
                    and 'verify_options' in x):
        verify_options = next(x.get('verify_options')
                              for x in pkg_verify
                              if 'verify_options' in x)
    else:
        verify_options = []
    if __grains__['os'] == 'FreeBSD':
        kwargs['with_origin'] = True
    if salt.utils.platform.is_windows():
        # Windows requires a refresh to establish a pkg db if refresh=True, so
        # add it to the kwargs.
        kwargs['refresh'] = refresh
    # Capability resolution is only attempted when the provider implements
    # pkg.list_provides.
    resolve_capabilities = kwargs.get('resolve_capabilities', False) and 'pkg.list_provides' in __salt__
    try:
        cur_pkgs = __salt__['pkg.list_pkgs'](versions_as_list=True, **kwargs)
        cur_prov = resolve_capabilities and __salt__['pkg.list_provides'](**kwargs) or dict()
    except CommandExecutionError as exc:
        return {'name': name,
                'changes': {},
                'result': False,
                'comment': exc.strerror}
    if salt.utils.platform.is_windows() and kwargs.pop('refresh', False):
        # We already refreshed when we called pkg.list_pkgs
        was_refreshed = True
        refresh = False
    if any((pkgs, sources)):
        if pkgs:
            desired = _repack_pkgs(pkgs, normalize=normalize)
        elif sources:
            desired = __salt__['pkg_resource.pack_sources'](
                sources,
                normalize=normalize,
            )
        if not desired:
            # Badly-formatted SLS
            return {'name': name,
                    'changes': {},
                    'result': False,
                    'comment': 'Invalidly formatted \'{0}\' parameter. See '
                               'minion log.'.format('pkgs' if pkgs
                                                    else 'sources')}
        to_unpurge = _find_unpurge_targets(desired, **kwargs)
    else:
        # Single-package invocation via 'name' (and optional 'version').
        if salt.utils.platform.is_windows():
            pkginfo = _get_package_info(name, saltenv=kwargs['saltenv'])
            if not pkginfo:
                return {'name': name,
                        'changes': {},
                        'result': False,
                        'comment': 'Package {0} not found in the '
                                   'repository.'.format(name)}
            if version is None:
                version = _get_latest_pkg_version(pkginfo)
        if normalize:
            _normalize_name = \
                __salt__.get('pkg.normalize_name', lambda pkgname: pkgname)
            desired = {_normalize_name(name): version}
        else:
            desired = {name: version}
        to_unpurge = _find_unpurge_targets(desired, **kwargs)
        # FreeBSD pkg supports `openjdk` and `java/openjdk7` package names
        origin = bool(re.search('/', name))
        if __grains__['os'] == 'FreeBSD' and origin:
            cver = [k for k, v in six.iteritems(cur_pkgs)
                    if v['origin'] == name]
        else:
            cver = cur_pkgs.get(name, [])
        if name not in to_unpurge:
            if version and version in cver \
                    and not reinstall \
                    and not pkg_verify:
                # The package is installed and is the correct version
                return {'name': name,
                        'changes': {},
                        'result': True,
                        'comment': 'Version {0} of package \'{1}\' is already '
                                   'installed'.format(version, name)}
            # if cver is not an empty string, the package is already installed
            elif cver and version is None \
                    and not reinstall \
                    and not pkg_verify:
                # The package is installed
                return {'name': name,
                        'changes': {},
                        'result': True,
                        'comment': 'Package {0} is already '
                                   'installed'.format(name)}
    version_spec = False
    if not sources:
        # Check for alternate package names if strict processing is not
        # enforced. Takes extra time. Disable for improved performance
        if not skip_suggestions:
            # Perform platform-specific pre-flight checks
            not_installed = dict([
                (name, version)
                for name, version in desired.items()
                if not (name in cur_pkgs and (version is None or _fulfills_version_string(cur_pkgs[name], version)))
            ])
            if not_installed:
                try:
                    problems = _preflight_check(not_installed, **kwargs)
                except CommandExecutionError:
                    pass
                else:
                    comments = []
                    if problems.get('no_suggest'):
                        comments.append(
                            'The following package(s) were not found, and no '
                            'possible matches were found in the package db: '
                            '{0}'.format(
                                ', '.join(sorted(problems['no_suggest']))
                            )
                        )
                    if problems.get('suggest'):
                        for pkgname, suggestions in \
                                six.iteritems(problems['suggest']):
                            comments.append(
                                'Package \'{0}\' not found (possible matches: '
                                '{1})'.format(pkgname, ', '.join(suggestions))
                            )
                    if comments:
                        if len(comments) > 1:
                            comments.append('')
                        return {'name': name,
                                'changes': {},
                                'result': False,
                                'comment': '. '.join(comments).rstrip()}
    # Resolve the latest package version for any packages with "latest" in the
    # package version
    wants_latest = [] \
        if sources \
        else [x for x, y in six.iteritems(desired) if y == 'latest']
    if wants_latest:
        resolved_latest = __salt__['pkg.latest_version'](*wants_latest,
                                                         refresh=refresh,
                                                         **kwargs)
        # With a single package pkg.latest_version returns a bare string, not
        # a dict; normalize so the loop below can treat both cases alike.
        if len(wants_latest) == 1:
            resolved_latest = {wants_latest[0]: resolved_latest}
        if refresh:
            was_refreshed = True
            refresh = False
        # pkg.latest_version returns an empty string when the package is
        # up-to-date. So check the currently-installed packages. If found, the
        # resolved latest version will be the currently installed one from
        # cur_pkgs. If not found, then the package doesn't exist and the
        # resolved latest version will be None.
        for key in resolved_latest:
            if not resolved_latest[key]:
                if key in cur_pkgs:
                    resolved_latest[key] = cur_pkgs[key][-1]
                else:
                    resolved_latest[key] = None
        # Update the desired versions with the ones we resolved
        desired.update(resolved_latest)
    # Find out which packages will be targeted in the call to pkg.install
    targets = {}
    to_reinstall = {}
    problems = []
    warnings = []
    failed_verify = False
    for package_name, version_string in six.iteritems(desired):
        cver = cur_pkgs.get(package_name, [])
        # Fall back to the providing package's versions when the desired name
        # is only a capability.
        if resolve_capabilities and not cver and package_name in cur_prov:
            cver = cur_pkgs.get(cur_prov.get(package_name)[0], [])
        # Package not yet installed, so add to targets
        if not cver:
            targets[package_name] = version_string
            continue
        if sources:
            if reinstall:
                to_reinstall[package_name] = version_string
                continue
            elif 'lowpkg.bin_pkg_info' not in __salt__:
                continue
            # Metadata parser is available, cache the file and derive the
            # package's name and version
            err = 'Unable to cache {0}: {1}'
            try:
                cached_path = __salt__['cp.cache_file'](version_string, saltenv=kwargs['saltenv'])
            except CommandExecutionError as exc:
                problems.append(err.format(version_string, exc))
                continue
            if not cached_path:
                problems.append(err.format(version_string, 'file not found'))
                continue
            elif not os.path.exists(cached_path):
                problems.append('{0} does not exist on minion'.format(version_string))
                continue
            source_info = __salt__['lowpkg.bin_pkg_info'](cached_path)
            if source_info is None:
                warnings.append('Failed to parse metadata for {0}'.format(version_string))
                continue
            else:
                verstr = source_info['version']
        else:
            verstr = version_string
            if reinstall:
                to_reinstall[package_name] = version_string
                continue
            if not __salt__['pkg_resource.check_extra_requirements'](package_name, version_string):
                targets[package_name] = version_string
                continue
            # No version specified and pkg is installed
            elif __salt__['pkg_resource.version_clean'](version_string) is None:
                if (not reinstall) and pkg_verify:
                    try:
                        verify_result = __salt__['pkg.verify'](
                            package_name,
                            ignore_types=ignore_types,
                            verify_options=verify_options,
                            **kwargs
                        )
                    except (CommandExecutionError, SaltInvocationError) as exc:
                        failed_verify = exc.strerror
                        continue
                    if verify_result:
                        to_reinstall[package_name] = version_string
                        altered_files[package_name] = verify_result
                continue
        version_fulfilled = False
        allow_updates = bool(not sources and kwargs.get('allow_updates'))
        try:
            version_fulfilled = _fulfills_version_string(cver, verstr, ignore_epoch=ignore_epoch, allow_updates=allow_updates)
        except CommandExecutionError as exc:
            problems.append(exc.strerror)
            continue
        # Compare desired version against installed version.
        version_spec = True
        if not version_fulfilled:
            if reinstall:
                to_reinstall[package_name] = version_string
            else:
                version_conditions = _parse_version_string(version_string)
                # Only an exact-match ('==') condition can be combined with
                # pkg.verify; range conditions always become install targets.
                if pkg_verify and any(oper == '==' for oper, version in version_conditions):
                    try:
                        verify_result = __salt__['pkg.verify'](
                            package_name,
                            ignore_types=ignore_types,
                            verify_options=verify_options,
                            **kwargs
                        )
                    except (CommandExecutionError, SaltInvocationError) as exc:
                        failed_verify = exc.strerror
                        continue
                    if verify_result:
                        to_reinstall[package_name] = version_string
                        altered_files[package_name] = verify_result
                else:
                    log.debug(
                        'Current version (%s) did not match desired version '
                        'specification (%s), adding to installation targets',
                        cver, version_string
                    )
                    targets[package_name] = version_string
    if failed_verify:
        problems.append(failed_verify)
    if problems:
        return {'name': name,
                'changes': {},
                'result': False,
                'comment': ' '.join(problems)}
    if not any((targets, to_unpurge, to_reinstall)):
        # All specified packages are installed
        msg = 'All specified packages are already installed{0}'
        msg = msg.format(
            ' and are at the desired version' if version_spec and not sources
            else ''
        )
        ret = {'name': name,
               'changes': {},
               'result': True,
               'comment': msg}
        if warnings:
            ret.setdefault('warnings', []).extend(warnings)
        return ret
    return (desired, targets, to_unpurge, to_reinstall, altered_files,
            warnings, was_refreshed)
|
python
|
def eventloop(cls, custom_actions=None):
    """
    For use outside of iteration wrapping. Makes an interactive event loop

    custom_actions should be specified in format
        [dispname, keys, desc, func]
    """
    # Fix for the mutable-default-argument pitfall: the old `custom_actions=[]`
    # default was shared across calls, so any mutation leaked between
    # invocations. None now stands in for "no custom actions" and is
    # normalized to a fresh list per call; callers passing a list see no
    # difference.
    if custom_actions is None:
        custom_actions = []
    iiter = cls([None], custom_actions=custom_actions, verbose=False)
    print('[IITER] Begining interactive main loop')
    # Drain the iterator: the loop body is the interaction; we only need the
    # exhausted iterator object afterwards.
    for _ in iiter:
        pass
    return iiter
|
python
|
def email(self, comment, content_object, request):
    """
    Overwritten for a better email notification.
    """
    # Positive guard instead of early return: send only when the moderator
    # has email notifications enabled.
    if self.email_notification:
        send_comment_posted(comment, request)
|
java
|
protected void updateApiVersion(KubernetesResourceList list) {
    // Guard clauses: nothing to do without a list or a non-empty apiVersion.
    String version = getApiVersion();
    if (list == null || version == null || version.length() == 0) {
        return;
    }
    List items = list.getItems();
    if (items == null) {
        return;
    }
    // Propagate the apiVersion to every item that carries metadata.
    for (Object item : items) {
        if (item instanceof HasMetadata) {
            updateApiVersion((HasMetadata) item);
        }
    }
}
|
python
|
def get_config_h_filename():
    """Return the path of pyconfig.h."""
    if not _PYTHON_BUILD:
        # Installed Python: the header lives in the platform include dir.
        inc_dir = get_path('platinclude')
    elif os.name == "nt":
        # Windows source builds keep pyconfig.h under the PC subdirectory.
        inc_dir = os.path.join(_PROJECT_BASE, "PC")
    else:
        inc_dir = _PROJECT_BASE
    return os.path.join(inc_dir, 'pyconfig.h')
|
java
|
void createImpl() throws SocketException {
    // Lazily install the socket implementation on first use.
    if (impl == null) {
        setImpl();
    }
    try {
        impl.create(true);
        created = true;
    } catch (IOException e) {
        // Re-surface lower-level I/O failures as the socket-level exception
        // declared by this method.
        throw new SocketException(e.getMessage());
    }
}
|
java
|
protected SITransaction getTransaction() throws ResourceException
{
    final String methodName = "getTransaction";
    if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled())
    {
        SibTr.entry(this, TRACE, methodName);
    }
    // The XA resource is handed back to the caller as the SITransaction.
    final SITransaction result = getXaResource();
    // Guard re-evaluated on purpose: tracing may have been toggled mid-call.
    if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled())
    {
        SibTr.exit(this, TRACE, methodName, result);
    }
    return result;
}
|
python
|
def _read_routes_c_v1():
    """Retrieve Windows routes through a GetIpForwardTable call.

    This is compatible with XP but won't get IPv6 routes."""
    def _to_dotted(raw):
        # Table values are little-endian 32-bit ints; render dotted-quad.
        return inet_ntop(socket.AF_INET, struct.pack("<I", raw))

    routes = []
    for entry in GetIpForwardTable():
        dest = entry['ForwardDest']
        netmask = entry['ForwardMask']
        nexthop = _to_dotted(entry['ForwardNextHop'])
        base_metric = entry['ForwardMetric1']
        try:
            iface = dev_from_index(entry['ForwardIfIndex'])
            # Skip interfaces without a usable IPv4 address.
            if iface.ip == "0.0.0.0":
                continue
        except ValueError:
            # Unknown interface index: skip the route entirely.
            continue
        # Effective metric = RouteMetric + InterfaceMetric.
        routes.append((dest, netmask, nexthop, iface, iface.ip,
                       base_metric + iface.ipv4_metric))
    return routes
|
python
|
def flags(self, index):
    """ Returns the item flags for the given index.
    """
    if not index.isValid():
        return QtCore.Qt.NoItemFlags
    # Each tree item decides its own flags via getFlags().
    node = index.internalPointer()
    return node.getFlags()
|
python
|
def merge_configs(to_be_merged, default):
    """Merge *default* into *to_be_merged* in place, left side winning.

    Dicts are merged recursively: keys missing on the left are copied from
    *default*; keys present on both sides are merged recursively. For lists,
    any entry of *default* that is a non-empty sequence whose first element
    matches the first element of a sequence already on the left is skipped —
    this lets the left side override e.g. time intervals for default entries.

    Example:
        In [1]: x = [["get_stats_disk_usage_for_database", 180],
                     ["get_stats_tx_rate_for_database", 500]]
        In [2]: y = [["get_stats_seconds_since_last_vacuum_per_table", 60],
                     ["get_stats_tx_rate_for_database", 60]]
        In [3]: merge_configs(x, y)
        Out[3]:
        [['get_stats_disk_usage_for_database', 180],
         ['get_stats_tx_rate_for_database', 500],
         ['get_stats_seconds_since_last_vacuum_per_table', 60]]
    """
    if isinstance(to_be_merged, dict) and isinstance(default, dict):
        for key, default_value in default.items():
            if key in to_be_merged:
                to_be_merged[key] = merge_configs(to_be_merged[key],
                                                  default_value)
            else:
                to_be_merged[key] = default_value
    elif isinstance(to_be_merged, list) and isinstance(default, list):
        seq_types = (list, set, tuple)
        # First elements of left-side sequences that also appear as first
        # elements on the right: those right-side entries are overridden.
        overridden = set()
        for left in to_be_merged:
            for right in default:
                if (isinstance(left, seq_types)
                        and isinstance(right, seq_types)
                        and len(left) > 0 and len(right) > 0
                        and left[0] == right[0]):
                    overridden.add(left[0])
        for right in default:
            if not isinstance(right, seq_types) or right[0] not in overridden:
                to_be_merged.append(right)
    return to_be_merged
|
python
|
def _safe_processing(nsafefn, source, _globals=None, _locals=None):
    """Run *nsafefn* on *source* with SAFE_BUILTINS folded into the globals.

    :param nsafefn: function invoked as ``nsafefn(source, _globals, _locals)``.
    :param source: source object to process with *nsafefn*.
    :param dict _globals: global objects by name; updated in place when given.
    :param dict _locals: local objects by name.
    :return: whatever *nsafefn* returns."""
    if _globals is not None:
        # Deliberately mutates the caller's mapping so the safe builtins
        # override any same-named entries it carried.
        _globals.update(SAFE_BUILTINS)
    else:
        _globals = SAFE_BUILTINS
    return nsafefn(source, _globals, _locals)
|
java
|
/**
 * Build a BannerUpdate bean from one XML element, copying each known child
 * value verbatim.
 */
private static BannerUpdate parseNextBannerUpdate(Element element) {
    final BannerUpdate update = new BannerUpdate();
    update.setSeasonNum(DOMHelper.getValueFromElement(element, "SeasonNum"));
    update.setSeriesId(DOMHelper.getValueFromElement(element, SERIES));
    update.setFormat(DOMHelper.getValueFromElement(element, "format"));
    update.setLanguage(DOMHelper.getValueFromElement(element, "language"));
    update.setPath(DOMHelper.getValueFromElement(element, "path"));
    update.setTime(DOMHelper.getValueFromElement(element, TIME));
    update.setType(DOMHelper.getValueFromElement(element, "type"));
    return update;
}
|
python
|
def ctox(arguments, toxinidir):
    """Sets up conda environments, and sets up and runs each environment based
    on the project's tox.ini configuration file.

    Returns a truthy value if either the build or running the commands failed,
    and a falsey value (or 0) if all commands ran successfully.
    """
    if arguments is None:
        arguments = []
    if toxinidir is None:
        toxinidir = os.getcwd()
    args, options = parse_args(arguments)
    if args.version:
        print(version)
        return 0
    # if no conda trigger OSError
    try:
        with open(os.devnull, "w") as fnull:
            check_output(['conda', '--version'], stderr=fnull)
    except OSError:
        cprint("conda not found, you need to install it to use ctox.\n"
               "The recommended way is to download miniconda,\n"
               "Do not install conda via pip.", 'err')
        return 1
    toxinifile = os.path.join(toxinidir, "tox.ini")
    from ctox.config import read_config, get_envlist
    config = read_config(toxinifile)
    # '-e ALL' (the default) expands to every environment listed in tox.ini;
    # otherwise '-e a,b' selects an explicit comma-separated subset.
    if args.e == 'ALL':
        envlist = get_envlist(config)
    else:
        envlist = args.e.split(',')
    # TODO configure with option
    toxdir = os.path.join(toxinidir, ".tox")
    # create a zip file for the project
    from ctox.pkg import make_dist, package_name
    cprint("GLOB sdist-make: %s" % os.path.join(toxinidir, "setup.py"))
    package = package_name(toxinidir)
    if not make_dist(toxinidir, toxdir, package):
        cprint("    setup.py sdist failed", 'err')
        return 1
    # setup each environment and run ctox
    # Each env.ctox() returns a status code: 0 succeeded, 1 failed, 2 skipped
    # (see the outcome/status tuples below).
    failing = {}
    for env_name in envlist:
        env = Env(name=env_name, config=config, options=options,
                  toxdir=toxdir, toxinidir=toxinidir, package=package)
        failing[env_name] = env.ctox()
    # print summary of the outcomes of ctox for each environment
    cprint('Summary')
    print("-" * 23)
    for env_name in envlist:
        n = failing[env_name]
        outcome = ('succeeded', 'failed', 'skipped')[n]
        status = ('ok', 'err', 'warn')[n]
        cprint("%s commands %s" % (env_name, outcome), status)
    # NOTE: returns a bool (a subclass of int), truthy only when some env
    # actually failed (status 1); skipped envs (status 2) do not count.
    return any(1 == v for v in failing.values())
|
python
|
def create_fork(self, organization=None):
    """Create a fork of this repository.

    :param str organization: (optional), login of an organization to create
        the fork under; when omitted, the fork is created under the
        authenticated user's account
    :returns: :class:`Repository <Repository>` if successful, else None
    """
    # Fix: the docstring wrongly marked 'organization' as required even
    # though the parameter defaults to None and the code branches on it.
    url = self._build_url('forks', base_url=self._api)
    if organization:
        resp = self._post(url, data={'organization': organization})
    else:
        resp = self._post(url)
    # Renamed from 'json' so the builtin module name is not shadowed.
    payload = self._json(resp, 202)
    return Repository(payload, self) if payload else None
|
java
|
public int getFeaturePosition(AbstractFeature feature) {
    AbstractFeature result = feature;
    // When the feature itself has no cached position and it is a group,
    // stand in its lowest-positioned concrete child feature instead.
    if (!featurePositions.containsKey(feature)) {
        if (feature instanceof FeatureGroup) {
            FeatureGroup featureGroup = (FeatureGroup) feature;
            List<Feature> features = featureGroup.getConcreteFeatures();
            if (!features.isEmpty()) {
                // NOTE(review): sorts the list returned by
                // getConcreteFeatures() in place — assumes that list is a
                // fresh copy or that mutating it is acceptable; confirm.
                Collections.sort(features, new Comparator<Feature>() {
                    @Override
                    public int compare(Feature feat1, Feature feat2) {
                        // Recursive: children may themselves be groups.
                        return getFeaturePosition(feat1) - getFeaturePosition(feat2);
                    }
                });
                result = features.get(0);
            }
        }
    }
    // Features (or empty groups) without a recorded position map to 0.
    return featurePositions.getOrDefault(result, 0);
}
|
python
|
def config_options(self):
    """
    Generate a Traitlets Config instance for shell widgets using our
    config system

    This lets us create each widget with its own config
    """
    # ---- Jupyter config ----
    try:
        full_cfg = load_pyconfig_files(['jupyter_qtconsole_config.py'],
                                       jupyter_config_dir())

        # From the full config we only select the JupyterWidget section
        # because the others have no effect here.
        cfg = Config({'JupyterWidget': full_cfg.JupyterWidget})
    except Exception:
        # Narrowed from a bare ``except`` so SystemExit/KeyboardInterrupt are
        # no longer swallowed; any failure reading Jupyter's own config still
        # falls back to an empty Config (same best-effort behavior as before).
        cfg = Config()
    # ---- Spyder config ----
    spy_cfg = Config()
    # Make the pager widget a rich one (i.e a QTextEdit)
    spy_cfg.JupyterWidget.kind = 'rich'
    # Gui completion widget
    completion_type_o = self.get_option('completion_type')
    completions = {0: "droplist", 1: "ncurses", 2: "plain"}
    spy_cfg.JupyterWidget.gui_completion = completions[completion_type_o]
    # Pager
    pager_o = self.get_option('use_pager')
    if pager_o:
        spy_cfg.JupyterWidget.paging = 'inside'
    else:
        spy_cfg.JupyterWidget.paging = 'none'
    # Calltips
    calltips_o = self.get_option('show_calltips')
    spy_cfg.JupyterWidget.enable_calltips = calltips_o
    # Buffer size
    buffer_size_o = self.get_option('buffer_size')
    spy_cfg.JupyterWidget.buffer_size = buffer_size_o
    # Prompts
    in_prompt_o = self.get_option('in_prompt')
    out_prompt_o = self.get_option('out_prompt')
    if in_prompt_o:
        spy_cfg.JupyterWidget.in_prompt = in_prompt_o
    if out_prompt_o:
        spy_cfg.JupyterWidget.out_prompt = out_prompt_o
    # Style
    color_scheme = CONF.get('appearance', 'selected')
    style_sheet = create_qss_style(color_scheme)[0]
    spy_cfg.JupyterWidget.style_sheet = style_sheet
    spy_cfg.JupyterWidget.syntax_style = color_scheme
    # Merge QtConsole and Spyder configs. Spyder prefs will have
    # prevalence over QtConsole ones
    cfg._merge(spy_cfg)
    return cfg
|
java
|
private static PersistenceUnitMetadata parsePersistenceUnit(final URL url, final String[] persistenceUnits,
                                                            Element top, final String versionName)
{
    PersistenceUnitMetadata metadata = new PersistenceUnitMetadata(versionName, getPersistenceRootUrl(url), url);
    String puName = top.getAttribute("name");
    if (!Arrays.asList(persistenceUnits).contains(puName))
    {
        // Returning null because this persistence unit is not intended for
        // creating entity manager factory.
        return null;
    }
    if (!isEmpty(puName))
    {
        log.trace("Persistent Unit name from persistence.xml: " + puName);
        metadata.setPersistenceUnitName(puName);
        String transactionType = top.getAttribute("transaction-type");
        // Default/unspecified or explicit RESOURCE_LOCAL map to
        // RESOURCE_LOCAL; only an explicit "JTA" selects JTA here.
        if (StringUtils.isEmpty(transactionType)
                || PersistenceUnitTransactionType.RESOURCE_LOCAL.name().equals(transactionType))
        {
            metadata.setTransactionType(PersistenceUnitTransactionType.RESOURCE_LOCAL);
        }
        else if (PersistenceUnitTransactionType.JTA.name().equals(transactionType))
        {
            metadata.setTransactionType(PersistenceUnitTransactionType.JTA);
        }
    }
    // Walk the direct children of <persistence-unit> and collect provider,
    // properties, managed classes, jar files, and the exclude flag.
    NodeList children = top.getChildNodes();
    for (int i = 0; i < children.getLength(); i++)
    {
        if (children.item(i).getNodeType() == Node.ELEMENT_NODE)
        {
            Element element = (Element) children.item(i);
            String tag = element.getTagName();
            if (tag.equals("provider"))
            {
                metadata.setProvider(getElementContent(element));
            }
            else if (tag.equals("properties"))
            {
                NodeList props = element.getChildNodes();
                for (int j = 0; j < props.getLength(); j++)
                {
                    if (props.item(j).getNodeType() == Node.ELEMENT_NODE)
                    {
                        Element propElement = (Element) props.item(j);
                        // if element is not "property" then skip
                        if (!"property".equals(propElement.getTagName()))
                        {
                            continue;
                        }
                        String propName = propElement.getAttribute("name").trim();
                        String propValue = propElement.getAttribute("value").trim();
                        // A missing/empty value attribute falls back to the
                        // element's text content (empty string by default).
                        if (isEmpty(propValue))
                        {
                            propValue = getElementContent(propElement, "");
                        }
                        metadata.getProperties().put(propName, propValue);
                    }
                }
            }
            else if (tag.equals("class"))
            {
                metadata.getClasses().add(getElementContent(element));
            }
            else if (tag.equals("jar-file"))
            {
                metadata.addJarFile(getElementContent(element));
            }
            else if (tag.equals("exclude-unlisted-classes"))
            {
                String excludeUnlisted = getElementContent(element);
                metadata.setExcludeUnlistedClasses(Boolean.parseBoolean(excludeUnlisted));
            }
        }
    }
    // Re-read the transaction type via getTransactionType(); when it yields a
    // non-null value it overrides whatever was set above.
    PersistenceUnitTransactionType transactionType = getTransactionType(top.getAttribute("transaction-type"));
    if (transactionType != null)
    {
        metadata.setTransactionType(transactionType);
    }
    return metadata;
}
|
python
|
def create_default_layout(app, message='',
                          lexer=None, is_password=False,
                          reserve_space_for_menu=False,
                          get_prompt_tokens=None,
                          get_bottom_toolbar_tokens=None,
                          display_completions_in_columns=False,
                          extra_input_processors=None, multiline=False):
    """
    Generate default layout.
    Returns a ``Layout`` instance.

    :param app: application object; queried (via ``HasDocumentation``) to
        decide when the documentation pane and its separators are shown.
    :param message: Text to be used as prompt.
    :param lexer: Lexer to be used for the highlighting.
    :param is_password: `bool` or `CLIFilter`. When True, display input as '*'.
    :param reserve_space_for_menu: When True, make sure that a minimal height
        is allocated in the terminal, in order to display the completion menu.
    :param get_prompt_tokens: An optional callable that returns the tokens to
        be shown in the menu. (To be used instead of a `message`.)
    :param get_bottom_toolbar_tokens: An optional callable that returns the
        tokens for a toolbar at the bottom.
    :param display_completions_in_columns: `bool` or `CLIFilter`. Display the
        completions in multiple columns.
    :param extra_input_processors: optional list of extra ``Processor``
        instances, appended after the default ones.
    :param multiline: `bool` or `CLIFilter`. When True, prefer a layout that is
        more adapted for multiline input. Text after newlines is automatically
        indented, and search/arg input is shown below the input, instead of
        replacing the prompt.
    """
    assert isinstance(message, text_type)
    assert (get_bottom_toolbar_tokens is None or
            callable(get_bottom_toolbar_tokens))
    assert get_prompt_tokens is None or callable(get_prompt_tokens)
    # 'message' and 'get_prompt_tokens' are mutually exclusive ways to
    # specify the prompt.
    assert not (message and get_prompt_tokens)
    display_completions_in_columns = to_cli_filter(
        display_completions_in_columns)
    multiline = to_cli_filter(multiline)
    if get_prompt_tokens is None:
        get_prompt_tokens = lambda _: [(Token.Prompt, message)]
    # Split the prompt so multiline mode can render the part above the input
    # and the part next to it in separate panes.
    get_prompt_tokens_1, get_prompt_tokens_2 = _split_multiline_prompt(
        get_prompt_tokens)
    # `lexer` is supposed to be a `Lexer` instance. But if a Pygments lexer
    # class is given, turn it into a PygmentsLexer. (Important for
    # backwards-compatibility.)
    try:
        if issubclass(lexer, Lexer):
            lexer = PygmentsLexer(lexer)
    except TypeError:
        # Happens when lexer is `None` or an instance of something else.
        pass
    # Create processors list.
    # (DefaultPrompt should always be at the end.)
    input_processors = [
        ConditionalProcessor(
            # By default, only highlight search when the search
            # input has the focus. (Note that this doesn't mean
            # there is no search: the Vi 'n' binding for instance
            # still allows to jump to the next match in
            # navigation mode.)
            HighlightSearchProcessor(preview_search=Always()),
            HasFocus(SEARCH_BUFFER)),
        HighlightSelectionProcessor(),
        ConditionalProcessor(
            AppendAutoSuggestion(), HasFocus(DEFAULT_BUFFER) & ~IsDone()),
        ConditionalProcessor(PasswordProcessor(), is_password)
    ]
    if extra_input_processors:
        input_processors.extend(extra_input_processors)
    # Show the prompt before the input (using the DefaultPrompt processor).
    # This also replaces it with reverse-i-search and 'arg' when required.
    # (Only for single line mode.)
    input_processors.append(ConditionalProcessor(
        DefaultPrompt(get_prompt_tokens), ~multiline))
    # Create bottom toolbar.
    if get_bottom_toolbar_tokens:
        toolbars = [ConditionalContainer(
            Window(TokenListControl(get_bottom_toolbar_tokens,
                                    default_char=Char(' ', Token.Toolbar)),
                   height=LayoutDimension.exact(1)),
            filter=~IsDone() & RendererHeightIsKnown())]
    else:
        toolbars = []
    def get_height(cli):
        # If there is an autocompletion menu to be shown, make sure that our
        # layout has at least a minimal height in order to display it.
        if reserve_space_for_menu and not cli.is_done:
            return LayoutDimension(min=8)
        else:
            return LayoutDimension()
    def separator():
        # Horizontal rule shown only while the documentation pane is visible.
        return ConditionalContainer(
            content=Window(height=LayoutDimension.exact(1),
                           content=FillControl(u'\u2500',
                                               token=Token.Separator)),
            filter=HasDocumentation(app) & ~IsDone())
    # Create and return Layout instance.
    return HSplit([
        ConditionalContainer(
            Window(
                TokenListControl(get_prompt_tokens_1),
                dont_extend_height=True),
            filter=multiline,
        ),
        VSplit([
            # In multiline mode, the prompt is displayed in a left pane.
            ConditionalContainer(
                Window(
                    TokenListControl(get_prompt_tokens_2),
                    dont_extend_width=True,
                ),
                filter=multiline,
            ),
            # The main input, with completion menus floating on top of it.
            FloatContainer(
                Window(
                    BufferControl(
                        input_processors=input_processors,
                        lexer=lexer,
                        # Enable preview_search, we want to have immediate
                        # feedback in reverse-i-search mode.
                        preview_search=Always(),
                        focus_on_click=True,
                    ),
                    get_height=get_height,
                ),
                [
                    Float(xcursor=True,
                          ycursor=True,
                          content=CompletionsMenu(
                              max_height=16,
                              scroll_offset=1,
                              extra_filter=(HasFocus(DEFAULT_BUFFER) &
                                            ~display_completions_in_columns))),
                    Float(xcursor=True,
                          ycursor=True,
                          content=MultiColumnCompletionsMenu(
                              extra_filter=(HasFocus(DEFAULT_BUFFER) &
                                            display_completions_in_columns),
                              show_meta=Always()))
                ]
            ),
        ]),
        separator(),
        # Documentation pane backed by the 'clidocs' buffer, shown only while
        # HasDocumentation(app) is true.
        ConditionalContainer(
            content=Window(
                BufferControl(
                    focus_on_click=True,
                    buffer_name=u'clidocs',
                ),
                height=LayoutDimension(max=15)),
            filter=HasDocumentation(app) & ~IsDone(),
        ),
        separator(),
        ValidationToolbar(),
        SystemToolbar(),
        # In multiline mode, we use two toolbars for 'arg' and 'search'.
        ConditionalContainer(ArgToolbar(), multiline),
        ConditionalContainer(SearchToolbar(), multiline),
    ] + toolbars)
|
python
|
def tokenize(text):
    """
    Yield tokens.

    Args:
        text (str): The original text.

    Yields:
        dict: The next token, with keys 'stemmed' (Porter-stemmed form),
            'unstemmed' (the raw lowercase token) and 'offset' (the token's
            0-based position in the text).
    """
    stemmer = PorterStemmer()
    # Tokens are maximal runs of lowercase letters in the lowercased text.
    for index, token_match in enumerate(re.finditer('[a-z]+', text.lower())):
        raw = token_match.group(0)
        yield {
            'stemmed': stemmer.stem(raw),
            'unstemmed': raw,
            'offset': index,
        }
|
java
|
/**
 * Evaluates a function call expression to a SoyValue.
 *
 * <p>Builtin (nonplugin) functions are dispatched through the switch below;
 * plugin functions are evaluated by first visiting their arguments and then
 * delegating to {@code computeFunctionHelper}.
 */
@Override
protected SoyValue visitFunctionNode(FunctionNode node) {
  Object soyFunction = node.getSoyFunction();
  // Handle nonplugin functions.
  if (soyFunction instanceof BuiltinFunction) {
    BuiltinFunction nonpluginFn = (BuiltinFunction) soyFunction;
    switch (nonpluginFn) {
      case IS_FIRST:
        return visitIsFirstFunction(node);
      case IS_LAST:
        return visitIsLastFunction(node);
      case INDEX:
        return visitIndexFunction(node);
      case CHECK_NOT_NULL:
        // checkNotNull operates on its single argument expression.
        return visitCheckNotNullFunction(node.getChild(0));
      case CSS:
        return visitCssFunction(node);
      case XID:
        return visitXidFunction(node);
      case IS_PRIMARY_MSG_IN_USE:
        return visitIsPrimaryMsgInUseFunction(node);
      case V1_EXPRESSION:
        // v1 expressions are explicitly unsupported in this backend.
        throw new UnsupportedOperationException(
            "the v1Expression function can't be used in templates compiled to Java");
      case TO_FLOAT:
        return visitToFloatFunction(node);
      case DEBUG_SOY_TEMPLATE_INFO:
        return BooleanData.forValue(debugSoyTemplateInfo);
      case VE_DATA:
        // VE data has no runtime value in this interpreter.
        return NullData.INSTANCE;
      case MSG_WITH_ID:
      case REMAINDER:
        // should have been removed earlier in the compiler
        throw new AssertionError();
    }
    // Unreachable for any known BuiltinFunction value.
    throw new AssertionError();
  } else if (soyFunction instanceof SoyJavaFunction) {
    List<SoyValue> args = this.visitChildren(node);
    SoyJavaFunction fn = (SoyJavaFunction) soyFunction;
    // Note: Arity has already been checked by CheckFunctionCallsVisitor.
    return computeFunctionHelper(fn, args, node);
  } else if (soyFunction instanceof SoyJavaSourceFunction) {
    List<SoyValue> args = this.visitChildren(node);
    SoyJavaSourceFunction fn = (SoyJavaSourceFunction) soyFunction;
    // Note: Arity has already been checked by CheckFunctionCallsVisitor.
    return computeFunctionHelper(fn, args, node);
  } else if (soyFunction instanceof LoggingFunction) {
    // Logging functions render as their static placeholder text here.
    return StringData.forValue(((LoggingFunction) soyFunction).getPlaceholder());
  } else {
    throw RenderException.create(
        "Failed to find Soy function with name '"
            + node.getFunctionName()
            + "'"
            + " (function call \""
            + node.toSourceString()
            + "\").");
  }
}
|
java
|
public ResultList<Collection> searchCollection(String query, Integer page, String language) throws MovieDbException {
return tmdbSearch.searchCollection(query, page, language);
}
|
python
|
def com_google_fonts_check_name_subfamilyname(ttFont,
                                              style_with_spaces,
                                              familyname_with_spaces):
  """ Check name table: FONT_SUBFAMILY_NAME entries. """
  from fontbakery.utils import name_entry_id

  all_good = True
  for entry in ttFont['name'].names:
    # Only FONT_SUBFAMILY_NAME records are inspected.
    if entry.nameID != NameID.FONT_SUBFAMILY_NAME:
      continue

    if entry.platformID == PlatformID.MACINTOSH:
      # Macintosh entries carry the full style name.
      expected = style_with_spaces
    elif entry.platformID == PlatformID.WINDOWS:
      # Windows entries are restricted to the RIBBI set.
      if style_with_spaces in ("Bold", "Bold Italic"):
        expected = style_with_spaces
      elif "Italic" in style_with_spaces:
        expected = "Italic"
      else:
        expected = "Regular"
    else:
      # Any other platform should not carry this name record at all.
      yield FAIL, Message("invalid-entry",
                          ("Font should not have a "
                           "{} entry!").format(name_entry_id(entry)))
      all_good = False
      continue

    actual = entry.string.decode(entry.getEncoding()).strip()
    if actual != expected:
      all_good = False
      yield FAIL, Message("bad-familyname",
                          ("Entry {} on the 'name' table: "
                           "Expected '{}' "
                           "but got '{}'.").format(name_entry_id(entry),
                                                   expected,
                                                   actual))

  if all_good:
    yield PASS, "FONT_SUBFAMILY_NAME entries are all good."
|
python
|
def handle_packet(self, packet):
    """Process an incoming packet dict.

    Forwards the packet to ``self.packet_callback`` when one is registered;
    otherwise falls back to printing it.
    """
    callback = self.packet_callback
    if not callback:
        # No callback registered: just dump the packet for inspection.
        print('packet', packet)
    else:
        callback(packet)
|
java
|
/**
 * Serializes the given value to its string form; a null value maps to null.
 *
 * @param eDataType     the EMF data type being converted (unused here)
 * @param instanceValue the value to serialize, possibly null
 * @return {@code instanceValue.toString()}, or null when the value is null
 */
public String convertLineDataObjectPositionMigrationTempOrientToString(EDataType eDataType, Object instanceValue) {
    if (instanceValue == null) {
        return null;
    }
    return instanceValue.toString();
}
|
java
|
/**
 * Installs every module component of the network context on the given node.
 *
 * @param node        the target node
 * @param context     the network whose module components are installed
 * @param doneHandler invoked once all module installations have completed
 */
private void installModules(final Node node, final NetworkContext context, final Handler<AsyncResult<Void>> doneHandler) {
  // Filter the network's components down to module components.
  final List<ModuleContext> moduleContexts = new ArrayList<>();
  for (ComponentContext<?> componentContext : context.components()) {
    if (!componentContext.isModule()) {
      continue;
    }
    moduleContexts.add(componentContext.asModule());
  }
  // The counter fires doneHandler after every install reports back.
  final CountingCompletionHandler<Void> completion = new CountingCompletionHandler<Void>(moduleContexts.size()).setHandler(doneHandler);
  for (ModuleContext moduleContext : moduleContexts) {
    installModule(node, moduleContext, completion);
  }
}
|
python
|
def get_height(self, points, edge=True, attach=False, extra_height=0):
    """
    Launch ``pyny.Place.get_height(points)`` recursively for all
    the ``pyny.Place``.

    The points outside the object will have a NaN value in the
    z column. These point will not be stored but it will be
    returned.

    :param points: list of coordinates of the points to calculate.
    :type points: ndarray (shape=(N, 2 or 3))
    :param edge: If True, consider the points in the Polygon's edge
        inside the Polygon.
    :type edge: bool
    :param attach: If True, stores the computed points along with
        the Place's set of points.
    :type attach: bool
    :param extra_height: Adds an extra height (z value) to the
        resulting points.
    :type extra_height: float
    :returns: (x, y, z) when ``attach`` is False; None when ``attach``
        is True (the points are stored on each Place instead).
    :rtype: ndarray or None
    """
    # Thread the running result through every contained Place in order.
    result = points
    for place in self:
        result = place.get_height(result, edge, attach, extra_height)
    if attach:
        return None
    return result
|
java
|
/**
 * Restores the node at the given path to its base version (an UNCHECKOUT-style
 * operation), mapping repository exceptions to WebDAV-ish HTTP status codes.
 *
 * @param session the JCR session used to resolve the node
 * @param path the repository path of the node to restore
 * @return 200 OK on success; 409 when versioning is unsupported on the node;
 *         423 when the node is locked; 404 when the path does not exist;
 *         500 for any other repository failure
 */
public Response uncheckout(Session session, String path)
{
   try
   {
      // Resolve the node relative to the workspace root.
      Node node = session.getRootNode().getNode(TextUtil.relativizePath(path));
      // Roll the node back to its base version, removing existing state.
      Version restoreVersion = node.getBaseVersion();
      node.restore(restoreVersion, true);
      return Response.ok().header(HttpHeaders.CACHE_CONTROL, "no-cache").build();
   }
   catch (UnsupportedRepositoryOperationException exc)
   {
      // Node is not versionable: report a conflict.
      return Response.status(HTTPStatus.CONFLICT).entity(exc.getMessage()).build();
   }
   catch (LockException exc)
   {
      return Response.status(HTTPStatus.LOCKED).entity(exc.getMessage()).build();
   }
   catch (PathNotFoundException exc)
   {
      return Response.status(HTTPStatus.NOT_FOUND).entity(exc.getMessage()).build();
   }
   catch (RepositoryException exc)
   {
      // Unexpected repository failure: log with stack trace and return 500.
      LOG.error(exc.getMessage(), exc);
      return Response.serverError().entity(exc.getMessage()).build();
   }
}
|
java
|
/**
 * Paints the gauge: background, the 13-LED bargraph (7 green, 5 yellow,
 * 1 red), the optional LCD display (value, unit, info string, threshold
 * indicator) and the foreground/disabled overlays.
 */
@Override
protected void paintComponent(Graphics g) {
    if (!isInitialized()) {
        return;
    }
    super.paintComponent(g);
    // Work on a copy of the Graphics so hints/translation don't leak out.
    final Graphics2D G2 = (Graphics2D) g.create();
    G2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
    G2.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING, RenderingHints.VALUE_COLOR_RENDER_QUALITY);
    G2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
    G2.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
    G2.translate(getInnerBounds().x, getInnerBounds().y);
    // Draw combined background image
    G2.drawImage(bImage, 0, 0, null);
    // LED bargraph: indices 0-6 green, 7-11 yellow, 12 red; an LED is lit
    // when its index is below noOfActiveLeds.
    for (int i = 0; i < 13; i++) {
        if (i < 7) {
            if (i < noOfActiveLeds) {
                G2.drawImage(ledGreenOn, ledPosition[i].x, ledPosition[i].y, null);
            } else {
                G2.drawImage(ledGreenOff, ledPosition[i].x, ledPosition[i].y, null);
            }
        }
        if (i >= 7 && i < 12) {
            if (i < noOfActiveLeds) {
                G2.drawImage(ledYellowOn, ledPosition[i].x, ledPosition[i].y, null);
            } else {
                G2.drawImage(ledYellowOff, ledPosition[i].x, ledPosition[i].y, null);
            }
        }
        if (i == 12) {
            if (i < noOfActiveLeds) {
                G2.drawImage(ledRedOn, ledPosition[i].x, ledPosition[i].y, null);
            } else {
                G2.drawImage(ledRedOff, ledPosition[i].x, ledPosition[i].y, null);
            }
        }
    }
    // Draw LCD display
    if (isLcdVisible()) {
        if (getLcdColor() == LcdColor.CUSTOM) {
            G2.setColor(getCustomLcdForeground());
        } else {
            G2.setColor(getLcdColor().TEXT_COLOR);
        }
        G2.setFont(getLcdUnitFont());
        // Width of the unit string; the value string is right-aligned next
        // to it, so 0 when no unit is shown.
        final double UNIT_STRING_WIDTH;
        if (isLcdUnitStringVisible()) {
            unitLayout = new TextLayout(getLcdUnitString(), G2.getFont(), RENDER_CONTEXT);
            UNIT_BOUNDARY.setFrame(unitLayout.getBounds());
            G2.drawString(getLcdUnitString(), (int) (((getGaugeBounds().width - LCD.getWidth()) / 2.0) + (LCD.getWidth() - UNIT_BOUNDARY.getWidth()) - LCD.getWidth() * 0.03), (int) ((getGaugeBounds().height * 0.425) + LCD.getHeight() * 0.76));
            UNIT_STRING_WIDTH = UNIT_BOUNDARY.getWidth();
        } else {
            UNIT_STRING_WIDTH = 0;
        }
        G2.setFont(getLcdValueFont());
        // Render the value in the configured number system (hex/oct/dec).
        switch (getModel().getNumberSystem()) {
            case HEX:
                valueLayout = new TextLayout(Integer.toHexString((int) getLcdValue()).toUpperCase(), G2.getFont(), RENDER_CONTEXT);
                VALUE_BOUNDARY.setFrame(valueLayout.getBounds());
                G2.drawString(Integer.toHexString((int) getLcdValue()).toUpperCase(), (int) (((getGaugeBounds().width - LCD.getWidth()) / 2.0) + (LCD.getWidth() - UNIT_STRING_WIDTH - VALUE_BOUNDARY.getWidth()) - LCD.getWidth() * 0.09), (int) ((getGaugeBounds().height * 0.425) + LCD.getHeight() * 0.76));
                break;
            case OCT:
                valueLayout = new TextLayout(Integer.toOctalString((int) getLcdValue()), G2.getFont(), RENDER_CONTEXT);
                VALUE_BOUNDARY.setFrame(valueLayout.getBounds());
                G2.drawString(Integer.toOctalString((int) getLcdValue()), (int) (((getGaugeBounds().width - LCD.getWidth()) / 2.0) + (LCD.getWidth() - UNIT_STRING_WIDTH - VALUE_BOUNDARY.getWidth()) - LCD.getWidth() * 0.09), (int) ((getGaugeBounds().height * 0.425) + LCD.getHeight() * 0.76));
                break;
            case DEC:
            default:
                valueLayout = new TextLayout(formatLcdValue(getLcdValue()), G2.getFont(), RENDER_CONTEXT);
                VALUE_BOUNDARY.setFrame(valueLayout.getBounds());
                G2.drawString(formatLcdValue(getLcdValue()), (int) (((getGaugeBounds().width - LCD.getWidth()) / 2.0) + (LCD.getWidth() - UNIT_STRING_WIDTH - VALUE_BOUNDARY.getWidth()) - LCD.getWidth() * 0.09), (int) ((getGaugeBounds().height * 0.425) + LCD.getHeight() * 0.76));
                break;
        }
        // Draw lcd info string
        if (!getLcdInfoString().isEmpty()) {
            G2.setFont(getLcdInfoFont());
            infoLayout = new TextLayout(getLcdInfoString(), G2.getFont(), RENDER_CONTEXT);
            INFO_BOUNDARY.setFrame(infoLayout.getBounds());
            G2.drawString(getLcdInfoString(), LCD.getBounds().x + 5, LCD.getBounds().y + (int) INFO_BOUNDARY.getHeight() + 5);
        }
        // Draw lcd threshold indicator (decimal mode only).
        if (getLcdNumberSystem() == NumberSystem.DEC && isLcdThresholdVisible() && getLcdValue() >= getLcdThreshold()) {
            G2.drawImage(lcdThresholdImage, (int) (LCD.getX() + LCD.getHeight() * 0.0568181818), (int) (LCD.getY() + LCD.getHeight() - lcdThresholdImage.getHeight() - LCD.getHeight() * 0.0568181818), null);
        }
    }
    // Draw combined foreground image
    G2.drawImage(fImage, 0, 0, null);
    if (!isEnabled()) {
        G2.drawImage(disabledImage, 0, 0, null);
    }
    // Undo the translation and release the Graphics copy.
    G2.translate(-getInnerBounds().x, -getInnerBounds().y);
    G2.dispose();
}
|
java
|
/**
 * Writes the entry's id element: an ATOM_ID-named element whose text content
 * is the entry-id string derived from the entity.
 *
 * @param entity the entity whose id is serialized
 * @throws XMLStreamException if the underlying XML writer fails
 * @throws ODataEdmException if the entry id string cannot be produced
 */
void writeEntryId(Object entity) throws XMLStreamException, ODataEdmException {
    xmlWriter.writeStartElement(ATOM_ID);
    xmlWriter.writeCharacters(getEntryIdString(entity));
    xmlWriter.writeEndElement();
}
|
java
|
/**
 * Increments the float value of a hash field by the given amount.
 *
 * @param key   the hash key
 * @param field the field within the hash
 * @param value the increment to apply
 * @return the field's value after the increment
 */
public Double hincrByFloat(Object key, Object field, double value) {
    // Borrow a connection from the pool and always hand it back.
    Jedis jedis = getJedis();
    try {
        byte[] rawKey = keyToBytes(key);
        byte[] rawField = fieldToBytes(field);
        return jedis.hincrByFloat(rawKey, rawField, value);
    } finally {
        close(jedis);
    }
}
|
java
|
/**
 * Creates a trivial database view ("select 1") with the given name.
 *
 * @param _con  open connection the DDL statement runs on
 * @param _view name of the view to create
 * @return this instance, cast to the concrete subtype (fluent API)
 * @throws SQLException if the DDL statement fails
 */
@SuppressWarnings("unchecked")
public T createView(final Connection _con,
                    final String _view)
    throws SQLException
{
    final Statement stmt = _con.createStatement();
    try {
        // Plain concatenation; _view is a test-controlled identifier.
        stmt.executeUpdate("create view " + _view + " as select 1");
    } finally {
        // Release the statement even when the DDL fails.
        stmt.close();
    }
    return (T) this;
}
|
python
|
def get_datanode_fp_meta(fp):
    """Processes a datanode style file path.

    Section 3.2 of the `Data Reference Syntax`_ details:

        It is recommended that ESGF data nodes should layout datasets
        on disk mapping DRS components to directories as:

        <activity>/<product>/<institute>/<model>/<experiment>/
        <frequency>/<modeling_realm>/<mip_table>/<ensemble_member>/
        <version_number>/<variable_name>/<CMOR filename>.nc

    Arguments:
        fp (str): A file path conforming to DRS spec.

    Returns:
        dict: Metadata as extracted from the file path.

    .. _Data Reference Syntax:
        http://cmip-pcmdi.llnl.gov/cmip5/docs/cmip5_data_reference_syntax.pdf
    """
    # Copy the expected directory-component names so the module-level
    # template list is never mutated.
    # NOTE(review): an earlier comment said this list is reversed "to start
    # at end of path", but no reversal happens here — presumably
    # get_dir_meta() walks the path from its end itself; confirm against
    # that helper's implementation.
    directory_meta = list(CMIP5_DATANODE_FP_ATTS)
    # Directory-derived metadata first; filename-derived values then
    # override it on key collisions (filename metadata is preferred).
    meta = get_dir_meta(fp, directory_meta)
    meta.update(get_cmor_fname_meta(fp))
    return meta
|
java
|
/**
 * Builds a MonomerStore from the "Monomers" child of the given XML element.
 * A missing "Monomers" element yields an empty store.
 *
 * @param rootElement XML element expected to contain a "Monomers" child
 * @return a store populated with every parsed "Monomer" child element
 * @throws MonomerException if a monomer element cannot be parsed/added
 * @throws IOException on I/O failure while parsing a monomer
 */
public static MonomerStore getMonomerStore(Element rootElement)
    throws MonomerException, IOException {
  MonomerStore store = new MonomerStore();
  Element monomersElement = rootElement.getChild("Monomers");
  // Guard clause: no <Monomers> section means an empty store.
  if (monomersElement == null) {
    return store;
  }
  @SuppressWarnings("unchecked")
  List<Element> monomerElements = monomersElement.getChildren("Monomer");
  for (Element monomerElement : monomerElements) {
    store.addMonomer(MonomerParser.getMonomer(monomerElement));
  }
  return store;
}
|
python
|
def generate_salt_cmd(target, module, args=None, kwargs=None):
    """
    Generates a command (the arguments) for the `salt` or `salt-ssh` CLI.

    :param target: minion target expression; falsy values default to '*'.
    :param module: the execution module/function to run (e.g. 'test.ping').
    :param args: optional iterable of positional argument strings.
    :param kwargs: optional mapping rendered as ``key=value`` tokens.
    :returns: list of CLI argument strings: [quoted target, module,
        *args, *key=value pairs].
    """
    args = args or []
    kwargs = kwargs or {}
    # Quote the target so shell glob characters (e.g. 'web*') survive.
    target = '"%s"' % (target or '*')
    cmd = [target, module]
    cmd.extend(args)
    # dict insertion order is preserved (Python 3.7+), so token order is
    # deterministic for a given kwargs mapping.
    cmd.extend('{0}={1}'.format(key, value) for key, value in kwargs.items())
    return cmd
|
java
|
/**
 * Appends a POSIX shell cleanup snippet to the given writer: the script polls
 * every 3 seconds, removing the {@code dir} directory until it no longer
 * exists, then deletes the script file itself.
 *
 * @param file the script file being written (self-deleted by the last line)
 * @param bw   writer positioned inside the script body
 * @throws IOException if writing to the script fails
 */
private void writeUnixCleanup(File file, BufferedWriter bw) throws IOException {
    bw.write("echo begin delete" + "\n");
    // n is the loop flag: stays 0 while the directory still exists.
    bw.write("n=0" + "\n");
    bw.write("while [ $n -ne 1 ]; do" + "\n");
    bw.write("    sleep 3" + "\n");
    // Backslashes are normalized to forward slashes for the Unix shell.
    bw.write("    if [ -e " + dir.replace('\\', '/') + " ]; then" + "\n");
    bw.write("        rm -rf " + dir.replace('\\', '/') + "\n");
    bw.write("    else" + "\n");
    bw.write("        echo file not found - n=$n" + "\n");
    bw.write("        n=1" + "\n");
    bw.write("    fi" + "\n");
    bw.write("done" + "\n");
    bw.write("echo end delete" + "\n");
    // Finally the script removes itself.
    bw.write("rm " + file.getAbsolutePath().replace('\\', '/') + "\n");
}
|
java
|
/**
 * Reflective feature accessor: returns the PDF_SIZE feature locally and
 * defers every other feature id to the superclass.
 */
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
    if (featureID == AfplibPackage.OBJECT_CONTAINER_PRESENTATION_SPACE_SIZE__PDF_SIZE) {
        return getPDFSize();
    }
    return super.eGet(featureID, resolve, coreType);
}
|
java
|
/**
 * Checks whether the activity already has a job declaration registered for
 * the candidate declaration's event type.
 *
 * @param jobDeclarationsForActivity declarations already on the activity
 * @param jobDeclaration             the candidate declaration
 * @return true when any existing declaration shares the candidate's event type
 */
protected boolean activityAlreadyContainsJobDeclarationEventType(List<EventSubscriptionJobDeclaration> jobDeclarationsForActivity,
                                                                 EventSubscriptionJobDeclaration jobDeclaration){
  for (EventSubscriptionJobDeclaration existing : jobDeclarationsForActivity) {
    boolean sameEventType = existing.getEventType().equals(jobDeclaration.getEventType());
    if (sameEventType) {
      return true;
    }
  }
  return false;
}
|
java
|
/**
 * Sets how child transitions are played.
 *
 * @param ordering ORDERING_SEQUENTIAL or ORDERING_TOGETHER
 * @return this TransitionSet (fluent API)
 * @throws AndroidRuntimeException for any other ordering value
 */
@NonNull
public TransitionSet setOrdering(int ordering) {
    // Map the public ordering constant onto the internal boolean flag.
    if (ordering == ORDERING_SEQUENTIAL) {
        mPlayTogether = false;
    } else if (ordering == ORDERING_TOGETHER) {
        mPlayTogether = true;
    } else {
        throw new AndroidRuntimeException("Invalid parameter for TransitionSet " +
                "ordering: " + ordering);
    }
    return this;
}
|
java
|
/**
 * Resolves the client IP behind reverse proxies by probing headers in a
 * fixed preference order: the caller-supplied header, then X-Real-IP,
 * X-Forwarded-For (first hop), Proxy-Client-IP, WL-Proxy-Client-IP.
 *
 * @param realIpHeader optional header name to check first (may be null/empty)
 * @return the resolved IP (lowercased when taken from a fallback header),
 *         or null/blank when no header yields a usable value
 */
public String getProxyRemoteAddr(String realIpHeader) {
    String ip = null;
    // 1) Caller-configured header takes precedence when provided.
    if (realIpHeader != null && realIpHeader.length() != 0) {
        ip = getHeaderString(realIpHeader);
    }
    if (StringUtils.isBlank(ip)) {
        ip = getHeaderString("X-Real-IP");
        if (StringUtils.isBlank(ip) || "unknown".equalsIgnoreCase(ip)) {
            ip = getHeaderString("X-Forwarded-For");
            // X-Forwarded-For may be a comma-separated chain; the first
            // entry is the originating client.
            if (StringUtils.isNotBlank(ip) && !"unknown".equalsIgnoreCase(ip)) {
                ip = ip.split(",")[0];
            }
        }
        if (StringUtils.isBlank(ip) || "unknown".equalsIgnoreCase(ip)) {
            ip = getHeaderString("Proxy-Client-IP");
        }
        if (StringUtils.isBlank(ip) || "unknown".equalsIgnoreCase(ip)) {
            ip = getHeaderString("WL-Proxy-Client-IP");
        }
        // Normalize case only for values found via the fallback headers.
        if (StringUtils.isNotBlank(ip)) {
            ip = ip.toLowerCase();
        }
    }
    return ip;
}
|
java
|
/**
 * Registers an additional output stream on this tee; null streams are
 * silently ignored.
 *
 * @param stream    the stream to add (may be null)
 * @param autoclose whether this tee should close the stream automatically
 * @return this TeeOutputStream (fluent API)
 */
public TeeOutputStream with(OutputStream stream, boolean autoclose) {
    if (stream == null) {
        return this;
    }
    this.streams.put(stream, autoclose);
    return this;
}
|
python
|
def axes_grid(n, sharex=False, sharey=False, subplot_kw=None, **fig_kw):
    '''Finds a reasonable arrangement of n axes. Returns (fig, axes) tuple.
    For keyword arguments descriptions, see matplotlib.pyplot.subplots'''
    # Near-square layout: rows = floor(sqrt(n)), columns = however many
    # are needed to fit all n axes.
    nrows = int(np.floor(np.sqrt(n)))
    ncols = int(np.ceil(n / nrows))
    fig, axes = plt.subplots(nrows=nrows, ncols=ncols,
                             figsize=(ncols * 4, nrows * 4), squeeze=False,
                             sharex=sharex, sharey=sharey,
                             subplot_kw=subplot_kw, **fig_kw)
    # The grid may hold more cells than requested; blank out the extras.
    for spare_ax in axes.flat[n:]:
        spare_ax.set_axis_off()
    return fig, axes
|
python
|
def subtract_imagenet_mean_preprocess_batch(batch):
    """Subtract ImageNet mean pixel-wise from a BGR image."""
    # NOTE(review): the split names (r, g, b) together with the per-channel
    # means (123.680 / 116.779 / 103.939 are the ImageNet R/G/B means) and
    # the concat order (b, g, r) suggest the input is actually RGB and the
    # output is BGR with means subtracted — confirm against callers, as the
    # docstring above says "BGR image".
    # Swap so the channel axis is first, making per-channel split possible.
    batch = F.swapaxes(batch,0, 1)
    (r, g, b) = F.split(batch, num_outputs=3, axis=0)
    r = r - 123.680
    g = g - 116.779
    b = b - 103.939
    # Reassemble channels in b, g, r order, then restore the axis layout.
    batch = F.concat(b, g, r, dim=0)
    batch = F.swapaxes(batch,0, 1)
    return batch
|
java
|
/**
 * Lists all tasks of the given job as a lazily-paged list: the first page is
 * fetched synchronously here, and subsequent pages are fetched on demand via
 * the overridden nextPage hook.
 *
 * @param jobId the id of the job whose tasks are listed
 * @return a PagedList over the job's tasks
 */
public PagedList<CloudTask> list(final String jobId) {
    // Block on the first page; its headers/body seed the paged list.
    ServiceResponseWithHeaders<Page<CloudTask>, TaskListHeaders> response = listSinglePageAsync(jobId).toBlocking().single();
    return new PagedList<CloudTask>(response.body()) {
        @Override
        public Page<CloudTask> nextPage(String nextPageLink) {
            // Each further page is fetched synchronously when iterated to.
            return listNextSinglePageAsync(nextPageLink, null).toBlocking().single().body();
        }
    };
}
|
java
|
/**
 * Retypes an expression tree to its erased type and, when needed, coerces it
 * to a target type.
 *
 * <p>For non-primitive erasures the tree's type is overwritten with
 * {@code erasedType}; if a primitive target was requested, the coercion
 * target is replaced by the erasure of the tree's original type before the
 * overwrite. Primitive erasures are returned unchanged.
 *
 * @param tree       the expression to retype (its {@code type} field may be mutated)
 * @param erasedType the erasure to stamp onto the tree
 * @param target     the desired result type, or null for no coercion
 * @return the (possibly coerced) expression
 */
JCExpression retype(JCExpression tree, Type erasedType, Type target) {
    // System.err.println("retype " + tree + " to " + erasedType);//DEBUG
    if (!erasedType.isPrimitive()) {
        if (target != null && target.isPrimitive()) {
            // Coerce via the erasure of the original type, not the primitive
            // target; note this reads tree.type before it is overwritten.
            target = erasure(tree.type);
        }
        tree.type = erasedType;
        if (target != null) {
            return coerce(tree, target);
        }
    }
    return tree;
}
|
python
|
def reset_awards(**kwargs):
    """
    Resets badges stats.

    Keyword Args:
        badges: badge slug (str) or list/tuple of slugs to restrict the
            reset to.
        exclude_badges: badge slug (str) or list/tuple of slugs to leave
            untouched.
    """
    def _normalize(option):
        # Accept a single slug as well as a list/tuple of slugs.
        if option and not isinstance(option, (list, tuple)):
            return [option]
        return option

    # Bug fix: the original normalized inside `for option in [...]`, but
    # reassigning the loop variable discarded the wrapped list, so a bare
    # string slug would later be iterated character-by-character by the
    # `__in` lookups below.
    filter_badges = _normalize(kwargs.get('badges', None))
    exclude_badges = _normalize(kwargs.get('exclude_badges', None))
    # Detach the per-award counter decrement; users_count is reset in bulk.
    signals.pre_delete.disconnect(sender=Award,
                                  dispatch_uid='badgify.award.pre_delete.decrement_badge_users_count')
    award_qs = Award.objects.all()
    badge_qs = Badge.objects.all()
    if filter_badges:
        award_qs = award_qs.filter(badge__slug__in=filter_badges)
        badge_qs = badge_qs.filter(slug__in=filter_badges)
    if exclude_badges:
        award_qs = award_qs.exclude(badge__slug__in=exclude_badges)
        badge_qs = badge_qs.exclude(slug__in=exclude_badges)
    awards_count = award_qs.count()
    award_qs.delete()
    logger.info('✓ Deleted %d awards', awards_count)
    badges_count = badge_qs.count()
    badge_qs.update(users_count=0)
    logger.info('✓ Reseted Badge.users_count field of %d badge(s)', badges_count)
|
java
|
/**
 * Returns the dependency contexts registered for the given topic and event
 * regex.
 *
 * <p>Bug fix: previously an unknown regex under a known topic returned null
 * while an unknown topic returned an empty list. Both cases now return an
 * empty list, so callers never need a null check.
 *
 * @param topic the Kafka topic
 * @param regex the event regex registered under that topic
 * @return the registered contexts, or an empty list when none exist
 */
public synchronized List<KafkaDependencyInstanceContext> getDepsByTopicAndEvent(final String topic,
    final String regex) {
  final Map<String, List<KafkaDependencyInstanceContext>> regexMap = this.topicEventMap.get(topic);
  if (regexMap == null) {
    return Collections.emptyList();
  }
  final List<KafkaDependencyInstanceContext> deps = regexMap.get(regex);
  return deps != null ? deps : Collections.emptyList();
}
|
java
|
/**
 * Finds the value assigned to a named argument of an annotation.
 *
 * @param annotationTree the annotation whose arguments are scanned
 * @param name           the argument name to look for
 * @return the assigned value expression, or null when the named argument
 *         is not given explicitly
 */
@Nullable
public static ExpressionTree getArgument(AnnotationTree annotationTree, String name) {
  for (ExpressionTree argument : annotationTree.getArguments()) {
    // Only explicit `name = value` pairs are considered.
    if (argument.getKind() == Tree.Kind.ASSIGNMENT) {
      AssignmentTree assignment = (AssignmentTree) argument;
      if (assignment.getVariable().toString().equals(name)) {
        return assignment.getExpression();
      }
    }
  }
  return null;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.