language (stringclasses: 2 values) | func_code_string (stringlengths: 63 to 466k)
---|---|
python | def get_all_service_user_objects(self, include_machine = False):
"""
Fetches all service user objects from the AD and yields MSADUser objects.
A service user is a user with the SPN (servicePrincipalName) attribute set.
"""
logger.debug('Polling AD for all user objects, machine accounts included: %s' % include_machine)
if include_machine:
ldap_filter = r'(servicePrincipalName=*)'
else:
ldap_filter = r'(&(servicePrincipalName=*)(!(sAMAccountName=*$)))'
attributes = MSADUser.ATTRS
for entry in self.pagedsearch(ldap_filter, attributes):
# TODO: return ldapuser object
yield MSADUser.from_ldap(entry, self._ldapinfo)
logger.debug('Finished polling for entries!')
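# Minimal usage sketch (assumes `conn` is an initialized instance of this
# class; the names below are illustrative, not from the source):
# for user in conn.get_all_service_user_objects():
#     print(user.sAMAccountName) |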
java | public void onCreate() {
Bundle bundle = new Bundle();
if (mParentDelegate != null) {
bundle = mParentDelegate.mBundle;
}
onCreate(bundle);
} |
java | public long read(OutputStream stream, long length, long position) throws IOException
{
return delegate.read(stream, length, position);
} |
python | def post_message(name,
channel,
from_name,
message,
api_key=None,
icon=None):
'''
Send a message to a Slack channel.
.. code-block:: yaml
slack-message:
slack.post_message:
- channel: '#general'
- from_name: SuperAdmin
- message: 'This state was executed successfully.'
- api_key: peWcBiMOS9HrZG15peWcBiMOS9HrZG15
The following parameters are required:
name
The unique name for this event.
channel
The channel to send the message to. Must be in the format "#channelname" or "@membername".
from_name
The name that is to be shown in the "from" field.
message
The message that is to be sent to the Slack channel.
The following parameters are optional:
api_key
The API key for Slack to use for authentication,
if not specified in the configuration options of master or minion.
icon
URL to an image to use as the icon for this message.
'''
ret = {'name': name,
'changes': {},
'result': False,
'comment': ''}
if __opts__['test']:
ret['comment'] = 'The following message is to be sent to Slack: {0}'.format(message)
ret['result'] = None
return ret
if not channel:
ret['comment'] = 'Slack channel is missing: {0}'.format(channel)
return ret
if not from_name:
ret['comment'] = 'Slack from name is missing: {0}'.format(from_name)
return ret
if not message:
ret['comment'] = 'Slack message is missing: {0}'.format(message)
return ret
try:
result = __salt__['slack.post_message'](
channel=channel,
message=message,
from_name=from_name,
api_key=api_key,
icon=icon,
)
except SaltInvocationError as sie:
ret['comment'] = 'Failed to send message ({0}): {1}'.format(sie, name)
else:
if isinstance(result, bool) and result:
ret['result'] = True
ret['comment'] = 'Sent message: {0}'.format(name)
else:
ret['comment'] = 'Failed to send message ({0}): {1}'.format(result['message'], name)
return ret |
java | @Override
public Map<String, Collection<String>> getRequireFeatureWithTolerates() {
// The feature may be an older feature which never had the tolerates information
// stored, in which case, look in the older requireFeature field and massage
// that info into the required format.
// Or there may just not be any required features at all.
Collection<RequireFeatureWithTolerates> rfwt = _asset.getWlpInformation().getRequireFeatureWithTolerates();
if (rfwt != null) {
Map<String, Collection<String>> rv = new HashMap<String, Collection<String>>();
for (RequireFeatureWithTolerates feature : rfwt) {
rv.put(feature.getFeature(), feature.getTolerates());
}
return rv;
}
// Newer field not present, check the older field
Collection<String> rf = _asset.getWlpInformation().getRequireFeature();
if (rf != null) {
Map<String, Collection<String>> rv = new HashMap<String, Collection<String>>();
for (String feature : rf) {
rv.put(feature, Collections.<String> emptyList());
}
return rv;
}
// No required features at all
return null;
} |
python | def in_qtconsole():
"""
check if we're inside an IPython qtconsole
DEPRECATED: This is no longer needed, or working, in IPython 3 and above.
"""
try:
ip = get_ipython()
front_end = (
ip.config.get('KernelApp', {}).get('parent_appname', "") or
ip.config.get('IPKernelApp', {}).get('parent_appname', "")
)
if 'qtconsole' in front_end.lower():
return True
except Exception:
return False
return False |
java | @UiThread
public int getParentAdapterPosition() {
int flatPosition = getAdapterPosition();
if (mExpandableAdapter == null || flatPosition == RecyclerView.NO_POSITION) {
return RecyclerView.NO_POSITION;
}
return mExpandableAdapter.getNearestParentPosition(flatPosition);
} |
python | def read(self, encoding="utf8"):
"""
:param encoding: text encoding used to decode the file contents
(ignored when ``self.key`` is set and the content is decrypted instead)
:return: the file contents, decoded or decrypted
"""
with open(self._filename, "rb") as f:
if self.key:
return get_module("mo_math.crypto").decrypt(f.read(), self.key)
else:
content = f.read().decode(encoding)
return content |
java | public static synchronized String char2DOS437( StringBuffer stringbuffer, int i, char c ) {
if (unicode2DOS437 == null) {
unicode2DOS437 = new char[0x10000];
for( int j = 0; j < 256; j++ ) {
char c1;
if ((c1 = unicode[2][j]) != '\uFFFF')
unicode2DOS437[c1] = (char) j;
}
}
if (i != 2) {
StringBuffer stringbuffer1 = new StringBuffer(stringbuffer.length());
for( int k = 0; k < stringbuffer.length(); k++ ) {
char c2 = unicode2DOS437[stringbuffer.charAt(k)];
stringbuffer1.append(c2 == 0 ? c : c2);
}
return new String(stringbuffer1);
} else {
return new String(stringbuffer);
}
} |
python | def proportional_char(self, action):
'''Specifies proportional characters. When turned on, the character spacing
set with charSpacing is used.
Args:
action: Turn proportional characters on or off.
Returns:
None
Raises:
RuntimeError: Invalid action.
'''
actions = {'off': 0,
'on': 1
}
if action in actions:
self.send(chr(27) + 'p' + chr(actions[action]))
else:
raise RuntimeError('Invalid action in function proportional_char')
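# Usage sketch (assumes `printer` is an instance of this class; the name is
# illustrative): printer.proportional_char('on') sends ESC 'p' chr(1). |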
python | def update_rejection_permissions(portal):
"""Adds the permission 'Reject Analysis Request' and update the permission
mappings accordingly """
updated = update_rejection_permissions_for(portal, "bika_ar_workflow",
"Reject Analysis Request")
if updated:
brains = api.search(dict(portal_type="AnalysisRequest"),
CATALOG_ANALYSIS_REQUEST_LISTING)
update_rolemappings_for(brains, "bika_ar_workflow")
updated = update_rejection_permissions_for(portal, "bika_sample_workflow",
"Reject Sample")
if updated:
brains = api.search(dict(portal_type="Sample"), "bika_catalog")
update_rolemappings_for(brains, "bika_sample_workflow") |
java | @Override
public synchronized boolean addEntry(Principal caller, Principal principal, String permission)
{
return addEntry(caller, new SecurityAccessControl(principal, permission));
} |
java | public static HLL fromBytes(final byte[] bytes) {
final ISchemaVersion schemaVersion = SerializationUtil.getSchemaVersion(bytes);
final IHLLMetadata metadata = schemaVersion.readMetadata(bytes);
final HLLType type = metadata.HLLType();
final int regwidth = metadata.registerWidth();
final int log2m = metadata.registerCountLog2();
final boolean sparseon = metadata.sparseEnabled();
final int expthresh;
if(metadata.explicitAuto()) {
expthresh = -1;
} else if(metadata.explicitOff()) {
expthresh = 0;
} else {
// NOTE: take into account that the postgres-compatible constructor
// subtracts one before taking a power of two.
expthresh = metadata.log2ExplicitCutoff() + 1;
}
final HLL hll = new HLL(log2m, regwidth, expthresh, sparseon, type);
// Short-circuit on empty, which needs no other deserialization.
if(HLLType.EMPTY.equals(type)) {
return hll;
}
final int wordLength;
switch(type) {
case EXPLICIT:
wordLength = Long.SIZE;
break;
case SPARSE:
wordLength = hll.shortWordLength;
break;
case FULL:
wordLength = hll.regwidth;
break;
default:
throw new RuntimeException("Unsupported HLL type " + type);
}
final IWordDeserializer deserializer =
schemaVersion.getDeserializer(type, wordLength, bytes);
switch(type) {
case EXPLICIT:
// NOTE: This should not exceed expthresh and this will always
// be exactly the number of words that were encoded,
// because the word length is at least a byte wide.
// SEE: IWordDeserializer#totalWordCount()
for(int i=0; i<deserializer.totalWordCount(); i++) {
hll.explicitStorage.add(deserializer.readWord());
}
break;
case SPARSE:
// NOTE: If the shortWordLength were smaller than 8 bits
// (1 byte) there would be a possibility (because of
// padding arithmetic) of having one or more extra
// registers read. However, this is not relevant as the
// extra registers will be all zeroes, which are ignored
// in the sparse representation.
for(int i=0; i<deserializer.totalWordCount(); i++) {
final long shortWord = deserializer.readWord();
final byte registerValue = (byte)(shortWord & hll.valueMask);
// Only set non-zero registers.
if (registerValue != 0) {
hll.sparseProbabilisticStorage.put((int)(shortWord >>> hll.regwidth), registerValue);
}
}
break;
case FULL:
// NOTE: Iteration is done using m (register count) and NOT
// deserializer#totalWordCount() because regwidth may be
// less than 8 and as such the padding on the 'last' byte
// may be larger than regwidth, causing an extra register
// to be read.
// SEE: IWordDeserializer#totalWordCount()
for(long i=0; i<hll.m; i++) {
hll.probabilisticStorage.setRegister(i, deserializer.readWord());
}
break;
default:
throw new RuntimeException("Unsupported HLL type " + type);
}
return hll;
}
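// Round-trip sketch (assumes the library's matching HLL#toBytes()
// serializer, as in java-hll; illustrative only):
// byte[] bytes = hll.toBytes();
// HLL restored = HLL.fromBytes(bytes); |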
python | async def write(self, data):
"""Writes a chunk of data to the streaming response.
:param data: bytes-ish data to be written.
"""
if not isinstance(data, bytes):
data = self._encode_body(data)
self.protocol.push_data(b"%x\r\n%b\r\n" % (len(data), data))
await self.protocol.drain()
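# Worked example of the chunk framing above (illustrative): writing b"hello"
# pushes b"5\r\nhello\r\n" -- the length in hex, CRLF, the payload, CRLF. |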
python | def rasterize_pdf(
input_file,
output_file,
xres,
yres,
raster_device,
log,
pageno=1,
page_dpi=None,
rotation=None,
filter_vector=False,
):
"""Rasterize one page of a PDF at resolution (xres, yres) in canvas units.
The image is sized to match the integer pixels dimensions implied by
(xres, yres) even if those numbers are noninteger. The image's DPI will
be overridden with the values in page_dpi.
:param input_file: pathlike
:param output_file: pathlike
:param xres: horizontal resolution at which to rasterize the page
:param yres: vertical resolution at which to rasterize the page
:param raster_device: Ghostscript output device to render with (e.g. png16m)
:param log: logger used for debug and error output
:param pageno: page number to rasterize (beginning at page 1)
:param page_dpi: resolution tuple (x, y) overriding output image DPI
:param rotation: 0, 90, 180, 270: clockwise angle to rotate page
:param filter_vector: if True, remove vector graphics objects
:return: None
"""
res = xres, yres
int_res = round(xres), round(yres)
if not page_dpi:
page_dpi = res
with NamedTemporaryFile(delete=True) as tmp:
args_gs = (
[
'gs',
'-dQUIET',
'-dSAFER',
'-dBATCH',
'-dNOPAUSE',
f'-sDEVICE={raster_device}',
f'-dFirstPage={pageno}',
f'-dLastPage={pageno}',
f'-r{str(int_res[0])}x{str(int_res[1])}',
]
+ (['-dFILTERVECTOR'] if filter_vector else [])
+ [
'-o',
tmp.name,
'-dAutoRotatePages=/None', # Probably has no effect on raster
'-f',
fspath(input_file),
]
)
log.debug(args_gs)
p = run(args_gs, stdout=PIPE, stderr=STDOUT, universal_newlines=True)
if _gs_error_reported(p.stdout):
log.error(p.stdout)
else:
log.debug(p.stdout)
if p.returncode != 0:
log.error('Ghostscript rasterizing failed')
raise SubprocessOutputError()
# Ghostscript only accepts integers for output resolution
# if the resolution happens to be fractional, then the discrepancy
# would change the size of the output page, especially if the DPI
# is quite low. Resize the image to the expected size
tmp.seek(0)
with Image.open(tmp) as im:
expected_size = (
round(im.size[0] / int_res[0] * res[0]),
round(im.size[1] / int_res[1] * res[1]),
)
if expected_size != im.size or page_dpi != (xres, yres):
log.debug(
f"Ghostscript: resize output image {im.size} -> {expected_size}"
)
im = im.resize(expected_size)
if rotation is not None:
log.debug("Rotating output by %i", rotation)
# rotation is a clockwise angle and Image.ROTATE_* is
# counterclockwise so this cancels out the rotation
if rotation == 90:
im = im.transpose(Image.ROTATE_90)
elif rotation == 180:
im = im.transpose(Image.ROTATE_180)
elif rotation == 270:
im = im.transpose(Image.ROTATE_270)
if rotation % 180 == 90:
page_dpi = page_dpi[1], page_dpi[0]
im.save(fspath(output_file), dpi=page_dpi)
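# Usage sketch (paths and logger are illustrative, not from the source;
# png16m is a standard Ghostscript color device):
# rasterize_pdf('in.pdf', 'page1.png', 200, 200, 'png16m',
#               logging.getLogger(__name__), pageno=1) |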
java | @Override
public void writeHeader(RandomAccessFile file) throws IOException {
super.writeHeader(file);
file.writeInt(this.k_max);
} |
java | public Callbacks fire(Object... o) {
if (!done) {
done = isOnce;
if (isMemory) {
memory = new ArrayList<>(Arrays.asList(o));
}
if (stack != null)
for (Object c : stack) {
if (!run(c, o) && stopOnFalse) {
break;
}
}
}
return this;
} |
java | protected Map<String,String> getHeadersMap(String headerPart) {
final int len = headerPart.length();
final Map<String,String> headers = new HashMap<String,String>();
int start = 0;
for (;;) {
int end = parseEndOfLine(headerPart, start);
if (start == end) {
break;
}
String header = headerPart.substring(start, end);
start = end + 2;
while (start < len) {
int nonWs = start;
while (nonWs < len) {
char c = headerPart.charAt(nonWs);
if (c != ' ' && c != '\t') {
break;
}
++nonWs;
}
if (nonWs == start) {
break;
}
// continuation line found
end = parseEndOfLine(headerPart, nonWs);
header += " " + headerPart.substring(nonWs, end);
start = end + 2;
}
// parse header line
final int colonOffset = header.indexOf(':');
if (colonOffset == -1) {
// this header line is malformed, skip it.
continue;
}
String headerName = header.substring(0, colonOffset).trim();
String headerValue = header.substring(header.indexOf(':') + 1).trim();
if (headers.containsKey(headerName)) {
headers.put( headerName, headers.get(headerName) + "," + headerValue );
} else {
headers.put(headerName, headerValue);
}
}
return headers;
}
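// Illustrative: given "Content-Type: text/plain\r\nX-A: 1\r\n 2\r\n\r\n",
// this yields {Content-Type=text/plain, X-A=1 2} -- the continuation line
// starting with whitespace is folded into the previous header. |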
python | def run_toy_HMC(gpu_id=None):
"""Run HMC on toy dataset"""
X, Y, X_test, Y_test = load_toy()
minibatch_size = Y.shape[0]
noise_precision = 1 / 9.0
net = get_toy_sym(True, noise_precision)
data_shape = (minibatch_size,) + X.shape[1::]
data_inputs = {'data': nd.zeros(data_shape, ctx=dev(gpu_id)),
'teacher_output_label': nd.zeros((minibatch_size, 1), ctx=dev(gpu_id))}
initializer = mx.init.Uniform(0.07)
sample_pool = HMC(net, data_inputs=data_inputs, X=X, Y=Y, X_test=X_test, Y_test=Y_test,
sample_num=300000, initializer=initializer, prior_precision=1.0,
learning_rate=1E-3, L=10, dev=dev(gpu_id)) |
python | def update(self, dictionary=None, **kwargs):
"""
Adds/overwrites all the keys and values from the dictionary.
"""
if dictionary is not None: kwargs.update(dictionary)
for k in list(kwargs.keys()): self[k] = kwargs[k]
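# Illustrative: d.update({'a': 1}, b=2) sets d['a'] = 1 and d['b'] = 2;
# on key collisions the dictionary wins over the keyword arguments. |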
java | void rfftf(final double a[], final int offa) {
if (n == 1)
return;
int l1, l2, na, kh, nf, ip, iw, ido, idl1;
final int twon = 2 * n;
nf = (int) wtable_r[1 + twon];
na = 1;
l2 = n;
iw = twon - 1;
for (int k1 = 1; k1 <= nf; ++k1) {
kh = nf - k1;
ip = (int) wtable_r[kh + 2 + twon];
l1 = l2 / ip;
ido = n / l2;
idl1 = ido * l1;
iw -= (ip - 1) * ido;
na = 1 - na;
switch (ip) {
case 2:
if (na == 0) {
radf2(ido, l1, a, offa, ch, 0, iw);
} else {
radf2(ido, l1, ch, 0, a, offa, iw);
}
break;
case 3:
if (na == 0) {
radf3(ido, l1, a, offa, ch, 0, iw);
} else {
radf3(ido, l1, ch, 0, a, offa, iw);
}
break;
case 4:
if (na == 0) {
radf4(ido, l1, a, offa, ch, 0, iw);
} else {
radf4(ido, l1, ch, 0, a, offa, iw);
}
break;
case 5:
if (na == 0) {
radf5(ido, l1, a, offa, ch, 0, iw);
} else {
radf5(ido, l1, ch, 0, a, offa, iw);
}
break;
default:
if (ido == 1)
na = 1 - na;
if (na == 0) {
radfg(ido, ip, l1, idl1, a, offa, ch, 0, iw);
na = 1;
} else {
radfg(ido, ip, l1, idl1, ch, 0, a, offa, iw);
na = 0;
}
break;
}
l2 = l1;
}
if (na == 1)
return;
System.arraycopy(ch, 0, a, offa, n);
} |
python | def _add_series_only_operations(cls):
"""
Add the series only operations to the cls; evaluate the doc
strings again.
"""
axis_descr, name, name2 = _doc_parms(cls)
def nanptp(values, axis=0, skipna=True):
nmax = nanops.nanmax(values, axis, skipna)
nmin = nanops.nanmin(values, axis, skipna)
warnings.warn("Method .ptp is deprecated and will be removed "
"in a future version. Use numpy.ptp instead.",
FutureWarning, stacklevel=4)
return nmax - nmin
cls.ptp = _make_stat_function(
cls, 'ptp', name, name2, axis_descr,
"""Return the difference between the maximum value and the
minimum value in the object. This is the equivalent of the
``numpy.ndarray`` method ``ptp``.\n\n.. deprecated:: 0.24.0
Use numpy.ptp instead""",
nanptp) |
python | def spdhg_generic(x, f, g, A, tau, sigma, niter, **kwargs):
r"""Computes a saddle point with a stochastic PDHG.
This means, a solution (x*, y*), y* = (y*_1, ..., y*_n) such that
(x*, y*) in arg min_x max_y sum_i=1^n <y_i, A_i x> - f*[i](y_i) + g(x)
where g : X -> IR_infty and f[i] : Y[i] -> IR_infty are convex, l.s.c. and
proper functionals. For this algorithm, they all may be non-smooth and no
strong convexity is assumed.
Parameters
----------
x : primal variable
This variable is both input and output of the method.
f : functions
Functionals Y[i] -> IR_infty that all have a convex conjugate with a
proximal operator, i.e.
f[i].convex_conj.proximal(sigma[i]) : Y[i] -> Y[i].
g : function
Functional X -> IR_infty that has a proximal operator, i.e.
g.proximal(tau) : X -> X.
A : functions
Operators A[i] : X -> Y[i] that possess adjoints: A[i].adjoint
tau : scalar / vector / matrix
Step size for primal variable. Note that the proximal operator of g
has to be well-defined for this input.
sigma : scalar
Scalar / vector / matrix used as step size for dual variable. Note that
the proximal operator related to f (see above) has to be well-defined
for this input.
niter : int
Number of iterations
Other Parameters
----------------
y : dual variable, optional
Dual variable is part of a product space. By default equals 0.
z : variable, optional
Adjoint of dual variable, z = A^* y. By default equals 0 if y = 0.
mu_g : scalar
Strong convexity constant of g.
theta : scalar
Global extrapolation factor.
extra : list
List of local extrapolation parameters for every index i. By default
extra_i = 1.
fun_select : function
Function that selects blocks at every iteration IN -> {1,...,n}. By
default this is serial uniform sampling, fun_select(k) selects an index
i \in {1,...,n} with probability 1/n.
callback : callable, optional
Function called with the current iterate after each iteration.
References
----------
[CERS2017] A. Chambolle, M. J. Ehrhardt, P. Richtarik and C.-B. Schoenlieb,
*Stochastic Primal-Dual Hybrid Gradient Algorithm with Arbitrary Sampling
and Imaging Applications*. ArXiv: http://arxiv.org/abs/1706.04957 (2017).
[E+2017] M. J. Ehrhardt, P. J. Markiewicz, P. Richtarik, J. Schott,
A. Chambolle and C.-B. Schoenlieb, *Faster PET reconstruction with a
stochastic primal-dual hybrid gradient method*. Wavelets and Sparsity XVII,
58 (2017) http://doi.org/10.1117/12.2272946.
"""
# Callback object
callback = kwargs.pop('callback', None)
if callback is not None and not callable(callback):
raise TypeError('`callback` {} is not callable'
''.format(callback))
# Dual variable
y = kwargs.pop('y', None)
if y is None:
y = A.range.zero()
# Adjoint of dual variable
z = kwargs.pop('z', None)
if z is None:
if y.norm() == 0:
z = A.domain.zero()
else:
z = A.adjoint(y)
# Strong convexity of g
mu_g = kwargs.pop('mu_g', None)
if mu_g is None:
update_proximal_primal = False
else:
update_proximal_primal = True
# Global extrapolation factor theta
theta = kwargs.pop('theta', 1)
# Second extrapolation factor
extra = kwargs.pop('extra', None)
if extra is None:
extra = [1] * len(sigma)
# Selection function
fun_select = kwargs.pop('fun_select', None)
if fun_select is None:
def fun_select(k):
return [int(np.random.choice(len(A), 1, p=[1 / len(A)] * len(A)))]
# Initialize variables
z_relax = z.copy()
dz = A.domain.element()
y_old = A.range.element()
# Save proximal operators
proximal_dual_sigma = [fi.convex_conj.proximal(si)
for fi, si in zip(f, sigma)]
proximal_primal_tau = g.proximal(tau)
# run the iterations
for k in range(niter):
# select block
selected = fun_select(k)
# update primal variable
# tmp = x - tau * z_relax; z_relax used as tmp variable
z_relax.lincomb(1, x, -tau, z_relax)
# x = prox(tmp)
proximal_primal_tau(z_relax, out=x)
# update extrapolation parameter theta
if update_proximal_primal:
theta = float(1 / np.sqrt(1 + 2 * mu_g * tau))
# update dual variable and z, z_relax
z_relax.assign(z)
for i in selected:
# save old yi
y_old[i].assign(y[i])
# tmp = Ai(x)
A[i](x, out=y[i])
# tmp = y_old + sigma_i * Ai(x)
y[i].lincomb(1, y_old[i], sigma[i], y[i])
# y[i]= prox(tmp)
proximal_dual_sigma[i](y[i], out=y[i])
# update adjoint of dual variable
y_old[i].lincomb(-1, y_old[i], 1, y[i])
A[i].adjoint(y_old[i], out=dz)
z += dz
# compute extrapolation
z_relax.lincomb(1, z_relax, 1 + theta * extra[i], dz)
# update the step sizes tau and sigma for acceleration
if update_proximal_primal:
for i in range(len(sigma)):
sigma[i] /= theta
tau *= theta
proximal_dual_sigma = [fi.convex_conj.proximal(si)
for fi, si in zip(f, sigma)]
proximal_primal_tau = g.proximal(tau)
if callback is not None:
callback([x, y]) |
java | private Object doMoskitoProfiling(ProceedingJoinPoint pjp, String statement) throws Throwable {
String statementGeneralized = removeParametersFromStatement(statement);
long callTime = System.nanoTime();
QueryStats cumulatedStats = producer.getDefaultStats();
QueryStats statementStats = null;
try{
statementStats = producer.getStats(statementGeneralized);
}catch(OnDemandStatsProducerException limitReachedException){
log.warn("Query limit reached for query "+statement+", --> "+statementGeneralized);
}
//add Request Count, increase CR,MCR
cumulatedStats.addRequest();
if (statementStats != null)
statementStats.addRequest();
// start stopwatch
boolean success = true;
try {
Object retVal = pjp.proceed();
// stop stopwatch
return retVal;
} catch (Throwable t) {
success = false;
cumulatedStats.notifyError(t);
if (statementStats != null)
statementStats.notifyError();
throw t;
} finally {
final long callDurationTime = System.nanoTime() - callTime;
//add execution time
cumulatedStats.addExecutionTime(callDurationTime);
if (statementStats != null)
statementStats.addExecutionTime(callDurationTime);
//notify request finished / decrease CR/MCR
cumulatedStats.notifyRequestFinished();
if (statementStats != null) {
statementStats.notifyRequestFinished();
}
addTrace(statement, success, callDurationTime);
}
} |
java | public synchronized void write(byte[] buf, int off, int len)
throws IOException
{
super.write(buf, off, len);
crc.update(buf, off, len);
} |
python | def addAllowMAC(self, xEUI):
"""add a given extended address to the whitelist addressfilter
Args:
xEUI: a given extended address in hex format
Returns:
True: successful to add a given extended address to the whitelist entry
False: fail to add a given extended address to the whitelist entry
"""
print('%s call addAllowMAC' % self.port)
print(xEUI)
if isinstance(xEUI, str):
macAddr = xEUI
else:
macAddr = self.__convertLongToString(xEUI)
try:
if self._addressfilterMode != 'whitelist':
if self.__setAddressfilterMode('Whitelist'):
self._addressfilterMode = 'whitelist'
cmd = WPANCTL_CMD + 'insert MAC:Whitelist:Entries %s' % macAddr
ret = self.__sendCommand(cmd)[0] != 'Fail'
self._addressfilterSet.add(macAddr)
print('current whitelist entries:')
for addr in self._addressfilterSet:
print(addr)
return ret
except Exception as e:
ModuleHelper.WriteIntoDebugLogger('addAllowMAC() Error: ' + str(e)) |
python | def range_iter(fd, offset, length, chunk=CHUNK):
""" Iterator generator that iterates over chunks in specified range
This generator is meant to be used when returning file descriptor as a
response to Range request (byte serving). It limits the reads to the region
specified by ``offset`` (in bytes form start of the file) and ``limit``
(number of bytes to read), and returns the file contents in chunks of
``chunk`` bytes.
The read offset is set either by using the file descriptor's ``seek()``
method, or by using ``emulate_seek()`` function if file descriptor does not
implement ``seek()``.
The file descriptor is automatically closed when iteration is finished.
"""
force_seek(fd, offset, chunk)
while length > 0:
ret = fd.read(min(length, chunk))
if not ret:
return
length -= len(ret)
yield ret
fd.close()
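# Byte-serving sketch (illustrative): stream bytes 100-199 of a file in
# 64-byte chunks; the descriptor is closed when iteration completes.
# fd = open('data.bin', 'rb')
# for part in range_iter(fd, offset=100, length=100, chunk=64):
#     transport.write(part)  # hypothetical transport call |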
java | public <T> F4<P1, P2, P3, P4, T> andThen(final Function<? super R, ? extends T> f) {
E.NPE(f);
final Func4<P1, P2, P3, P4, R> me = this;
return new F4<P1, P2, P3, P4, T>() {
@Override
public T apply(P1 p1, P2 p2, P3 p3, P4 p4) {
R r = me.apply(p1, p2, p3, p4);
return f.apply(r);
}
};
}
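// Composition sketch (names are hypothetical):
// F4<Integer, Integer, Integer, Integer, Integer> sum4 = ...;
// F4<Integer, Integer, Integer, Integer, String> asText = sum4.andThen(String::valueOf); |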
python | def _set_attr(self, **kwargs):
"""Set the attribute of the symbol.
Parameters
----------
**kwargs
The attributes to set
"""
keys = c_str_array(kwargs.keys())
vals = c_str_array([str(s) for s in kwargs.values()])
num_args = mx_uint(len(kwargs))
check_call(_LIB.MXSymbolSetAttrs(
self.handle, num_args, keys, vals)) |
java | public static HashMap<String, HashMap<String, Float>> getAllHypotheses(
BayesianReasonerShanksAgent agent) throws ShanksException {
return ShanksAgentBayesianReasoningCapability.getAllHypotheses(agent
.getBayesianNetwork());
} |
java | @Override
public int read() throws IOException {
if (max >= 0 && pos >= max) {
return -1;
}
int result = in.read();
pos++;
return result;
} |
python | def get_metadata(self):
"""
Fetch repository repomd.xml file
"""
metadata_path = "{}/{}/{}".format(self.url,
self.metadata_dir,
self.metadata_file)
metadata_sig_path = "{}/{}/{}.sig".format(self.url.rstrip('/'),
self.metadata_dir,
self.metadata_file)
# load metadata
req = requests.get(metadata_path)
if req.status_code == 200:
raw_metadata = req.content
else:
raise RepositoryError(metadata_path, ("status code not 200: "
"{}".format(req.status_code)))
if self.gpg_verify:
self.verify_data_signature(metadata_sig_path, metadata_path,
raw_metadata)
return self.parse_metadata(raw_metadata) |
python | def _update_transient_database(
self,
crossmatches,
classifications,
transientsMetadataList,
colMaps):
""" update transient database with classifications and crossmatch results
**Key Arguments:**
- ``crossmatches`` -- the crossmatches and associations resulting from the catalogue crossmatches
- ``classifications`` -- the classifications assigned to the transients post-crossmatches (dictionary of rank ordered list of classifications)
- ``transientsMetadataList`` -- the list of transient metadata lifted from the database.
- ``colMaps`` -- maps of the important column names for each table/view in the crossmatch-catalogues database
.. todo ::
- update key arguments values and definitions with defaults
- update return values and definitions
- update usage examples and text
- update docstring text
- check sublime snippet exists
- clip any useful text to docs mindmap
- regenerate the docs and check rendering of this docstring
"""
self.log.debug('starting the ``_update_transient_database`` method')
import time
start_time = time.time()
print "UPDATING TRANSIENTS DATABASE WITH RESULTS"
print "DELETING OLD RESULTS"
now = datetime.now()
now = now.strftime("%Y-%m-%d_%H-%M-%S-%f")
transientTable = self.settings["database settings"][
"transients"]["transient table"]
transientTableClassCol = self.settings["database settings"][
"transients"]["transient classification column"]
transientTableIdCol = self.settings["database settings"][
"transients"]["transient primary id column"]
# COMBINE ALL CROSSMATCHES INTO A LIST OF DICTIONARIES TO DUMP INTO
# DATABASE TABLE
transientIDs = [str(c)
for c in classifications.keys()]
transientIDs = ",".join(transientIDs)
# REMOVE PREVIOUS MATCHES
sqlQuery = """delete from sherlock_crossmatches where transient_object_id in (%(transientIDs)s);""" % locals(
)
writequery(
log=self.log,
sqlQuery=sqlQuery,
dbConn=self.transientsDbConn,
)
sqlQuery = """delete from sherlock_classifications where transient_object_id in (%(transientIDs)s);""" % locals(
)
writequery(
log=self.log,
sqlQuery=sqlQuery,
dbConn=self.transientsDbConn,
)
print "FINISHED DELETING OLD RESULTS/ADDING TO CROSSMATCHES: %d" % (time.time() - start_time,)
start_time = time.time()
if len(crossmatches):
insert_list_of_dictionaries_into_database_tables(
dbConn=self.transientsDbConn,
log=self.log,
dictList=crossmatches,
dbTableName="sherlock_crossmatches",
dateModified=True,
batchSize=10000,
replace=True,
dbSettings=self.settings["database settings"][
"transients"]
)
print "FINISHED ADDING TO CROSSMATCHES/UPDATING CLASSIFICATIONS IN TRANSIENT TABLE: %d" % (time.time() - start_time,)
start_time = time.time()
sqlQuery = ""
inserts = []
for k, v in classifications.items():
thisInsert = {
"transient_object_id": k,
"classification": v[0]
}
inserts.append(thisInsert)
print "FINISHED UPDATING CLASSIFICATIONS IN TRANSIENT TABLE/UPDATING sherlock_classifications TABLE: %d" % (time.time() - start_time,)
start_time = time.time()
insert_list_of_dictionaries_into_database_tables(
dbConn=self.transientsDbConn,
log=self.log,
dictList=inserts,
dbTableName="sherlock_classifications",
dateModified=True,
batchSize=10000,
replace=True,
dbSettings=self.settings["database settings"][
"transients"]
)
print "FINISHED UPDATING sherlock_classifications TABLE: %d" % (time.time() - start_time,)
start_time = time.time()
self.log.debug('completed the ``_update_transient_database`` method')
return None |
python | def _async_sub_acc_push(self, acc_id_list):
"""
Asynchronously subscribe to push data for the specified account IDs.
:param acc_id_list: list of account IDs to receive push data for
:return:
"""
kargs = {
'acc_id_list': acc_id_list,
'conn_id': self.get_async_conn_id(),
}
ret_code, msg, push_req_str = SubAccPush.pack_req(**kargs)
if ret_code == RET_OK:
self._send_async_req(push_req_str)
return RET_OK, None |
java | public Observable<ManagedClusterInner> getByResourceGroupAsync(String resourceGroupName, String resourceName) {
return getByResourceGroupWithServiceResponseAsync(resourceGroupName, resourceName).map(new Func1<ServiceResponse<ManagedClusterInner>, ManagedClusterInner>() {
@Override
public ManagedClusterInner call(ServiceResponse<ManagedClusterInner> response) {
return response.body();
}
});
} |
python | def can_update(self, user, **kwargs):
"""
Sys admins can change anything.
If the user is an organisation administrator or created the repository,
they may change any field other than "organisation_id".
If the user is a service administrator, they may change the "state"
but no other fields.
"""
if user.is_admin():
raise Return((True, set([])))
is_creator = self.created_by == user.id
if user.is_org_admin(self.organisation_id) or is_creator:
fields = set([])
if 'organisation_id' in kwargs:
fields.add('organisation_id')
if fields:
raise Return((False, fields))
else:
raise Return((True, set([])))
try:
service = yield Service.get(self.service_id)
if user.is_org_admin(service.organisation_id):
fields = set(kwargs) - {'state'}
if fields:
raise Return((False, fields))
else:
raise Return((True, fields))
except couch.NotFound:
# will be handled in Repository.validate
pass
raise Return((False, set([]))) |
java | public static DiscreteFactor getVariances(DiscreteFactor featureFactor, int featureVariableNum) {
return getVariances(Arrays.asList(featureFactor), featureVariableNum);
} |
python | def quoted(arg):
""" Given a string, return a quoted string as per RFC 3501, section 9.
Implementation copied from https://github.com/mjs/imapclient
(imapclient/imapclient.py), 3-clause BSD license
"""
if isinstance(arg, str):
arg = arg.replace('\\', '\\\\')
arg = arg.replace('"', '\\"')
q = '"'
else:
arg = arg.replace(b'\\', b'\\\\')
arg = arg.replace(b'"', b'\\"')
q = b'"'
return q + arg + q
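# Illustrative: quoted('pa"ss') returns '"pa\\"ss"' -- backslashes are
# escaped first, then double quotes, and the result is wrapped in quotes. |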
python | def set_options(self, options):
"""
Configure all the many options we'll need to make this happen.
"""
self.verbosity = int(options.get('verbosity'))
# Will we be gzipping?
self.gzip = getattr(settings, 'BAKERY_GZIP', False)
# And if so what content types will we be gzipping?
self.gzip_content_types = getattr(
settings,
'GZIP_CONTENT_TYPES',
DEFAULT_GZIP_CONTENT_TYPES
)
# What ACL (i.e. security permissions) will we be giving the files on S3?
self.acl = getattr(settings, 'DEFAULT_ACL', self.DEFAULT_ACL)
# Should we set cache-control headers?
self.cache_control = getattr(settings, 'BAKERY_CACHE_CONTROL', {})
# If the user specifies a build directory...
if options.get('build_dir'):
# ... validate that it is good.
if not os.path.exists(options.get('build_dir')):
raise CommandError(self.build_missing_msg)
# Go ahead and use it
self.build_dir = options.get("build_dir")
# If the user does not specify a build dir...
else:
# Check if it is set in settings.py
if not hasattr(settings, 'BUILD_DIR'):
raise CommandError(self.build_unconfig_msg)
# Then make sure it actually exists
if not os.path.exists(settings.BUILD_DIR):
raise CommandError(self.build_missing_msg)
# Go ahead and use it
self.build_dir = settings.BUILD_DIR
# If the user provides a bucket name, use that.
if options.get("aws_bucket_name"):
self.aws_bucket_name = options.get("aws_bucket_name")
else:
# Otherwise try to find it in the settings
if not hasattr(settings, 'AWS_BUCKET_NAME'):
raise CommandError(self.bucket_unconfig_msg)
self.aws_bucket_name = settings.AWS_BUCKET_NAME
# The bucket prefix, if it exists
self.aws_bucket_prefix = options.get("aws_bucket_prefix")
# If the user sets the --force option
if options.get('force'):
self.force_publish = True
else:
self.force_publish = False
# set the --dry-run option
if options.get('dry_run'):
self.dry_run = True
if self.verbosity > 0:
logger.info("Executing with the --dry-run option set.")
else:
self.dry_run = False
self.no_delete = options.get('no_delete')
self.no_pooling = options.get('no_pooling') |
python | def showpath(path):
"""Format a path for displaying."""
if logger.verbose:
return os.path.abspath(path)
else:
path = os.path.relpath(path)
if path.startswith(os.curdir + os.sep):
path = path[len(os.curdir + os.sep):]
return path |
java | public Object parseObject(String pStr, ParsePosition pStatus) {
Time t = parse(pStr);
pStatus.setIndex(pStr.length()); // Not 100%
return t;
} |
java | protected int resolveSimpleEntity(boolean checkStd)
throws XMLStreamException
{
char[] buf = mInputBuffer;
int ptr = mInputPtr;
char c = buf[ptr++];
// Numeric reference?
if (c == '#') {
c = buf[ptr++];
int value = 0;
int inputLen = mInputEnd;
if (c == 'x') { // hex
while (ptr < inputLen) {
c = buf[ptr++];
if (c == ';') {
break;
}
value = value << 4;
if (c <= '9' && c >= '0') {
value += (c - '0');
} else if (c >= 'a' && c <= 'f') {
value += (10 + (c - 'a'));
} else if (c >= 'A' && c <= 'F') {
value += (10 + (c - 'A'));
} else {
mInputPtr = ptr; // so error points to correct char
throwUnexpectedChar(c, "; expected a hex digit (0-9a-fA-F).");
}
/* Need to check for overflow; easiest to do right as
* it happens...
*/
if (value > MAX_UNICODE_CHAR) {
reportUnicodeOverflow();
}
}
} else { // numeric (decimal)
while (c != ';') {
if (c <= '9' && c >= '0') {
value = (value * 10) + (c - '0');
// Overflow?
if (value > MAX_UNICODE_CHAR) {
reportUnicodeOverflow();
}
} else {
mInputPtr = ptr; // so error points to correct char
throwUnexpectedChar(c, "; expected a decimal number.");
}
if (ptr >= inputLen) {
break;
}
c = buf[ptr++];
}
}
/* We get here either if we got it all, OR if we ran out of
* input in current buffer.
*/
if (c == ';') { // got the full thing
mInputPtr = ptr;
validateChar(value);
return value;
}
/* If we ran out of input, need to just fall back, gets
* resolved via 'full' resolution mechanism.
*/
} else if (checkStd) {
/* Caller may not want to resolve these quite yet...
* (when it wants separate events for non-char entities)
*/
if (c == 'a') { // amp or apos?
c = buf[ptr++];
if (c == 'm') { // amp?
if (buf[ptr++] == 'p') {
if (ptr < mInputEnd && buf[ptr++] == ';') {
mInputPtr = ptr;
return '&';
}
}
} else if (c == 'p') { // apos?
if (buf[ptr++] == 'o') {
int len = mInputEnd;
if (ptr < len && buf[ptr++] == 's') {
if (ptr < len && buf[ptr++] == ';') {
mInputPtr = ptr;
return '\'';
}
}
}
}
} else if (c == 'g') { // gt?
if (buf[ptr++] == 't' && buf[ptr++] == ';') {
mInputPtr = ptr;
return '>';
}
} else if (c == 'l') { // lt?
if (buf[ptr++] == 't' && buf[ptr++] == ';') {
mInputPtr = ptr;
return '<';
}
} else if (c == 'q') { // quot?
if (buf[ptr++] == 'u' && buf[ptr++] == 'o') {
int len = mInputEnd;
if (ptr < len && buf[ptr++] == 't') {
if (ptr < len && buf[ptr++] == ';') {
mInputPtr = ptr;
return '"';
}
}
}
}
}
return 0;
} |
java | public UpdateTableRequest withGlobalSecondaryIndexUpdates(GlobalSecondaryIndexUpdate... globalSecondaryIndexUpdates) {
if (this.globalSecondaryIndexUpdates == null) {
setGlobalSecondaryIndexUpdates(new java.util.ArrayList<GlobalSecondaryIndexUpdate>(globalSecondaryIndexUpdates.length));
}
for (GlobalSecondaryIndexUpdate ele : globalSecondaryIndexUpdates) {
this.globalSecondaryIndexUpdates.add(ele);
}
return this;
} |
python | def on_deleted(self, event):
"""
Called when a file or directory is deleted.
Todo:
May be bugged with inspector and sass compiler since the file does not
exist anymore.
Args:
event: Watchdog event, ``watchdog.events.DirDeletedEvent`` or
``watchdog.events.FileDeletedEvent``.
"""
if not self._event_error:
self.logger.info(u"Change detected from deletion of: %s",
event.src_path)
# Never try to compile the deleted source
self.compile_dependencies(event.src_path, include_self=False) |
java | public void wrapDescription(StringBuilder out, int indent, int currentLineIndent, String description) {
int max = commander.getColumnSize();
String[] words = description.split(" ");
int current = currentLineIndent;
for (int i = 0; i < words.length; i++) {
String word = words[i];
if (word.length() > max || current + 1 + word.length() <= max) {
out.append(word);
current += word.length();
if (i != words.length - 1) {
out.append(" ");
current++;
}
} else {
out.append("\n").append(s(indent)).append(word).append(" ");
current = indent + word.length() + 1;
}
}
} |
java | private Path renameToInProgressFile(Path file)
throws IOException {
Path newFile = new Path( file.toString() + inprogress_suffix );
try {
if (hdfs.rename(file, newFile)) {
return newFile;
}
throw new RenameException(file, newFile);
} catch (IOException e){
throw new RenameException(file, newFile, e);
}
} |
python | def normalize_address(self, hostname):
"""Ensure that address returned is an IP address (i.e. not fqdn)"""
if config_get('prefer-ipv6'):
# TODO: add support for ipv6 dns
return hostname
if hostname != unit_get('private-address'):
return get_host_ip(hostname, fallback=hostname)
# Otherwise assume localhost
return '127.0.0.1' |
python | def pack(fmt, *values, **kwargs):
"""Pack the values according to the format string and return a new BitStream.
fmt -- A single string or a list of strings with comma separated tokens
describing how to create the BitStream.
values -- Zero or more values to pack according to the format.
kwargs -- A dictionary or keyword-value pairs - the keywords used in the
format string will be replaced with their given value.
Token examples: 'int:12' : 12 bits as a signed integer
'uint:8' : 8 bits as an unsigned integer
'float:64' : 8 bytes as a big-endian float
'intbe:16' : 2 bytes as a big-endian signed integer
'uintbe:16' : 2 bytes as a big-endian unsigned integer
'intle:32' : 4 bytes as a little-endian signed integer
'uintle:32' : 4 bytes as a little-endian unsigned integer
'floatle:64': 8 bytes as a little-endian float
'intne:24' : 3 bytes as a native-endian signed integer
'uintne:24' : 3 bytes as a native-endian unsigned integer
'floatne:32': 4 bytes as a native-endian float
'hex:80' : 80 bits as a hex string
'oct:9' : 9 bits as an octal string
'bin:1' : single bit binary string
'ue' / 'uie': next bits as unsigned exp-Golomb code
'se' / 'sie': next bits as signed exp-Golomb code
'bits:5' : 5 bits as a bitstring object
'bytes:10' : 10 bytes as a bytes object
'bool' : 1 bit as a bool
'pad:3' : 3 zero bits as padding
>>> s = pack('uint:12, bits', 100, '0xffe')
>>> t = pack(['bits', 'bin:3'], s, '111')
>>> u = pack('uint:8=a, uint:8=b, uint:55=a', a=6, b=44)
"""
tokens = []
if isinstance(fmt, basestring):
fmt = [fmt]
try:
for f_item in fmt:
_, tkns = tokenparser(f_item, tuple(sorted(kwargs.keys())))
tokens.extend(tkns)
except ValueError as e:
raise CreationError(*e.args)
value_iter = iter(values)
s = BitStream()
try:
for name, length, value in tokens:
# If the value is in the kwd dictionary then it takes precedence.
if value in kwargs:
value = kwargs[value]
# If the length is in the kwd dictionary then use that too.
if length in kwargs:
length = kwargs[length]
# Also if we just have a dictionary name then we want to use it
if name in kwargs and length is None and value is None:
s.append(kwargs[name])
continue
if length is not None:
length = int(length)
if value is None and name != 'pad':
# Take the next value from the ones provided
value = next(value_iter)
s._append(BitStream._init_with_token(name, length, value))
except StopIteration:
raise CreationError("Not enough parameters present to pack according to the "
"format. {0} values are needed.", len(tokens))
try:
next(value_iter)
except StopIteration:
# Good, we've used up all the *values.
return s
raise CreationError("Too many parameters present to pack according to the format.") |
python | def SetAttributes(self,
urn,
attributes,
to_delete,
add_child_index=True,
mutation_pool=None):
"""Sets the attributes in the data store."""
attributes[AFF4Object.SchemaCls.LAST] = [
rdfvalue.RDFDatetime.Now().SerializeToDataStore()
]
to_delete.add(AFF4Object.SchemaCls.LAST)
if mutation_pool:
pool = mutation_pool
else:
pool = data_store.DB.GetMutationPool()
pool.MultiSet(urn, attributes, replace=False, to_delete=to_delete)
if add_child_index:
self._UpdateChildIndex(urn, pool)
if mutation_pool is None:
pool.Flush() |
python | def parse(self, what):
"""
:param what:
can be 'rlz-1/ref-asset1', 'rlz-2/sid-1', ...
"""
if '/' not in what:
key, spec = what, ''
else:
key, spec = what.split('/')
if spec and not spec.startswith(('ref-', 'sid-')):
raise ValueError('Wrong specification in %s' % what)
elif spec == '': # export losses for all assets
aids = []
arefs = []
for aid, rec in enumerate(self.assetcol.array):
aids.append(aid)
arefs.append(self.asset_refs[aid])
elif spec.startswith('sid-'): # passed the site ID
sid = int(spec[4:])
aids = []
arefs = []
for aid, rec in enumerate(self.assetcol.array):
if rec['site_id'] == sid:
aids.append(aid)
arefs.append(self.asset_refs[aid])
elif spec.startswith('ref-'): # passed the asset name
arefs = [spec[4:]]
aids = [self.str2asset[arefs[0]]['ordinal']]
else:
raise ValueError('Wrong specification in %s' % what)
return aids, arefs, spec, key |
java | public boolean add() {
if (root == NIL) {
root = nodeAllocator.newNode();
copy(root);
fixAggregates(root);
return true;
} else {
int node = root; assert parent(root) == NIL;
int parent;
int cmp;
do {
cmp = compare(node);
if (cmp < 0) {
parent = node;
node = left(node);
} else if (cmp > 0) {
parent = node;
node = right(node);
} else {
merge(node);
return false;
}
} while (node != NIL);
node = nodeAllocator.newNode();
if (node >= capacity()) {
resize(oversize(node + 1));
}
copy(node);
parent(node, parent);
if (cmp < 0) {
left(parent, node);
} else {
assert cmp > 0;
right(parent, node);
}
rebalance(node);
return true;
}
} |
python | def get_template_sources(self, template_name, template_dirs=None):
"""
Returns the absolute paths to "template_name" in the specified app.
If the name does not contain an app name (no colon), an empty list
is returned.
The parent FilesystemLoader.load_template_source() will take care
of the actual loading for us.
"""
if ':' not in template_name:
return []
app_name, template_name = template_name.split(":", 1)
template_dir = get_app_template_dir(app_name)
if template_dir:
try:
from django.template import Origin
origin = Origin(
name=join(template_dir, template_name),
template_name=template_name,
loader=self,
)
except (ImportError, TypeError):
origin = join(template_dir, template_name)
return [origin]
return []
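# Illustrative: 'blog:post_detail.html' resolves inside the `blog` app's
# templates directory; a name without a colon yields an empty list. |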
java | @Nullable
public static Date createDate (@Nullable final ZonedDateTime aZDT)
{
// The timezone gets lost here
return aZDT == null ? null : Date.from (aZDT.toInstant ());
} |
java | public static void launchIPs(Class<? extends Job> job, String... ips) throws Exception {
Cloud cloud = new Cloud();
cloud.publicIPs.addAll(Arrays.asList(ips));
launch(cloud, job);
} |
java | @Nullable private Object getTypedCellValue(TableFieldSchema fieldSchema, Object v) {
if (Data.isNull(v)) {
return null;
}
if (Objects.equals(fieldSchema.getMode(), "REPEATED")) {
TableFieldSchema elementSchema = fieldSchema.clone().setMode("REQUIRED");
@SuppressWarnings("unchecked")
List<Map<String, Object>> rawCells = (List<Map<String, Object>>) v;
ImmutableList.Builder<Object> values = ImmutableList.builder();
for (Map<String, Object> element : rawCells) {
values.add(getTypedCellValue(elementSchema, element.get("v")));
}
return values.build();
}
if (fieldSchema.getType().equals("RECORD")) {
@SuppressWarnings("unchecked")
Map<String, Object> typedV = (Map<String, Object>) v;
return getTypedTableRow(fieldSchema.getFields(), typedV);
}
if (fieldSchema.getType().equals("FLOAT")) {
return Double.parseDouble((String) v);
}
if (fieldSchema.getType().equals("BOOLEAN")) {
return Boolean.parseBoolean((String) v);
}
if (fieldSchema.getType().equals("TIMESTAMP")) {
return formatTimestamp((String) v);
}
// Returns the original value for:
// 1. String, 2. base64 encoded BYTES, 3. DATE, DATETIME, TIME strings.
return v;
} |
java | @Override protected double score1(Chunk[] chks, double weight, double offset, double[/*nclass*/] fs, int row) {
return score1static(chks, idx_tree(0), offset, fs, row, new Distribution(_parms), _nclass);
} |
python | def get_user_data(user=None,group=None,data_kind=DINGOS_USER_DATA_TYPE_NAME):
"""
Returns either stored settings of a given user or default settings.
This behavior reflects the need for views to have some settings at
hand when running. The settings are returned as dict object.
"""
logger.debug("Get user settings called")
if not user.is_authenticated():
user = None
try:
user_config = UserData.objects.get(user=user,group=group,data_kind=data_kind)
return user_config.retrieve()
except Exception:
return None |
java | public void process( T image1 , T image2 )
{
// declare image data structures
if( pyr1 == null || pyr1.getInputWidth() != image1.width || pyr1.getInputHeight() != image1.height ) {
pyr1 = UtilDenseOpticalFlow.standardPyramid(image1.width, image1.height, scale, sigma, 5, maxLayers, GrayF32.class);
pyr2 = UtilDenseOpticalFlow.standardPyramid(image1.width, image1.height, scale, sigma, 5, maxLayers, GrayF32.class);
pyr1.initialize(image1.width,image1.height);
pyr2.initialize(image1.width,image1.height);
}
norm1.reshape(image1.width, image1.height);
norm2.reshape(image1.width, image1.height);
// normalize input image to make sure alpha is image independent
imageNormalization(image1, image2, norm1, norm2);
// create image pyramid
pyr1.process(norm1);
pyr2.process(norm2);
// compute flow from pyramid
process(pyr1, pyr2);
} |
java | public static String removeSubstring(String inString, String substring) {
StringBuffer result = new StringBuffer();
int oldLoc = 0, loc = 0;
while ((loc = inString.indexOf(substring, oldLoc)) != -1) {
result.append(inString.substring(oldLoc, loc));
oldLoc = loc + substring.length();
}
result.append(inString.substring(oldLoc));
return result.toString();
}
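// Illustrative: removeSubstring("aabcbcd", "bc") returns "aad"; all
// non-overlapping occurrences of the substring are removed. |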
python | def query(self, query_dict: Dict[str, Any]) -> None:
"""
Override the ``query`` setter.
"""
self.parse_url.query = cast(Any, query_dict) |
python | def create_key_ring(
self,
parent,
key_ring_id,
key_ring,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Create a new ``KeyRing`` in a given Project and Location.
Example:
>>> from google.cloud import kms_v1
>>>
>>> client = kms_v1.KeyManagementServiceClient()
>>>
>>> parent = client.location_path('[PROJECT]', '[LOCATION]')
>>>
>>> # TODO: Initialize `key_ring_id`:
>>> key_ring_id = ''
>>>
>>> # TODO: Initialize `key_ring`:
>>> key_ring = {}
>>>
>>> response = client.create_key_ring(parent, key_ring_id, key_ring)
Args:
parent (str): Required. The resource name of the location associated with the
``KeyRings``, in the format ``projects/*/locations/*``.
key_ring_id (str): Required. It must be unique within a location and match the regular
expression ``[a-zA-Z0-9_-]{1,63}``
key_ring (Union[dict, ~google.cloud.kms_v1.types.KeyRing]): A ``KeyRing`` with initial field values.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.kms_v1.types.KeyRing`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.kms_v1.types.KeyRing` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "create_key_ring" not in self._inner_api_calls:
self._inner_api_calls[
"create_key_ring"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_key_ring,
default_retry=self._method_configs["CreateKeyRing"].retry,
default_timeout=self._method_configs["CreateKeyRing"].timeout,
client_info=self._client_info,
)
request = service_pb2.CreateKeyRingRequest(
parent=parent, key_ring_id=key_ring_id, key_ring=key_ring
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("parent", parent)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["create_key_ring"](
request, retry=retry, timeout=timeout, metadata=metadata
) |
java | public void setWeekDay(String dayString) {
final WeekDay day = WeekDay.valueOf(dayString);
if (m_model.getWeekDay() != day) {
removeExceptionsOnChange(new Command() {
public void execute() {
m_model.setWeekDay(day);
onValueChange();
}
});
}
} |
java | public void marshall(User user, ProtocolMarshaller protocolMarshaller) {
if (user == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(user.getId(), ID_BINDING);
protocolMarshaller.marshall(user.getUsername(), USERNAME_BINDING);
protocolMarshaller.marshall(user.getEmailAddress(), EMAILADDRESS_BINDING);
protocolMarshaller.marshall(user.getGivenName(), GIVENNAME_BINDING);
protocolMarshaller.marshall(user.getSurname(), SURNAME_BINDING);
protocolMarshaller.marshall(user.getOrganizationId(), ORGANIZATIONID_BINDING);
protocolMarshaller.marshall(user.getRootFolderId(), ROOTFOLDERID_BINDING);
protocolMarshaller.marshall(user.getRecycleBinFolderId(), RECYCLEBINFOLDERID_BINDING);
protocolMarshaller.marshall(user.getStatus(), STATUS_BINDING);
protocolMarshaller.marshall(user.getType(), TYPE_BINDING);
protocolMarshaller.marshall(user.getCreatedTimestamp(), CREATEDTIMESTAMP_BINDING);
protocolMarshaller.marshall(user.getModifiedTimestamp(), MODIFIEDTIMESTAMP_BINDING);
protocolMarshaller.marshall(user.getTimeZoneId(), TIMEZONEID_BINDING);
protocolMarshaller.marshall(user.getLocale(), LOCALE_BINDING);
protocolMarshaller.marshall(user.getStorage(), STORAGE_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} |
java | public static CommerceShippingMethod fetchByG_E(long groupId,
String engineKey, boolean retrieveFromCache) {
return getPersistence().fetchByG_E(groupId, engineKey, retrieveFromCache);
} |
python | def getpeptides(self, chain):
"""If peptide ligand chains are defined via the command line options,
try to extract the underlying ligand formed by all residues in the
given chain without water
"""
all_from_chain = [o for o in pybel.ob.OBResidueIter(
self.proteincomplex.OBMol) if o.GetChain() == chain] # All residues from chain
if len(all_from_chain) == 0:
return None
else:
non_water = [o for o in all_from_chain if not o.GetResidueProperty(9)]  # OBResidueProperty 9 = WATER
ligand = self.extract_ligand(non_water)
return ligand |
java | public ListDeploymentTargetsResult withTargetIds(String... targetIds) {
if (this.targetIds == null) {
setTargetIds(new com.amazonaws.internal.SdkInternalList<String>(targetIds.length));
}
for (String ele : targetIds) {
this.targetIds.add(ele);
}
return this;
} |
python | def load_template(self, template_name, template_source=None,
template_path=None, **template_vars):
"""
Will load a templated configuration on the device.
:param cls: Instance of the driver class.
:param template_name: Identifies the template name.
:param template_source (optional): Custom config template rendered and loaded on device
:param template_path (optional): Absolute path to directory for the configuration templates
:param template_vars: Dictionary with arguments to be used when the template is rendered.
:raise DriverTemplateNotImplemented: No template defined for the device type.
:raise TemplateNotImplemented: The template specified in template_name does not exist in \
the default path or in the custom path if any specified using parameter `template_path`.
:raise TemplateRenderException: The template could not be rendered. Either the template \
source does not have the right format, either the arguments in `template_vars` are not \
properly specified.
"""
return napalm_base.helpers.load_template(self,
template_name,
template_source=template_source,
template_path=template_path,
**template_vars) |
python | def add(self, client):
"""Add a client to the penalty box."""
if client.pool_id in self._client_ids:
log.info("%r is already in the penalty box. Ignoring.", client)
return
release = time.time() + self._min_wait
heapq.heappush(self._clients, (release, (client, self._min_wait)))
self._client_ids.add(client.pool_id) |
python | def ok_embedded_video(node):
"""Check if this embed/video is an ok one to count."""
good_keywords = ('youtube', 'blip.tv', 'vimeo')
node_str = tounicode(node)
for key in good_keywords:
if key in node_str:
return True
return False |
java | protected IScope createLocalVariableScope(EObject featureCall, IScope parent, IFeatureScopeSession session, IResolvedTypes resolvedTypes) {
return new LocalVariableScope(parent, session, asAbstractFeatureCall(featureCall));
} |
java | public void commitAttrValuesInDB()
throws EFapsException
{
synchronized (this.attrUpdated) {
if (this.attrUpdated.size() > 0) {
Connection con = null;
try {
con = Context.getConnection();
final StringBuilder cmd = new StringBuilder();
PreparedStatement stmt = null;
try {
cmd.append("update T_USERPERSON set ");
boolean first = true;
for (final AttrName attrName : this.attrUpdated.keySet()) {
if (first) {
first = false;
} else {
cmd.append(",");
}
cmd.append(attrName.sqlColumn).append("=?");
}
cmd.append(" where ID=").append(getId());
stmt = con.prepareStatement(cmd.toString());
int col = 1;
for (final AttrName attrName : this.attrUpdated.keySet()) {
final String tmp = this.attrUpdated.get(attrName);
if (attrName.integer) {
stmt.setInt(col, tmp == null ? 0 : Integer.parseInt(tmp.trim()));
} else {
stmt.setString(col, tmp == null ? null : tmp.trim());
}
col++;
}
final int rows = stmt.executeUpdate();
if (rows == 0) {
Person.LOG.error("could not update '" + cmd.toString() + "' person with user name '"
+ getName() + "' (id = " + getId() + ")");
throw new EFapsException(Person.class, "commitAttrValuesInDB.NotUpdated", cmd.toString(),
getName(), getId());
}
// TODO: update modified date
} catch (final SQLException e) {
Person.LOG.error("could not update '" + cmd.toString() + "' person with user name '" + getName()
+ "' (id = " + getId() + ")", e);
throw new EFapsException(Person.class, "commitAttrValuesInDB.SQLException", e, cmd.toString(),
getName(), getId());
} finally {
try {
if (stmt != null) {
stmt.close();
}
} catch (final SQLException e) {
throw new EFapsException(Person.class, "commitAttrValuesInDB.SQLException", e, cmd
.toString(), getName(), getId());
}
}
con.commit();
con.close();
} catch (final SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
this.attrUpdated.clear();
}
}
} |
java | public VirtualFile getFile(VirtualFile mountPoint, VirtualFile target) {
final String path = target.getPathNameRelativeTo(mountPoint);
return rootNode.getFile(new Path(path), mountPoint);
} |
java | public static Map<String, Object> configureProperties(final PropertyResolver resolver,
final Object object) {
//use a default scope of InstanceOnly if the Property doesn't specify it
return configureProperties(resolver, buildDescription(object, DescriptionBuilder.builder()), object,
PropertyScope.InstanceOnly);
} |
java | public static Object stripShallow(Object decorated) {
try {
InjectableProperty delegateProperty = new ConfigClassAnalyzer(decorated.getClass()).getDelegateProperty();
if (delegateProperty == null) {
return decorated;
} else {
return delegateProperty.getValue(decorated);
}
} catch (Exception e) {
throw new RuntimeException(e);
}
} |
python | def check_signature(self, msgbuf, srcSystem, srcComponent):
'''check signature on incoming message'''
if isinstance(msgbuf, array.array):
msgbuf = msgbuf.tostring()
timestamp_buf = msgbuf[-12:-6]
link_id = msgbuf[-13]
(tlow, thigh) = struct.unpack('<IH', timestamp_buf)
timestamp = tlow + (thigh<<32)
# see if the timestamp is acceptable
stream_key = (link_id,srcSystem,srcComponent)
if stream_key in self.signing.stream_timestamps:
if timestamp <= self.signing.stream_timestamps[stream_key]:
# reject old timestamp
# print('old timestamp')
return False
else:
# a new stream has appeared. Accept the timestamp if it is at most
# one minute behind our current timestamp
if timestamp + 6000*1000 < self.signing.timestamp:
# print('bad new stream ', timestamp/(100.0*1000*60*60*24*365), self.signing.timestamp/(100.0*1000*60*60*24*365))
return False
self.signing.stream_timestamps[stream_key] = timestamp
# print('new stream')
h = hashlib.new('sha256')
h.update(self.signing.secret_key)
h.update(msgbuf[:-6])
sig1 = str(h.digest())[:6]
sig2 = str(msgbuf)[-6:]
if sig1 != sig2:
# print('sig mismatch')
return False
# the timestamp we next send with is the max of the received timestamp and
# our current timestamp
self.signing.timestamp = max(self.signing.timestamp, timestamp)
return True |
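The matching signing side is not shown; below is a hedged sketch reconstructing it from the slicing in check_signature (field layout inferred from the code above, not quoted from the MAVLink spec).

import hashlib
import struct

def sign_packet(secret_key, unsigned_buf, link_id, timestamp):
    """Append link_id (1 byte), a 48-bit timestamp, and a 6-byte
    truncated-sha256 signature, mirroring what check_signature slices off."""
    tlow = timestamp & 0xFFFFFFFF
    thigh = (timestamp >> 32) & 0xFFFF
    buf = unsigned_buf + bytes([link_id]) + struct.pack('<IH', tlow, thigh)
    h = hashlib.new('sha256')
    h.update(secret_key)
    h.update(buf)  # everything except the signature itself
    return buf + h.digest()[:6]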
python | def visitLanguageRange(self, ctx: ShExDocParser.LanguageRangeContext):
""" ShExC: languageRange : LANGTAG (STEM_MARK languagExclusion*)?
ShExJ: valueSetValue = objectValue | LanguageStem | LanguageStemRange """
baselang = ctx.LANGTAG().getText()
if not ctx.STEM_MARK(): # valueSetValue = objectValue / objectValue = ObjectLiteral
vsvalue = Language()
vsvalue.languageTag = baselang[1:]
else:
if ctx.languageExclusion():
vsvalue = LanguageStemRange(LANGTAG(baselang[1:]), exclusions=[])
self._language_exclusions(vsvalue, ctx.languageExclusion())
else:
vsvalue = LanguageStem(LANGTAG(baselang[1:]))
self.nodeconstraint.values.append(vsvalue) |
python | def get(self, sid):
"""
Constructs a DomainContext
:param sid: The unique string that identifies the resource
:returns: twilio.rest.api.v2010.account.sip.domain.DomainContext
:rtype: twilio.rest.api.v2010.account.sip.domain.DomainContext
"""
return DomainContext(self._version, account_sid=self._solution['account_sid'], sid=sid, ) |
python | def ProduceExtractionWarning(self, message, path_spec=None):
"""Produces an extraction warning.
Args:
message (str): message of the warning.
path_spec (Optional[dfvfs.PathSpec]): path specification, where None
will use the path specification of current file entry set in
the mediator.
Raises:
RuntimeError: when storage writer is not set.
"""
if not self._storage_writer:
raise RuntimeError('Storage writer not set.')
if not path_spec and self._file_entry:
path_spec = self._file_entry.path_spec
parser_chain = self.GetParserChain()
warning = warnings.ExtractionWarning(
message=message, parser_chain=parser_chain, path_spec=path_spec)
self._storage_writer.AddWarning(warning)
self._number_of_warnings += 1
self.last_activity_timestamp = time.time() |
python | def make_spectrum_plot(model_plot, data_plot, desc, xmin_clamp=0.01,
min_valid_x=None, max_valid_x=None):
"""Make a plot of a spectral model and data.
*model_plot*
A model plot object returned by Sherpa from a call like `ui.get_model_plot()`
or `ui.get_bkg_model_plot()`.
*data_plot*
A data plot object returned by Sherpa from a call like `ui.get_source_plot()`
or `ui.get_bkg_plot()`.
*desc*
Text describing the origin of the data; will be shown in the plot legend
(with "Model" and "Data" appended).
*xmin_clamp*
The smallest "x" (energy axis) value that will be plotted; default is 0.01.
This is needed to allow the plot to be shown on a logarithmic scale if
the energy axes of the model go all the way to 0.
*min_valid_x*
Either None, or the smallest "x" (energy axis) value in which the model and
data are valid; this could correspond to a range specified in the "notice"
command during analysis. If specified, a gray band will be added to the plot
showing the invalidated regions.
*max_valid_x*
Like *min_valid_x* but for the largest "x" (energy axis) value in which the
model and data are valid.
Returns:
A tuple ``(plot, xlow, xhigh)``, where *plot* an OmegaPlot RectPlot
instance, *xlow* is the left edge of the plot bounds, and *xhigh* is the
right edge of the plot bounds.
"""
    import numpy as np
    import omega as om
model_x = np.concatenate((model_plot.xlo, [model_plot.xhi[-1]]))
model_x[0] = max(model_x[0], xmin_clamp)
model_y = np.concatenate((model_plot.y, [0.]))
# Sigh, sometimes Sherpa gives us bad values.
is_bad = ~np.isfinite(model_y)
if is_bad.sum():
from .cli import warn
warn('bad Sherpa model Y value(s) at: %r', np.where(is_bad)[0])
model_y[is_bad] = 0
data_left_edges = data_plot.x - 0.5 * data_plot.xerr
data_left_edges[0] = max(data_left_edges[0], xmin_clamp)
data_hist_x = np.concatenate((data_left_edges, [data_plot.x[-1] + 0.5 * data_plot.xerr[-1]]))
data_hist_y = np.concatenate((data_plot.y, [0.]))
log_bounds_pad_factor = 0.9
xlow = model_x[0] * log_bounds_pad_factor
xhigh = model_x[-1] / log_bounds_pad_factor
p = om.RectPlot()
if min_valid_x is not None:
p.add(om.rect.XBand(1e-3 * xlow, min_valid_x, keyText=None), zheight=-1, dsn=1)
if max_valid_x is not None:
p.add(om.rect.XBand(max_valid_x, xhigh * 1e3, keyText=None), zheight=-1, dsn=1)
csp = om.rect.ContinuousSteppedPainter(keyText=desc + ' Model')
csp.setFloats(model_x, model_y)
p.add(csp)
csp = om.rect.ContinuousSteppedPainter(keyText=None)
csp.setFloats(data_hist_x, data_hist_y)
p.add(csp)
p.addXYErr(data_plot.x, data_plot.y, data_plot.yerr, desc + ' Data', lines=0, dsn=1)
p.setLabels(data_plot.xlabel, data_plot.ylabel)
p.setLinLogAxes(True, False)
    p.setBounds(xlow, xhigh)
return p, xlow, xhigh |
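A hypothetical call, assuming an active Sherpa session with data loaded and a model fit; get_model_plot() and get_source_plot() are the calls the docstring itself references, while show() on the returned plot is an assumption about the OmegaPlot API.

from sherpa.astro import ui

mplot = ui.get_model_plot()
dplot = ui.get_source_plot()
plot, xlow, xhigh = make_spectrum_plot(mplot, dplot, 'MyObs',
                                       min_valid_x=0.5, max_valid_x=7.0)
plot.show()  # assumed OmegaPlot method; check the omega docs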
java | public BlockchainInfo queryBlockchainInfo() throws ProposalException, InvalidArgumentException {
return queryBlockchainInfo(getShuffledPeers(EnumSet.of(PeerRole.LEDGER_QUERY)), client.getUserContext());
} |
java | public static String extractHtmlBody(String content) {
Matcher startMatcher = BODY_START_PATTERN.matcher(content);
Matcher endMatcher = BODY_END_PATTERN.matcher(content);
int start = 0;
int end = content.length();
if (startMatcher.find()) {
start = startMatcher.end();
}
if (endMatcher.find(start)) {
end = endMatcher.start();
}
return content.substring(start, end);
} |
python | def pst_from_parnames_obsnames(parnames, obsnames,
tplfilename='model.input.tpl', insfilename='model.output.ins'):
"""
Creates a Pst object from a list of parameter names and a list of observation names.
Default values are provided for the TPL and INS
Args:
parnames: list of names from which to make a template file
obsnames: list of obsnames to read in
tplfilename: filename for TPL file (default: model.input.tpl)
insfilename: filename for INS file (default: model.output.ins)
Returns:
Pst object
"""
simple_tpl_from_pars(parnames, tplfilename)
simple_ins_from_obs(obsnames, insfilename)
modelinputfilename = tplfilename.replace('.tpl','')
modeloutputfilename = insfilename.replace('.ins','')
return pyemu.Pst.from_io_files(tplfilename, modelinputfilename, insfilename, modeloutputfilename) |
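Hypothetical usage with made-up parameter and observation names; saving via Pst.write() is an assumption about the pyemu API.

pst = pst_from_parnames_obsnames(['hk_1', 'hk_2', 'rch_0'],
                                 ['head_obs_01', 'head_obs_02'])
pst.write('model.pst')  # assumed pyemu.Pst method for writing the control file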
python | def valid_domains(self):
"""
:return:
A list of unicode strings of valid domain names for the certificate.
Wildcard certificates will have a domain in the form: *.example.com
"""
if self._valid_domains is None:
self._valid_domains = []
# For the subject alt name extension, we can look at the name of
# the choice selected since it distinguishes between domain names,
# email addresses, IPs, etc
if self.subject_alt_name_value:
for general_name in self.subject_alt_name_value:
if general_name.name == 'dns_name' and general_name.native not in self._valid_domains:
self._valid_domains.append(general_name.native)
# If there was no subject alt name extension, and the common name
# in the subject looks like a domain, that is considered the valid
# list. This is done because according to
# https://tools.ietf.org/html/rfc6125#section-6.4.4, the common
# name should not be used if the subject alt name is present.
else:
pattern = re.compile('^(\\*\\.)?(?:[a-zA-Z0-9](?:[a-zA-Z0-9\\-]*[a-zA-Z0-9])?\\.)+[a-zA-Z]{2,}$')
for rdn in self.subject.chosen:
for name_type_value in rdn:
if name_type_value['type'].native == 'common_name':
value = name_type_value['value'].native
if pattern.match(value):
self._valid_domains.append(value)
return self._valid_domains |
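A sketch of how a caller might use the returned list; this helper is illustrative, not part of the library, and the one-extra-label wildcard rule is my reading of RFC 6125.

def hostname_matches(hostname, valid_domains):
    """Match a hostname against valid_domains, treating *.example.com
    as covering exactly one additional label."""
    hostname = hostname.lower()
    for domain in valid_domains:
        domain = domain.lower()
        if domain.startswith('*.'):
            first_label, _, rest = hostname.partition('.')
            if first_label and '.' + rest == domain[1:]:
                return True
        elif hostname == domain:
            return True
    return False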
python | def tatoeba(language, word, minlength = 10, maxlength = 100):
''' Returns a list of suitable textsamples for a given word using Tatoeba.org. '''
word, sentences = unicode(word), []
page = requests.get('http://tatoeba.org/deu/sentences/search?query=%s&from=%s&to=und' % (word, lltk.locale.iso639_1to3(language)))
tree = html.fromstring(page.text)
for sentence in tree.xpath('//div[contains(concat(" ", normalize-space(@class), " "), " mainSentence ")]/div/a/text()'):
sentence = sentence.strip(u' "„“').replace(u'“ „', u' – ').replace('" "', u' – ')
if word in sentence and len(sentence) < maxlength and len(sentence) > minlength:
sentences.append(sentence)
return sentences |
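Hypothetical call (requires network access and lltk's locale helper; note the function is Python-2 era code, hence unicode()):

sentences = tatoeba('de', u'Haus', minlength=15, maxlength=80)
for s in sentences[:3]:
    print(s)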
python | def key_value_pairs(self):
"""
convert list to key value pairs
This should also create unique id's to allow for any
dataset to be transposed, and then later manipulated
r1c1,r1c2,r1c3
r2c1,r2c2,r2c3
should be converted to
ID COLNUM VAL
r1c1,
"""
self.op_data = []
hdrs = self.ip_data[0]
for row in self.ip_data[1:]:
id_col = row[0]
for col_num, col in enumerate(row):
self.op_data.append([id_col, hdrs[col_num], col]) |
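The same transformation as a standalone sketch with a worked input/output (function name and data hypothetical):

def to_key_value_pairs(rows):
    hdrs, out = rows[0], []
    for row in rows[1:]:
        for i, val in enumerate(row):
            out.append([row[0], hdrs[i], val])
    return out

print(to_key_value_pairs([['id', 'colour'], ['r1', 'red'], ['r2', 'blue']]))
# [['r1', 'id', 'r1'], ['r1', 'colour', 'red'],
#  ['r2', 'id', 'r2'], ['r2', 'colour', 'blue']]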
python | def _get_adjtime_timezone():
'''
Return the timezone in /etc/adjtime of the system clock
'''
adjtime_file = '/etc/adjtime'
if os.path.exists(adjtime_file):
cmd = ['tail', '-n', '1', adjtime_file]
return __salt__['cmd.run'](cmd, python_shell=False)
elif os.path.exists('/dev/rtc'):
raise CommandExecutionError(
'Unable to get hwclock timezone from ' + adjtime_file
)
else:
# There is no RTC.
return None |
java | public static String readFile(String string) {
InputStream inputStream = getInputStream(string);
if (inputStream != null) {
return new String(readStream(inputStream));
}
return null;
} |
python | def p_var_decl_at(p):
""" var_decl : DIM idlist typedef AT expr
"""
p[0] = None
if len(p[2]) != 1:
syntax_error(p.lineno(1),
'Only one variable at a time can be declared this way')
return
idlist = p[2][0]
entry = SYMBOL_TABLE.declare_variable(idlist[0], idlist[1], p[3])
if entry is None:
return
if p[5].token == 'CONST':
tmp = p[5].expr
if tmp.token == 'UNARY' and tmp.operator == 'ADDRESS': # Must be an ID
if tmp.operand.token == 'VAR':
entry.make_alias(tmp.operand)
elif tmp.operand.token == 'ARRAYACCESS':
if tmp.operand.offset is None:
syntax_error(p.lineno(4), 'Address is not constant. Only constant subscripts are allowed')
return
entry.make_alias(tmp.operand)
entry.offset = tmp.operand.offset
else:
syntax_error(p.lineno(4), 'Only address of identifiers are allowed')
return
elif not is_number(p[5]):
syntax_error(p.lineno(4), 'Address must be a numeric constant expression')
return
else:
entry.addr = str(make_typecast(_TYPE(gl.STR_INDEX_TYPE), p[5], p.lineno(4)).value)
entry.accessed = True
if entry.scope == SCOPE.local:
SYMBOL_TABLE.make_static(entry.name) |
python | def show_message(self, c_attack, c_defend, result, dmg, print_console='Yes'):
"""
function to wrap the display of the battle messages
"""
perc_health_att = '[' + str(round((c_attack.stats['Health']*100) / c_attack.stats['max_health'] )) + '%]'
perc_health_def = '[' + str(round((c_defend.stats['Health']*100) / c_defend.stats['max_health'] )) + '%]'
if result == 'Miss':
txt = c_attack.name + ' ' + perc_health_att.rjust(6) + ' miss ' + c_defend.name + ' ' + perc_health_def.rjust(6)
elif result == 'Crit':
txt = c_attack.name + ' ' + perc_health_att.rjust(6) + ' CRIT ' + c_defend.name + ' ' + perc_health_def.rjust(6)
txt += ' for ' + str(dmg)
else:
txt = c_attack.name + ' ' + perc_health_att.rjust(6) + ' hits ' + c_defend.name + ' ' + perc_health_def.rjust(6)
txt += ' for ' + str(dmg)
if print_console == 'Yes':
print(txt) |
java | public static boolean isInternalRequest(HttpServletRequest request){
//
if(Boolean.parseBoolean(request.getHeader(WebConstants.HEADER_INTERNAL_REQUEST))){
return true;
}
boolean isInner = IpUtils.isInnerIp(request.getServerName());
String forwardHost = request.getHeader(WebConstants.HEADER_FORWARDED_HOST);
        // the request was forwarded through the gateway
if(StringUtils.isNotBlank(forwardHost)){
isInner = IpUtils.isInnerIp(StringUtils.split(forwardHost, ":")[0]);
}else{
if(!isInner){
isInner = IpUtils.isInnerIp(IpUtils.getinvokerIpAddr(request));
}
}
return isInner;
} |
python | def _register_admin(admin_site, model, admin_class):
""" Register model in the admin, ignoring any previously registered models.
Alternatively it could be used in the future to replace a previously
registered model.
"""
try:
admin_site.register(model, admin_class)
except admin.sites.AlreadyRegistered:
pass |
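Hypothetical usage from a Django app's admin module; Book and BookAdmin are made-up names:

from django.contrib import admin
from myapp.models import Book  # assumed model

class BookAdmin(admin.ModelAdmin):
    list_display = ('title',)

_register_admin(admin.site, Book, BookAdmin)  # no-op if Book is already registered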
python | def run(self):
        '''Interactive command-line loop: read, segment, and print until an empty line.'''
        while True:
            oiraw = cInput()
            if len(oiraw) < 1:
                break
            cutted = self.cut(oiraw, text=True)
            print(cutted) |
java | @Override
protected void logImpl(LogLevel level,Object[] message,Throwable throwable)
{
//format log message
String text=this.formatLogMessage(level,message,throwable);
//print text to system out
System.out.println(text);
} |
java | public static int writeIdenticalLevelRun(int prev, CharSequence s, int i, int length, ByteArrayWrapper sink) {
while (i < length) {
// We must have capacity>=SLOPE_MAX_BYTES in case writeDiff() writes that much,
// but we do not want to force the sink to allocate
// for a large min_capacity because we might actually only write one byte.
ensureAppendCapacity(sink, 16, s.length() * 2);
byte[] buffer = sink.bytes;
int capacity = buffer.length;
int p = sink.size;
int lastSafe = capacity - SLOPE_MAX_BYTES_;
while (i < length && p <= lastSafe) {
if (prev < 0x4e00 || prev >= 0xa000) {
prev = (prev & ~0x7f) - SLOPE_REACH_NEG_1_;
} else {
// Unihan U+4e00..U+9fa5:
// double-bytes down from the upper end
prev = 0x9fff - SLOPE_REACH_POS_2_;
}
int c = Character.codePointAt(s, i);
i += Character.charCount(c);
if (c == 0xfffe) {
buffer[p++] = 2; // merge separator
prev = 0;
} else {
p = writeDiff(c - prev, buffer, p);
prev = c;
}
}
sink.size = p;
}
return prev;
} |
python | def _improve_class_docs(app, cls, lines):
"""Improve the documentation of a class."""
if issubclass(cls, models.Model):
_add_model_fields_as_params(app, cls, lines)
elif issubclass(cls, forms.Form):
_add_form_fields(cls, lines) |
java | @Deprecated
public static <T> String marshal(final T data, @NotNull final Class<?>... classesToBeBound) {
return JaxbUtils.marshal(data, classesToBeBound);
} |
java | private boolean waitForWebElementsToBeCreated(){
final long endTime = SystemClock.uptimeMillis() + 5000;
while(SystemClock.uptimeMillis() < endTime){
if(isFinished){
return true;
}
sleeper.sleepMini();
}
return false;
} |
java | private void setMethods(String methodList) {
        String[] methodNames = methodList.trim().split(",");
        for (String methodName : methodNames) {
            // Enum.valueOf() throws IllegalArgumentException for unknown names
            // rather than returning null, so catch it to preserve the intended
            // "Unknown REST method" report.
            HttpMethod method = null;
            try {
                method = HttpMethod.valueOf(methodName.trim().toUpperCase());
            } catch (IllegalArgumentException e) {
                // leave method as null; reported just below
            }
            Utils.require(method != null, "Unknown REST method: " + methodName);
            m_methods.add(method);
        }
    } |
python | def _reset_em(self):
"""Resets self.em and the shared instances."""
self.em = _ExtendedManager(self.addr, self.authkey, mode=self.mode, start=False)
self.em.start()
self._set_shared_instances() |
python | def SegmentMax(a, ids):
"""
Segmented max op.
"""
    func = lambda idxs: np.amax(a[idxs], axis=0)
    # the trailing comma deliberately returns a 1-tuple; ops here appear to
    # yield tuples of outputs
    return seg_map(func, a, ids), |
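Since seg_map is not shown, here is the equivalent computation spelled out directly in NumPy, assuming ids assigns each element of a to a segment:

import numpy as np

a = np.array([1.0, 5.0, 2.0, 7.0, 3.0])
ids = np.array([0, 0, 1, 1, 1])
result = np.array([np.amax(a[ids == i], axis=0) for i in np.unique(ids)])
print(result)  # [5. 7.]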