language | func_code_string |
---|---|
java | private ProcCode createCode(Code defCode) {
if (!defCode.hasPatterns()) {
throw new IllegalStateException("Field pattern can't be null.");
}
if (defCode.getTemplate() == null) {
throw new IllegalStateException("Field template can't be null.");
}
ProcCode code = codes.get(defCode);
if (code == null) {
List<Pattern> confPatterns = defCode.getPatterns();
List<ProcPattern> procPatterns = new ArrayList<ProcPattern>(confPatterns.size());
for (Pattern confPattern : confPatterns) {
procPatterns.add(createPattern(confPattern));
}
code = new ProcCode(
procPatterns,
createTemplate(defCode.getTemplate()),
defCode.getName(),
defCode.getPriority(),
defCode.isTransparent()
);
codes.put(defCode, code);
}
return code;
} |
java | @Override
public AuthnContextClassContext apply(AuthenticationContext input) {
return input != null ? input.getSubcontext(AuthnContextClassContext.class, false) : null;
} |
python | def iter_milestones(self, state=None, sort=None, direction=None,
number=-1, etag=None):
"""Iterates over the milestones on this repository.
:param str state: (optional), state of the milestones, accepted
values: ('open', 'closed')
:param str sort: (optional), how to sort the milestones, accepted
values: ('due_date', 'completeness')
:param str direction: (optional), direction to sort the milestones,
accepted values: ('asc', 'desc')
:param int number: (optional), number of milestones to return.
Default: -1 returns all milestones
:param str etag: (optional), ETag from a previous request to the same
endpoint
:returns: generator of
:class:`Milestone <github3.issues.milestone.Milestone>`\ s
"""
url = self._build_url('milestones', base_url=self._api)
accepted = {'state': ('open', 'closed'),
'sort': ('due_date', 'completeness'),
'direction': ('asc', 'desc')}
params = {'state': state, 'sort': sort, 'direction': direction}
for (k, v) in list(params.items()):
if not (v and (v in accepted[k])): # e.g., '' or None
del params[k]
if not params:
params = None
return self._iter(int(number), url, Milestone, params, etag) |
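A minimal usage sketch for the iterator above; `repo` is a hypothetical `github3` repository object obtained elsewhere, and the keyword values come straight from the docstring's accepted lists:

```python
# Hypothetical usage; assumes `repo` is a github3 Repository instance
for milestone in repo.iter_milestones(state='open', sort='due_date',
                                      direction='asc'):
    print(milestone.title)
```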
java | public static void reset(
final Collection<MasterTriggerRestoreHook<?>> hooks,
final Logger log) throws FlinkException {
for (MasterTriggerRestoreHook<?> hook : hooks) {
final String id = hook.getIdentifier();
try {
hook.reset();
}
catch (Throwable t) {
ExceptionUtils.rethrowIfFatalErrorOrOOM(t);
throw new FlinkException("Error while resetting checkpoint master hook '" + id + '\'', t);
}
}
} |
java | protected void promptRequiredMissingValues(ShellImpl shell, Iterable<InputComponent<?, ?>> inputs)
throws InterruptedException
{
ShellUIPromptImpl prompt = shell.createPrompt(context);
for (InputComponent<?, ?> input : inputs)
{
if (input.isEnabled() && !input.isDeprecated())
{
boolean requiredInputMissing = input.isRequired() && !(input.hasDefaultValue() || input.hasValue());
Object obj = prompt.promptValueFrom(input);
if (obj == null && requiredInputMissing)
{
// No value returned. Just stop testing other inputs
break;
}
}
}
} |
python | def _no_exp(number):
r"""
Convert a number to a string without using scientific notation.
:param number: Number to convert
:type number: integer or float
:rtype: string
:raises: RuntimeError (Argument \`number\` is not valid)
"""
if isinstance(number, bool) or (not isinstance(number, (int, float))):
raise RuntimeError("Argument `number` is not valid")
mant, exp = _to_scientific_tuple(number)
if not exp:
return str(number)
floating_mant = "." in mant
mant = mant.replace(".", "")
if exp < 0:
return "0." + "0" * (-exp - 1) + mant
if not floating_mant:
return mant + "0" * exp + (".0" if isinstance(number, float) else "")
lfpart = len(mant) - 1
if lfpart < exp:
return (mant + "0" * (exp - lfpart)).rstrip(".")
return mant |
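A few representative inputs trace the branches above; the expected strings assume `_to_scientific_tuple` behaves as its name suggests (e.g., returning `('1', -3)` for `0.001` and `('2', 3)` for `2000.0`):

```python
# Expected behavior traced from the branches above (illustrative only)
assert _no_exp(5) == "5"            # exp == 0: returned via str()
assert _no_exp(0.001) == "0.001"    # exp < 0: zero-padded after "0."
assert _no_exp(2000.0) == "2000.0"  # positive exp, non-floating mantissa, float input
```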
java | public void setBloomFilterColumns(java.util.Collection<String> bloomFilterColumns) {
if (bloomFilterColumns == null) {
this.bloomFilterColumns = null;
return;
}
this.bloomFilterColumns = new java.util.ArrayList<String>(bloomFilterColumns);
} |
java | protected Container getContainer(short x) {
int i = this.binarySearch(0, size, x);
if (i < 0) {
return null;
}
return this.values[i];
} |
python | def main(*argv):
""" main driver of program """
try:
# Inputs
#
adminUsername = argv[0]
adminPassword = argv[1]
siteURL = argv[2]
username = argv[3]
subFolders = argv[4].lower() == "true"
# Logic
#
sh = arcrest.AGOLTokenSecurityHandler(adminUsername, adminPassword)
admin = arcrest.manageorg.Administration(url=siteURL,
securityHandler=sh)
content = admin.content
        if isinstance(content, arcrest.manageorg._content.Content): pass  # no-op type check
usercontent = content.usercontent(username=username)
res = usercontent.listUserContent(username=adminUsername)
# Delete Root Items
#
itemsToErase = ",".join([item['id'] for item in res['items']])
usercontent.deleteItems(items=itemsToErase)
# Walk Each Folder and erase items if subfolder == True
#
if subFolders:
for folder in res['folders']:
c = usercontent.listUserContent(username=username, folderId=folder['id'])
itemsToErase = ",".join([item['id'] for item in c['items']])
                if itemsToErase:
usercontent.deleteItems(items=itemsToErase)
del c
usercontent.deleteFolder(folderId=folder['id'])
del folder
arcpy.AddMessage("User %s content has been deleted." % username)
arcpy.SetParameterAsText(4, True)
except arcpy.ExecuteError:
line, filename, synerror = trace()
arcpy.AddError("error on line: %s" % line)
arcpy.AddError("error in file name: %s" % filename)
arcpy.AddError("with error message: %s" % synerror)
arcpy.AddError("ArcPy Error Message: %s" % arcpy.GetMessages(2))
    except FunctionError as f_e:
messages = f_e.args[0]
arcpy.AddError("error in function: %s" % messages["function"])
arcpy.AddError("error on line: %s" % messages["line"])
arcpy.AddError("error in file name: %s" % messages["filename"])
arcpy.AddError("with error message: %s" % messages["synerror"])
arcpy.AddError("ArcPy Error Message: %s" % messages["arc"])
except:
line, filename, synerror = trace()
arcpy.AddError("error on line: %s" % line)
arcpy.AddError("error in file name: %s" % filename)
arcpy.AddError("with error message: %s" % synerror) |
java | @SuppressWarnings("unchecked")
public <A extends Annotation> A getAnnotation(final Method method, final Class<A> annClas) throws AnnotationReadException {
final Class<?> clazz = method.getDeclaringClass();
if(xmlInfo != null && xmlInfo.containsClassInfo(clazz.getName())) {
final ClassInfo classInfo = xmlInfo.getClassInfo(clazz.getName());
if(classInfo.containsMethodInfo(method.getName())) {
MethodInfo methodInfo = classInfo.getMethodInfo(method.getName());
if(methodInfo != null && methodInfo.containsAnnotationInfo(annClas.getName())) {
AnnotationInfo annInfo = methodInfo.getAnnotationInfo(annClas.getName());
try {
return (A)annotationBuilder.buildAnnotation(Class.forName(annInfo.getClassName()), annInfo);
} catch (ClassNotFoundException e) {
throw new AnnotationReadException(String.format("not found class '%s'", annInfo.getClassName()), e);
}
}
}
}
return method.getAnnotation(annClas);
} |
java | public static synchronized ExecutionSignature createSignature(
final Class<? extends AbstractInvokable> invokableClass, final JobID jobID) {
// First, try to load message digest algorithm, if necessary
if (messageDigest == null) {
try {
messageDigest = MessageDigest.getInstance(HASHINGALGORITHM);
} catch (NoSuchAlgorithmException e) {
LOG.error("Unable to load message digest algorithm " + HASHINGALGORITHM);
return null;
}
}
// Reset digest buffer and add the name of the invokable class to the message digest buffer
messageDigest.reset();
messageDigest.update(invokableClass.getName().getBytes());
String[] requiredJarFiles;
// Next, retrieve the JAR-files associated with this job
try {
requiredJarFiles = LibraryCacheManager.getRequiredJarFiles(jobID);
} catch (IOException ioe) {
// Output an error message and return
LOG.error("Cannot access library cache manager for job ID " + jobID);
return null;
}
// Now, sort the list of JAR-files in order to always calculate the signature in the same manner
Arrays.sort(requiredJarFiles);
// Finally, add the names of the JAR-files to the hash calculation
for (int i = 0; i < requiredJarFiles.length; i++) {
messageDigest.update(requiredJarFiles[i].getBytes());
}
return new ExecutionSignature(messageDigest.digest());
} |
python | def get_metric_details(self, m, **kwargs): # noqa: E501
"""Get more details on a metric, including reporting sources and approximate last time reported # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_metric_details(m, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str m: Metric name (required)
:param int l: limit
:param str c: cursor value to continue if the number of results exceeds 1000
:param list[str] h: glob pattern for sources to include in the query result
:return: MetricDetailsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_metric_details_with_http_info(m, **kwargs) # noqa: E501
else:
(data) = self.get_metric_details_with_http_info(m, **kwargs) # noqa: E501
return data |
python | def find_library(library_root, additional_places=None):
"""
Returns the name of the library without extension
:param library_root: root of the library to search, for example "cfitsio_" will match libcfitsio_1.2.3.4.so
:return: the name of the library found (NOTE: this is *not* the path), and a directory path if the library is not
in the system paths (and None otherwise). The name of libcfitsio_1.2.3.4.so will be cfitsio_1.2.3.4, in other words,
it will be what is needed to be passed to the linker during a c/c++ compilation, in the -l option
"""
# find_library searches for all system paths in a system independent way (but NOT those defined in
# LD_LIBRARY_PATH or DYLD_LIBRARY_PATH)
first_guess = ctypes.util.find_library(library_root)
if first_guess is not None:
# Found in one of the system paths
if sys.platform.lower().find("linux") >= 0:
# On linux the linker already knows about these paths, so we
# can return None as path
return sanitize_lib_name(first_guess), None
elif sys.platform.lower().find("darwin") >= 0:
# On Mac we still need to return the path, because the linker sometimes
# does not look into it
return sanitize_lib_name(first_guess), os.path.dirname(first_guess)
else:
# Windows is not supported
raise NotImplementedError("Platform %s is not supported" % sys.platform)
else:
# could not find it. Let's examine LD_LIBRARY_PATH or DYLD_LIBRARY_PATH
        # (if they are not defined, possible_locations will become [""] which will
        # be handled by the next loop)
if sys.platform.lower().find("linux") >= 0:
# Unix / linux
possible_locations = os.environ.get("LD_LIBRARY_PATH", "").split(":")
elif sys.platform.lower().find("darwin") >= 0:
# Mac
possible_locations = os.environ.get("DYLD_LIBRARY_PATH", "").split(":")
else:
raise NotImplementedError("Platform %s is not supported" % sys.platform)
if additional_places is not None:
possible_locations.extend(additional_places)
# Now look into the search paths
library_name = None
library_dir = None
for search_path in possible_locations:
if search_path == "":
                # This can happen if there is more than one ":", or if neither
                # LD_LIBRARY_PATH nor DYLD_LIBRARY_PATH is defined (because of
                # the default "" used above in os.environ.get)
continue
results = glob.glob(os.path.join(search_path, "lib%s*" % library_root))
if len(results) >= 1:
# Results contain things like libXS.so, libXSPlot.so, libXSpippo.so
# If we are looking for libXS.so, we need to make sure that we get the right one!
for result in results:
                    if re.match(r"lib%s[\-_\.]" % library_root, os.path.basename(result)) is None:
continue
else:
# FOUND IT
# This is the full path of the library, like /usr/lib/libcfitsio_1.2.3.4
library_name = result
library_dir = search_path
break
else:
continue
if library_name is not None:
break
if library_name is None:
return None, None
else:
# Sanitize the library name to get from the fully-qualified path to just the library name
# (/usr/lib/libgfortran.so.3.0 becomes gfortran)
return sanitize_lib_name(library_name), library_dir |
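A hedged usage sketch; the result depends entirely on which libraries the host system has installed, and `sanitize_lib_name` is defined elsewhere in the same module:

```python
# Hypothetical call; output varies with the host system
name, libdir = find_library("cfitsio")
if name is None:
    print("libcfitsio not found in system paths or (DY)LD_LIBRARY_PATH")
else:
    # pass -l<name> to the linker, plus -L<libdir> when libdir is not None
    print("link with -l%s (from %s)" % (name, libdir))
```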
java | public DeleteTableResponse deleteTable(String tableName) {
checkNotNull(tableName, "request should not be null.");
InternalRequest httpRequest = createRequestUnderInstance(HttpMethodName.DELETE,
MolaDbConstants.URI_TABLE,
tableName);
DeleteTableResponse ret = this.invokeHttpClient(httpRequest, DeleteTableResponse.class);
return ret;
} |
java | public final void commit() throws XAException
{
if (tc.isEntryEnabled()) Tr.entry(tc, "commit", _resource);
Tr.warning(tc, "WTRN0018_1PC_RESOURCE_DOES_NOT_SUPPORT_COMMIT");
if (tc.isEntryEnabled()) Tr.exit(tc, "commit");
throw new XAException(XAException.XAER_PROTO);
} |
java | public static int getInt(String key, int def) {
String value = get(key);
if (value == null) {
return def;
}
value = value.trim();
try {
return Integer.parseInt(value);
} catch (Exception e) {
// Ignore
}
logger.warn(
"Unable to parse the integer system property '{}':{} - using the default value: {}",
key, value, def
);
return def;
} |
java | public String getContextPath() {
ServletRequest request = pageContext.getRequest();
if (request instanceof HttpServletRequest) {
return ((HttpServletRequest) request).getContextPath();
}
return "/";
} |
python | def ap(u, d, M, step, K, eps=0.001, leak=0, initCoeffs=None, N=None,
returnCoeffs=False):
"""
Perform affine projection (AP) adaptive filtering on u to minimize error
given by e=d-y, where y is the output of the adaptive filter.
Parameters
----------
u : array-like
One-dimensional filter input.
d : array-like
One-dimensional desired signal, i.e., the output of the unknown FIR
system which the adaptive filter should identify. Must have length >=
len(u), or N+M-1 if number of iterations are limited (via the N
parameter).
M : int
Desired number of filter taps (desired filter order + 1), must be
non-negative.
step : float
Step size of the algorithm, must be non-negative.
K : int
Projection order, must be integer larger than zero.
Optional Parameters
-------------------
eps : float
Regularization factor to avoid numerical issues when power of input
is close to zero. Defaults to 0.001. Must be non-negative.
leak : float
Leakage factor, must be equal to or greater than zero and smaller than
one. When greater than zero a leaky LMS filter is used. Defaults to 0,
i.e., no leakage.
initCoeffs : array-like
Initial filter coefficients to use. Should match desired number of
filter taps, defaults to zeros.
N : int
Number of iterations to run. Must be less than or equal to len(u)-M+1.
Defaults to len(u)-M+1.
returnCoeffs : boolean
If true, will return all filter coefficients for every iteration in an
N x M matrix. Does not include the initial coefficients. If false, only
the latest coefficients in a vector of length M is returned. Defaults
to false.
Returns
-------
y : numpy.array
Output values of LMS filter, array of length N.
e : numpy.array
Error signal, i.e, d-y. Array of length N.
w : numpy.array
Final filter coefficients in array of length M if returnCoeffs is
False. NxM array containing all filter coefficients for all iterations
otherwise.
Raises
------
TypeError
If number of filter taps M is not type integer, number of iterations N
is not type integer, or leakage leak is not type float/int.
ValueError
If number of iterations N is greater than len(u)-M, number of filter
taps M is negative, or if step-size or leakage is outside specified
range.
Minimal Working Example
-----------------------
>>> import numpy as np
>>>
>>> np.random.seed(1337)
>>> ulen = 2000
>>> coeff = np.concatenate(([4], np.zeros(10), [-11], np.zeros(7), [0.7]))
>>> u = np.random.randn(ulen)
>>> d = np.convolve(u, coeff)
>>>
>>> M = 20 # No. of taps
>>> step = 1 # Step size
>>> K = 5 # Projection order
>>> y, e, w = ap(u, d, M, step, K)
    >>> print(np.allclose(w, coeff))
True
Extended Example
----------------
>>> import numpy as np
>>>
>>> np.random.seed(1337)
>>> N = 1000
>>> coeffs = np.concatenate(([13], np.zeros(9), [-3], np.zeros(8), [-.2]))
>>> u = np.random.randn(20000) # Note: len(u) >> N but we limit iterations
>>> d = np.convolve(u, coeffs)
>>>
>>> M = 20 # No. of taps
>>> step = 1 # Step size
>>> K = 5 # Projection order
>>> y, e, w = ap(u, d, M, step, K, N=N, returnCoeffs=True)
>>> y.shape == (N,)
True
>>> e.shape == (N,)
True
>>> w.shape == (N, M)
True
>>> # Calculate mean square weight error
>>> mswe = np.mean((w - coeffs)**2, axis=1)
    >>> # MSWE should never increase, so every diff should be <= 0 (within tolerance)
>>> diff = np.diff(mswe)
>>> (diff <= 1e-10).all()
True
"""
# Check epsilon
_pchk.checkRegFactor(eps)
# Check projection order
_pchk.checkProjectOrder(K)
# Num taps check
_pchk.checkNumTaps(M)
# Max iteration check
if N is None:
N = len(u)-M-K+1
_pchk.checkIter(N, len(u)-M+1)
# Check len(d)
_pchk.checkDesiredSignal(d, N, M)
# Step check
_pchk.checkStep(step)
# Leakage check
_pchk.checkLeakage(leak)
# Init. coeffs check
if initCoeffs is None:
initCoeffs = np.zeros(M)
else:
_pchk.checkInitCoeffs(initCoeffs, M)
# Initialization
y_out = np.zeros(N) # Filter output
e_out = np.zeros(N) # Error signal
w = initCoeffs # Initial filter coeffs
I = np.identity(K) # Init. identity matrix for faster loop matrix inv.
epsI = eps * np.identity(K) # Init. epsilon identiy matrix
leakstep = (1 - step*leak)
if returnCoeffs:
W = np.zeros((N, M)) # Matrix to hold coeffs for each iteration
# Perform filtering
    for n in range(N):
# Generate U matrix and D vector with current data
U = np.zeros((M, K))
for k in np.arange(K):
U[:, (K-k-1)] = u[n+k:n+M+k]
U = np.flipud(U)
D = np.flipud(d[n+M-1:n+M+K-1])
# Filter
y = np.dot(U.T, w)
e = D - y
y_out[n] = y[0]
e_out[n] = e[0]
# Normalization factor
normFactor = np.linalg.solve(epsI + np.dot(U.T, U), I)
# Naive alternative
# normFactor = np.linalg.inv(epsI + np.dot(U.T, U))
w = leakstep * w + step * np.dot(U, np.dot(normFactor, e))
if returnCoeffs:
W[n] = w
if returnCoeffs:
w = W
return y_out, e_out, w |
python | def _traverse_relationship_objs(self, rel2src2dsts, goobj_child, goids_seen):
"""Traverse from source GO up relationships."""
child_id = goobj_child.id
goids_seen.add(child_id)
##A self.go2obj[child_id] = goobj_child
# Update goids_seen and go2obj with child alt_ids
for goid_altid in goobj_child.alt_ids:
goids_seen.add(goid_altid)
##A self.go2obj[goid_altid] = goobj_child
# Loop through relationships of child object
for reltype, recs in goobj_child.relationship.items():
if reltype in self.relationships:
for relationship_obj in recs:
relationship_id = relationship_obj.id
rel2src2dsts[reltype][relationship_id].add(child_id)
# If relationship has not been seen, traverse
if relationship_id not in goids_seen:
self._traverse_relationship_objs(rel2src2dsts, relationship_obj, goids_seen) |
java | @Override
public List<Wave> postBootWaveList() {
final List<Wave> waveList = new ArrayList<>();
// Get Java command line parameters
final Parameters p = getParameters();
if (p.getRaw().size() >= 1) {
// The first parameter must contains the log file to parse
final String logFileName = p.getRaw().get(0);
final File logFile = new File(logFileName);
if (logFile.exists()) {
// Call the service that will load and parse the log file
waveList.add(
WBuilder.wave()
.waveGroup(WaveGroup.RETURN_DATA)
.waveType(LoadEdtFileService.DO_LOAD_EVENTS)
.componentClass(LoadEdtFileService.class)
.addDatas(WBuilder.waveData(EditorWaves.EVENTS_FILE, logFile)));
// Start the animation to show all components creation
waveList.add(WBuilder.wave().waveType(EditorWaves.DO_PLAY));
}
}
return waveList;
} |
python | def log(self, *lines):
"""will print out the lines in console if debug is enabled for the
specific sprite"""
if getattr(self, "debug", False):
print(dt.datetime.now().time(), end=' ')
for line in lines:
print(line, end=' ')
print() |
java | public Symbol getSymbolForInstancesOf(Symbol sym) {
FunctionType fn = sym.getFunctionType();
if (fn != null && fn.isNominalConstructor()) {
return getSymbolForInstancesOf(fn);
}
return null;
} |
python | def configure_logger(self, tc_config_log_filename=None, tc_output_log_filename=None):
"""Configure selenium instance logger
:param tc_config_log_filename: test case specific logging config file
:param tc_output_log_filename: test case specific output logger file
"""
# Get config logger filename
config_log_filename = DriverWrappersPool.get_configured_value('Config_log_filename', tc_config_log_filename,
'logging.conf')
config_log_filename = os.path.join(DriverWrappersPool.config_directory, config_log_filename)
# Configure logger only if logging filename has changed
if self.config_log_filename != config_log_filename:
# Get output logger filename
output_log_filename = DriverWrappersPool.get_configured_value('Output_log_filename', tc_output_log_filename,
'toolium.log')
output_log_filename = os.path.join(DriverWrappersPool.output_directory, output_log_filename)
output_log_filename = output_log_filename.replace('\\', '\\\\')
try:
logging.config.fileConfig(config_log_filename, {'logfilename': output_log_filename}, False)
except Exception as exc:
print("[WARN] Error reading logging config file '{}': {}".format(config_log_filename, exc))
self.config_log_filename = config_log_filename
self.output_log_filename = output_log_filename
self.logger = logging.getLogger(__name__) |
python | def _get_acceptable_response_type():
"""Return the mimetype for this request."""
if ('Accept' not in request.headers or request.headers['Accept'] in
ALL_CONTENT_TYPES):
return JSON
acceptable_content_types = set(
request.headers['ACCEPT'].strip().split(','))
if acceptable_content_types & HTML_CONTENT_TYPES:
return HTML
elif acceptable_content_types & JSON_CONTENT_TYPES:
return JSON
else:
# HTTP 406 Not Acceptable
raise InvalidAPIUsage(406) |
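The negotiation step is plain set intersection; here it is in isolation, with the content-type sets written out as assumptions (the real module defines its own constants):

```python
# Standalone sketch of the negotiation above; these sets are assumptions
HTML_CONTENT_TYPES = {"text/html", "application/xhtml+xml"}
JSON_CONTENT_TYPES = {"application/json"}

acceptable = set("text/html,application/json".strip().split(','))
print(bool(acceptable & HTML_CONTENT_TYPES))  # True -> HTML wins
```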
python | def _build_object(self, nm, clsdata, parents,**kw):
logger.debug(util.lazy_format("Building object {0}", nm))
# To support circular references, we tag objects that we're
# currently building as "under construction"
self.under_construction.add(nm)
props = {}
defaults = set()
properties = {}
for p in parents:
properties = util.propmerge(properties, p.__propinfo__)
if 'properties' in clsdata:
properties = util.propmerge(properties, clsdata['properties'])
name_translation = {}
for prop, detail in properties.items():
logger.debug(util.lazy_format("Handling property {0}.{1}",nm, prop))
properties[prop]['raw_name'] = prop
name_translation[prop] = prop.replace('@', '')
prop = name_translation[prop]
if detail.get('default', None) is not None:
defaults.add(prop)
if detail.get('type', None) == 'object':
uri = "{0}/{1}_{2}".format(nm,
prop, "<anonymous>")
self.resolved[uri] = self.construct(
uri,
detail,
(ProtocolBase,))
props[prop] = make_property(prop,
{'type': self.resolved[uri]},
self.resolved[uri].__doc__)
properties[prop]['type'] = self.resolved[uri]
elif 'type' not in detail and '$ref' in detail:
ref = detail['$ref']
uri = util.resolve_ref_uri(self.resolver.resolution_scope, ref)
logger.debug(util.lazy_format(
"Resolving reference {0} for {1}.{2}",
ref, nm, prop
))
if uri in self.resolved:
typ = self.resolved[uri]
else:
typ = self.construct(uri, detail, (ProtocolBase,))
props[prop] = make_property(prop,
{'type': typ},
typ.__doc__)
properties[prop]['$ref'] = uri
properties[prop]['type'] = typ
elif 'oneOf' in detail:
potential = self.resolve_classes(detail['oneOf'])
logger.debug(util.lazy_format("Designating {0} as oneOf {1}", prop, potential))
desc = detail[
'description'] if 'description' in detail else ""
props[prop] = make_property(prop,
{'type': potential}, desc
)
elif 'type' in detail and detail['type'] == 'array':
if 'items' in detail and isinstance(detail['items'], dict):
if '$ref' in detail['items']:
uri = util.resolve_ref_uri(
self.resolver.resolution_scope,
detail['items']['$ref'])
typ = self.construct(uri, detail['items'])
constraints = copy.copy(detail)
constraints['strict'] = kw.get('strict')
propdata = {
'type': 'array',
'validator': python_jsonschema_objects.wrapper_types.ArrayWrapper.create(
uri,
item_constraint=typ,
**constraints)}
else:
uri = "{0}/{1}_{2}".format(nm,
prop, "<anonymous_field>")
try:
if 'oneOf' in detail['items']:
typ = TypeProxy([
self.construct(uri + "_%s" % i, item_detail)
if '$ref' not in item_detail else
self.construct(util.resolve_ref_uri(
self.resolver.resolution_scope,
item_detail['$ref']),
item_detail)
for i, item_detail in enumerate(detail['items']['oneOf'])]
)
else:
typ = self.construct(uri, detail['items'])
constraints = copy.copy(detail)
constraints['strict'] = kw.get('strict')
propdata = {'type': 'array',
'validator': python_jsonschema_objects.wrapper_types.ArrayWrapper.create(
uri,
item_constraint=typ,
**constraints)}
except NotImplementedError:
typ = detail['items']
constraints = copy.copy(detail)
constraints['strict'] = kw.get('strict')
propdata = {'type': 'array',
'validator': python_jsonschema_objects.wrapper_types.ArrayWrapper.create(
uri,
item_constraint=typ,
**constraints)}
props[prop] = make_property(prop,
propdata,
typ.__doc__)
elif 'items' in detail:
typs = []
for i, elem in enumerate(detail['items']):
uri = "{0}/{1}/<anonymous_{2}>".format(nm, prop, i)
typ = self.construct(uri, elem)
typs.append(typ)
props[prop] = make_property(prop,
{'type': typs},
)
else:
desc = detail[
'description'] if 'description' in detail else ""
uri = "{0}/{1}".format(nm, prop)
typ = self.construct(uri, detail)
props[prop] = make_property(prop, {'type': typ}, desc)
""" If this object itself has a 'oneOf' designation, then
make the validation 'type' the list of potential objects.
"""
if 'oneOf' in clsdata:
klasses = self.resolve_classes(clsdata['oneOf'])
# Need a validation to check that it meets one of them
props['__validation__'] = {'type': klasses}
props['__extensible__'] = pattern_properties.ExtensibleValidator(
nm,
clsdata,
self)
props['__prop_names__'] = name_translation
props['__propinfo__'] = properties
required = set.union(*[p.__required__ for p in parents])
if 'required' in clsdata:
for prop in clsdata['required']:
required.add(prop)
invalid_requires = [req for req in required if req not in props['__propinfo__']]
if len(invalid_requires) > 0:
raise validators.ValidationError("Schema Definition Error: {0} schema requires "
"'{1}', but properties are not defined"
.format(nm, invalid_requires))
props['__required__'] = required
props['__has_default__'] = defaults
if required and kw.get("strict"):
props['__strict__'] = True
props['__title__'] = clsdata.get('title')
cls = type(str(nm.split('/')[-1]), tuple(parents), props)
self.under_construction.remove(nm)
return cls |
python | def remove_keywords_from_list(self, keyword_list):
"""To remove keywords present in list
Args:
keyword_list (list(str)): List of keywords to remove
Examples:
            >>> keyword_processor.remove_keywords_from_list(["java", "python"])
Raises:
AttributeError: If `keyword_list` is not a list.
"""
if not isinstance(keyword_list, list):
raise AttributeError("keyword_list should be a list")
for keyword in keyword_list:
self.remove_keyword(keyword) |
java | public void index(Record record) {
if (directory == null)
init();
if (!overwrite && path != null)
delete(record);
Document doc = new Document();
for (String propname : record.getProperties()) {
Property prop = config.getPropertyByName(propname);
if (prop == null)
throw new DukeConfigException("Record has property " + propname +
" for which there is no configuration");
if (prop.getComparator() instanceof GeopositionComparator &&
geoprop != null) {
// index specially as geocoordinates
String v = record.getValue(propname);
if (v == null || v.equals(""))
continue;
// this gives us a searchable geoindexed value
for (IndexableField f : geoprop.createIndexableFields(v))
doc.add(f);
// this preserves the coordinates in readable form for display purposes
doc.add(new Field(propname, v, Field.Store.YES,
Field.Index.NOT_ANALYZED));
} else {
Field.Index ix;
if (prop.isIdProperty())
ix = Field.Index.NOT_ANALYZED; // so findRecordById will work
else // if (prop.isAnalyzedProperty())
ix = Field.Index.ANALYZED;
// FIXME: it turns out that with the StandardAnalyzer you can't have a
// multi-token value that's not analyzed if you want to find it again...
// else
// ix = Field.Index.NOT_ANALYZED;
Float boost = getBoostFactor(prop.getHighProbability(), BoostMode.INDEX);
for (String v : record.getValues(propname)) {
if (v.equals(""))
continue; // FIXME: not sure if this is necessary
Field field = new Field(propname, v, Field.Store.YES, ix);
if (boost != null)
field.setBoost(boost);
doc.add(field);
}
}
}
try {
iwriter.addDocument(doc);
} catch (IOException e) {
throw new DukeException(e);
}
} |
java | public Cell<C,T> maxSize (float width, float height) {
maxWidth = new FixedValue<C, T>(layout.toolkit,width);
maxHeight = new FixedValue<C, T>(layout.toolkit,height);
return this;
} |
python | def admin_request_announcement_email(request, form, obj):
"""Send an admin announcement request email.
form: The announcement request form
obj: The announcement request object
"""
subject = "News Post Approval Needed ({})".format(obj.title)
emails = [settings.APPROVAL_EMAIL]
base_url = request.build_absolute_uri(reverse('index'))
data = {
"req": obj,
"formdata": form.data,
"info_link": request.build_absolute_uri(reverse("admin_approve_announcement", args=[obj.id])),
"base_url": base_url
}
email_send("announcements/emails/admin_approve.txt", "announcements/emails/admin_approve.html", data, subject, emails) |
java | public static void setLearningRate(ComputationGraph net, ISchedule newLrSchedule) {
setLearningRate(net, Double.NaN, newLrSchedule);
} |
java | public static Object getDefaultValue(Class<? extends Annotation> annotationType, String attributeName) {
try {
Method method = annotationType.getDeclaredMethod(attributeName, new Class[0]);
return method.getDefaultValue();
}
catch (Exception ex) {
return null;
}
} |
java | protected void processTopicSectionInfo(final BuildData buildData, final BaseTopicWrapper<?> topic, final Document doc) {
if (doc == null || topic == null) return;
final String infoName;
if (buildData.getDocBookVersion() == DocBookVersion.DOCBOOK_50) {
infoName = "info";
} else {
infoName = DocBookUtilities.TOPIC_ROOT_SECTIONINFO_NODE_NAME;
}
final CollectionWrapper<TagWrapper> tags = topic.getTags();
final List<Integer> seoCategoryIds = buildData.getServerSettings().getSEOCategoryIds();
if (seoCategoryIds != null && !seoCategoryIds.isEmpty() && tags != null && tags.getItems() != null && tags.getItems().size() > 0) {
// Find the sectioninfo node in the document, or create one if it doesn't exist
final Element sectionInfo;
final List<Node> sectionInfoNodes = XMLUtilities.getDirectChildNodes(doc.getDocumentElement(), infoName);
if (sectionInfoNodes.size() == 1) {
sectionInfo = (Element) sectionInfoNodes.get(0);
} else {
sectionInfo = doc.createElement(infoName);
}
// Build up the keywordset
final Element keywordSet = doc.createElement("keywordset");
final List<TagWrapper> tagItems = tags.getItems();
for (final TagWrapper tag : tagItems) {
if (tag.getName() == null || tag.getName().isEmpty()) continue;
if (tag.containedInCategories(seoCategoryIds)) {
final Element keyword = doc.createElement("keyword");
keyword.setTextContent(tag.getName());
keywordSet.appendChild(keyword);
}
}
// Only update the section info if we've added data
if (keywordSet.hasChildNodes()) {
sectionInfo.appendChild(keywordSet);
DocBookUtilities.setInfo(buildData.getDocBookVersion(), sectionInfo, doc.getDocumentElement());
}
}
} |
java | protected void initContext() {
context = createContext();
SmartFoxContext sfsContext = (SmartFoxContext)context;
sfsContext.setApi(getApi());
sfsContext.setExtension(this);
} |
python | def get_info(self, code):
"""Return a dict of information about the currency"""
currency = self.get_currency(code)
info = {}
users = list(filter(None, currency['users'].split(',')))
if users:
info['Users'] = users
alt = list(filter(None, currency['alternatives'].split(',')))
if alt:
info['Alternatives'] = alt
if self.modified:
info['YFUpdate'] = self.modified.isoformat()
return info |
python | def _register_server(self, server, timeout=30):
'''Register a new SiriDB Server.
This method is used by the SiriDB manage tool and should not be used
otherwise. Full access rights are required for this request.
'''
result = self._loop.run_until_complete(
self._protocol.send_package(CPROTO_REQ_REGISTER_SERVER,
data=server,
timeout=timeout))
return result |
python | def add_peer(self, peer_addr):
"Build a connection to the Hub at a given ``(host, port)`` address"
peer = connection.Peer(
self._ident, self._dispatcher, peer_addr, backend.Socket())
peer.start()
self._started_peers[peer_addr] = peer |
java | public ServiceFuture<DatabasePrincipalListResultInner> removePrincipalsAsync(String resourceGroupName, String clusterName, String databaseName, final ServiceCallback<DatabasePrincipalListResultInner> serviceCallback) {
return ServiceFuture.fromResponse(removePrincipalsWithServiceResponseAsync(resourceGroupName, clusterName, databaseName), serviceCallback);
} |
java | private void updateConduitSyncHandlerConnection(List<String> sourceList, SynchronousHandler handler) {
if (sourceList.contains("message")) {
logConduit.addSyncHandler(handler);
} else {
logConduit.removeSyncHandler(handler);
}
if (sourceList.contains("trace")) {
traceConduit.addSyncHandler(handler);
} else {
traceConduit.removeSyncHandler(handler);
}
} |
java | public void writeBareObject (Object object)
throws IOException
{
writeBareObject(object, Streamer.getStreamer(Streamer.getStreamerClass(object)), true);
} |
java | public static BigtableTableAdminClient create(
@Nonnull String projectId,
@Nonnull String instanceId,
@Nonnull EnhancedBigtableTableAdminStub stub) {
return new BigtableTableAdminClient(projectId, instanceId, stub);
} |
python | def asarray(arraylike, strict=True):
"""
Converts arraylike objects to NumPy ndarray types. Errors if
object is not arraylike and strict option is enabled.
"""
if isinstance(arraylike, np.ndarray):
return arraylike
elif isinstance(arraylike, list):
return np.asarray(arraylike, dtype=object)
elif not isinstance(arraylike, np.ndarray) and isinstance(arraylike, arraylike_types):
return arraylike.values
elif hasattr(arraylike, '__array__'):
return np.asarray(arraylike)
elif strict:
raise ValueError('Could not convert %s type to array' % type(arraylike))
return arraylike |
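A short sketch exercising each branch; `arraylike_types` is defined elsewhere in the real module (presumably pandas-like types exposing `.values`), so a stand-in is declared here:

```python
import numpy as np

arraylike_types = ()  # stand-in; the real module lists pandas-like types here

print(asarray(np.arange(3)))      # ndarray: returned as-is
print(asarray([1, 'a']).dtype)    # list: coerced to an object-dtype array
print(asarray(42, strict=False))  # not arraylike: returned unchanged
# asarray(42)                     # strict (default): raises ValueError
```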
python | def widgets(self):
"""
Get the Ext JS specific customization from the activity.
:return: The Ext JS specific customization in `list(dict)` form
"""
customization = self.activity._json_data.get('customization')
if customization and "ext" in customization.keys():
return customization['ext']['widgets']
else:
return [] |
python | def on_created(self, event):
'''Fired when something's been created'''
if self.trigger != "create":
return
action_input = ActionInput(event, "", self.name)
flows.Global.MESSAGE_DISPATCHER.send_message(action_input) |
python | def _delete_cells(self, column_family_id, columns, time_range=None, state=None):
"""Helper for :meth:`delete_cell` and :meth:`delete_cells`.
``state`` is unused by :class:`DirectRow` but is used by
subclasses.
:type column_family_id: str
:param column_family_id: The column family that contains the column
or columns with cells being deleted. Must be
of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.
:type columns: :class:`list` of :class:`str` /
:func:`unicode <unicode>`, or :class:`object`
:param columns: The columns within the column family that will have
cells deleted. If :attr:`ALL_COLUMNS` is used then
the entire column family will be deleted from the row.
:type time_range: :class:`TimestampRange`
:param time_range: (Optional) The range of time within which cells
should be deleted.
:type state: bool
:param state: (Optional) The state that is passed along to
:meth:`_get_mutations`.
"""
mutations_list = self._get_mutations(state)
if columns is self.ALL_COLUMNS:
mutation_val = data_v2_pb2.Mutation.DeleteFromFamily(
family_name=column_family_id
)
mutation_pb = data_v2_pb2.Mutation(delete_from_family=mutation_val)
mutations_list.append(mutation_pb)
else:
delete_kwargs = {}
if time_range is not None:
delete_kwargs["time_range"] = time_range.to_pb()
to_append = []
for column in columns:
column = _to_bytes(column)
# time_range will never change if present, but the rest of
# delete_kwargs will
delete_kwargs.update(
family_name=column_family_id, column_qualifier=column
)
mutation_val = data_v2_pb2.Mutation.DeleteFromColumn(**delete_kwargs)
mutation_pb = data_v2_pb2.Mutation(delete_from_column=mutation_val)
to_append.append(mutation_pb)
# We don't add the mutations until all columns have been
# processed without error.
mutations_list.extend(to_append) |
java | public void addComponent(Object screenField)
{ // Set up the dependencies
if (m_iNamePrefix != null)
m_recThis.getField(m_iNamePrefix).addComponent(screenField);
m_recThis.getField(m_iNameFirst).addComponent(screenField);
if (m_iNameMiddle != null)
m_recThis.getField(m_iNameMiddle).addComponent(screenField);
m_recThis.getField(m_iNameSur).addComponent(screenField);
if (m_iNameSuffix != null)
m_recThis.getField(m_iNameSuffix).addComponent(screenField);
if (m_iNameTitle != null)
m_recThis.getField(m_iNameTitle).addComponent(screenField);
super.addComponent(screenField);
} |
java | public static String insert(final String str, final String given, final int begin, final int end) {
if (begin < 1 || end > str.length() || end < begin) { return str; }
return str.substring(0, begin - 1) + given + str.substring(end);
} |
python | def _normalized_cookie_tuples(self, attrs_set):
"""Return list of tuples containing normalised cookie information.
attrs_set is the list of lists of key,value pairs extracted from
the Set-Cookie or Set-Cookie2 headers.
Tuples are name, value, standard, rest, where name and value are the
cookie name and value, standard is a dictionary containing the standard
cookie-attributes (discard, secure, version, expires or max-age,
domain, path and port) and rest is a dictionary containing the rest of
the cookie-attributes.
"""
cookie_tuples = []
boolean_attrs = "discard", "secure"
value_attrs = ("version",
"expires", "max-age",
"domain", "path", "port",
"comment", "commenturl")
for cookie_attrs in attrs_set:
name, value = cookie_attrs[0]
# Build dictionary of standard cookie-attributes (standard) and
# dictionary of other cookie-attributes (rest).
# Note: expiry time is normalised to seconds since epoch. V0
# cookies should have the Expires cookie-attribute, and V1 cookies
# should have Max-Age, but since V1 includes RFC 2109 cookies (and
# since V0 cookies may be a mish-mash of Netscape and RFC 2109), we
# accept either (but prefer Max-Age).
max_age_set = False
bad_cookie = False
standard = {}
rest = {}
for k, v in cookie_attrs[1:]:
lc = k.lower()
# don't lose case distinction for unknown fields
if lc in value_attrs or lc in boolean_attrs:
k = lc
if k in boolean_attrs and v is None:
# boolean cookie-attribute is present, but has no value
# (like "discard", rather than "port=80")
v = True
if k in standard:
# only first value is significant
continue
if k == "domain":
if v is None:
_debug(" missing value for domain attribute")
bad_cookie = True
break
# RFC 2965 section 3.3.3
v = v.lower()
if k == "expires":
if max_age_set:
# Prefer max-age to expires (like Mozilla)
continue
if v is None:
_debug(" missing or invalid value for expires "
"attribute: treating as session cookie")
continue
if k == "max-age":
max_age_set = True
try:
v = int(v)
except ValueError:
_debug(" missing or invalid (non-numeric) value for "
"max-age attribute")
bad_cookie = True
break
# convert RFC 2965 Max-Age to seconds since epoch
# XXX Strictly you're supposed to follow RFC 2616
                    # age-calculation rules. Remember that zero Max-Age is a
                    # request to discard (old and new) cookie, though.
k = "expires"
v = self._now + v
if (k in value_attrs) or (k in boolean_attrs):
if (v is None and
k not in ("port", "comment", "commenturl")):
_debug(" missing value for %s attribute" % k)
bad_cookie = True
break
standard[k] = v
else:
rest[k] = v
if bad_cookie:
continue
cookie_tuples.append((name, value, standard, rest))
return cookie_tuples |
python | def ned_to_use(tensor):
'''
Converts a tensor in NED coordinate sytem to USE
'''
return np.array(ROT_NED_USE * np.matrix(tensor) * ROT_NED_USE.T) |
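`ROT_NED_USE` lives elsewhere in the module; a plausible reconstruction of the NED-to-USE (Up-South-East) basis change is sketched below so the snippet can run standalone. Treat the matrix as an assumption, not the module's definition:

```python
import numpy as np

# Assumed basis change: Up = -Down, South = -North, East = East
ROT_NED_USE = np.matrix([[0., 0., -1.],
                         [-1., 0., 0.],
                         [0., 1., 0.]])

print(ned_to_use(np.diag([1., 2., 3.])))
# Rotating a diagonal tensor permutes its entries: diag(3., 1., 2.)
```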
python | def calculate_size(self, modules_per_line, number_of_lines, dpi=300):
"""Calculates the size of the barcode in pixel.
:parameters:
modules_per_line : Integer
Number of modules in one line.
number_of_lines : Integer
Number of lines of the barcode.
dpi : Integer
DPI to calculate.
:returns: Width and height of the barcode in pixel.
:rtype: Tuple
"""
width = 2 * self.quiet_zone + modules_per_line * self.module_width
height = 2.0 + self.module_height * number_of_lines
if self.font_size and self.text:
height += pt2mm(self.font_size) / 2 + self.text_distance
return int(mm2px(width, dpi)), int(mm2px(height, dpi)) |
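A worked instance of the formula, with the writer attributes filled in as assumed values (real defaults vary between barcode writers):

```python
# Hypothetical attribute values; the arithmetic mirrors the method above
quiet_zone, module_width, module_height = 6.5, 0.2, 15.0  # millimetres
modules_per_line, number_of_lines = 100, 1

width = 2 * quiet_zone + modules_per_line * module_width   # 33.0 mm
height = 2.0 + module_height * number_of_lines             # 17.0 mm
print(width, height)  # the method then converts these via mm2px(..., dpi)
```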
python | def get_files(*bases):
"""
List all files in a data directory.
"""
for base in bases:
basedir, _ = base.split(".", 1)
base = os.path.join(os.path.dirname(__file__), *base.split("."))
rem = len(os.path.dirname(base)) + len(basedir) + 2
for root, dirs, files in os.walk(base):
for name in files:
yield os.path.join(basedir, root, name)[rem:] |
java | @Override
public void run() {
while (true) {
int prefetchBatchSize;
boolean nackQueueMessages = false;
List<Message> messages = null;
try {
if (isClosed()) {
break;
}
synchronized (stateLock) {
waitForStart();
waitForPrefetch();
prefetchBatchSize = Math.min(numberOfMessagesToFetch(), SQSMessagingClientConstants.MAX_BATCH);
}
if (!isClosed()) {
messages = getMessages(prefetchBatchSize);
}
if (messages != null && !messages.isEmpty()) {
processReceivedMessages(messages);
}
} catch (InterruptedException e) {
nackQueueMessages = true;
break;
} catch (Throwable e) {
LOG.error("Unexpected exception when prefetch messages:", e);
nackQueueMessages = true;
throw new RuntimeException(e);
} finally {
if (isClosed() || nackQueueMessages) {
nackQueueMessages();
}
}
}
} |
python | def _get_client_by_id(self, client_id):
"""Get GRR client dictionary and make sure valid approvals exist.
Args:
client_id: GRR client ID.
Returns:
GRR API Client object
"""
client = self.grr_api.Client(client_id)
print('Checking for client approval')
self._check_approval_wrapper(client, client.ListFlows)
print('{0:s}: Client approval is valid'.format(client_id))
return client.Get() |
python | def connect_post_namespaced_pod_proxy_with_path(self, name, namespace, path, **kwargs): # noqa: E501
"""connect_post_namespaced_pod_proxy_with_path # noqa: E501
connect POST requests to proxy of Pod # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.connect_post_namespaced_pod_proxy_with_path(name, namespace, path, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the PodProxyOptions (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str path: path to the resource (required)
:param str path2: Path is the URL path to use for the current proxy request to pod.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.connect_post_namespaced_pod_proxy_with_path_with_http_info(name, namespace, path, **kwargs) # noqa: E501
else:
(data) = self.connect_post_namespaced_pod_proxy_with_path_with_http_info(name, namespace, path, **kwargs) # noqa: E501
return data |
python | def getargspec(f):
"""A replacement for inspect.getargspec"""
spec = getfullargspec(f)
return ArgSpec(spec.args, spec.varargs, spec.varkw, spec.defaults) |
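A self-contained version of the same shim using only the standard library; the original presumably imports `getfullargspec` and an `ArgSpec` namedtuple at module level:

```python
from inspect import getfullargspec
from collections import namedtuple

ArgSpec = namedtuple('ArgSpec', 'args varargs keywords defaults')

def getargspec(f):
    """A replacement for the removed inspect.getargspec."""
    spec = getfullargspec(f)
    return ArgSpec(spec.args, spec.varargs, spec.varkw, spec.defaults)

print(getargspec(lambda a, b=1, *rest, **kw: None))
# ArgSpec(args=['a', 'b'], varargs='rest', keywords='kw', defaults=(1,))
```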
python | def resendFaxRN_multi(self, CorpNum, OrgRequestNum, SenderNum, SenderName, Receiver, ReserveDT=None, UserID=None,
title=None, RequestNum=None):
""" ν©μ€ μ μ‘
args
CorpNum : νλΉνμ μ¬μ
μλ²νΈ
OrgRequestNum : μλ³Έ ν©μ€ μ μ‘μ ν λΉν μ μ‘μμ²λ²νΈ
SenderNum : λ°μ μ λ²νΈ
SenderName : λ°μ μλͺ
Receiver : μμ μμ 보 λ°°μ΄
ReserveDT : μμ½μκ°(νμ yyyyMMddHHmmss)
UserID : νλΉνμ μμ΄λ
title : ν©μ€μ λͺ©
RequestNum : μ μ‘μμ²μ ν λΉν μ μ‘μμ²λ²νΈ
return
μ μλ²νΈ (receiptNum)
raise
PopbillException
"""
req = {}
        if not OrgRequestNum:
            raise PopbillException(-99999999, "Original fax request number was not provided.")
if SenderNum != "":
req['snd'] = SenderNum
if SenderName != "":
req['sndnm'] = SenderName
if ReserveDT != None:
req['sndDT'] = ReserveDT
if title != None:
req['title'] = title
if RequestNum != None:
req['requestNum'] = RequestNum
if Receiver != None:
req['rcvs'] = []
if (type(Receiver) is str):
Receiver = FaxReceiver(receiveNum=Receiver)
if (type(Receiver) is FaxReceiver):
Receiver = [Receiver]
for r in Receiver:
req['rcvs'].append({"rcv": r.receiveNum, "rcvnm": r.receiveName})
postData = self._stringtify(req)
return self._httppost('/FAX/Resend/' + OrgRequestNum, postData, CorpNum, UserID).receiptNum |
python | def _processHandler(self, securityHandler, param_dict):
"""proceses the handler and returns the cookiejar"""
cj = None
handler = None
if securityHandler is None:
cj = cookiejar.CookieJar()
elif securityHandler.method.lower() == "token":
param_dict['token'] = securityHandler.token
if hasattr(securityHandler, 'cookiejar'):
cj = securityHandler.cookiejar
if hasattr(securityHandler, 'handler'):
handler = securityHandler.handler
elif securityHandler.method.lower() == "handler":
handler = securityHandler.handler
cj = securityHandler.cookiejar
return param_dict, handler, cj |
java | public static <T> Optional<T> get(Config config, Function<String, T> getter, String path) {
if (!config.hasPath(path)) {
return Optional.empty();
}
return Optional.of(getter.apply(path));
} |
java | void reportChecksumFailure(String file, LocatedBlock lblocks[]) {
try {
reportBadBlocks(lblocks);
} catch (IOException ie) {
LOG.info("Found corruption while reading " + file
+ ". Error repairing corrupt blocks. Bad blocks remain. "
+ StringUtils.stringifyException(ie));
}
} |
java | public static sslservicegroup[] get_filtered(nitro_service service, String filter) throws Exception{
sslservicegroup obj = new sslservicegroup();
options option = new options();
option.set_filter(filter);
sslservicegroup[] response = (sslservicegroup[]) obj.getfiltered(service, option);
return response;
} |
java | public <T> void queue(StorageRequest<T> req, JsonBatchCallback<T> callback) throws IOException {
checkState(
!requestsExecutor.isShutdown() && !requestsExecutor.isTerminated(),
"requestsExecutor should not be terminated to queue batch requests");
if (maxRequestsPerBatch == 1) {
responseFutures.add(
requestsExecutor.submit(
() -> {
execute(req, callback);
return null;
}));
} else {
pendingRequests.add(batch -> req.queue(batch, callback));
flushIfPossibleAndRequired();
}
} |
python | def writeTuple(self, val, what):
""" Writes a tuple of numbers (on one line).
"""
        # Limit to three values, so RGBA data drops the alpha channel;
        # the format can handle up to 3 texcoords
val = val[:3]
# Make string
val = ' '.join([str(v) for v in val])
# Write line
self.writeLine('%s %s' % (what, val)) |
python | def getnameinfo(sockaddr, flags=0, timeout=30):
"""Resolve a socket address *sockaddr* back to a ``(node, service)`` tuple.
The *flags* argument can be used to modify the resolution process. See the
:func:`socket.getnameinfo` function for more information.
The address resolution is performed in the libuv thread pool.
"""
hub = get_hub()
with switch_back(timeout) as switcher:
request = pyuv.dns.getnameinfo(hub.loop, sockaddr, flags, callback=switcher)
switcher.add_cleanup(request.cancel)
result = hub.switch()
result, error = result[0]
if error:
message = pyuv.errno.strerror(error)
raise pyuv.error.UVError(error, message)
return result |
java | private Assignment getBestAssignment(Assignment portion, CliqueTree cliqueTree, int initialFactor) {
if (cliqueTree.getMarginals().size() == 0) {
// Special case where the factor graph has no factors in it.
return conditionedValues;
} else {
// General case
SortedSet<Integer> unvisited = Sets.newTreeSet();
for (int i = 0; i < cliqueTree.numFactors(); i++) {
unvisited.add(i);
}
Set<Integer> visited = Sets.newHashSet();
Assignment current = portion;
int nextFactor = initialFactor;
while (unvisited.size() > 0) {
current = getBestAssignmentGiven(cliqueTree, nextFactor, visited, current);
unvisited.removeAll(visited);
if (unvisited.size() > 0) {
nextFactor = unvisited.first();
}
}
return current.union(conditionedValues);
}
} |
python | def reset_cmd_timeout(self):
"""Reset timeout for command execution."""
if self._cmd_timeout:
self._cmd_timeout.cancel()
self._cmd_timeout = self.loop.call_later(self.client.timeout,
self.transport.close) |
python | def next_event(self):
"""Parse the next event out of our receive buffer, update our internal
state, and return it.
This is a mutating operation -- think of it like calling :func:`next`
on an iterator.
Returns:
: One of three things:
1) An event object -- see :ref:`events`.
2) The special constant :data:`NEED_DATA`, which indicates that
you need to read more data from your socket and pass it to
:meth:`receive_data` before this method will be able to return
any more events.
3) The special constant :data:`PAUSED`, which indicates that we
are not in a state where we can process incoming data (usually
because the peer has finished their part of the current
request/response cycle, and you have not yet called
:meth:`start_next_cycle`). See :ref:`flow-control` for details.
Raises:
RemoteProtocolError:
The peer has misbehaved. You should close the connection
(possibly after sending some kind of 4xx response).
Once this method returns :class:`ConnectionClosed` once, then all
subsequent calls will also return :class:`ConnectionClosed`.
If this method raises any exception besides :exc:`RemoteProtocolError`
then that's a bug -- if it happens please file a bug report!
If this method raises any exception then it also sets
:attr:`Connection.their_state` to :data:`ERROR` -- see
:ref:`error-handling` for discussion.
"""
if self.their_state is ERROR:
raise RemoteProtocolError(
"Can't receive data when peer state is ERROR")
try:
event = self._extract_next_receive_event()
if event not in [NEED_DATA, PAUSED]:
self._process_event(self.their_role, event)
self._receive_buffer.compress()
if event is NEED_DATA:
if len(self._receive_buffer) > self._max_incomplete_event_size:
# 431 is "Request header fields too large" which is pretty
# much the only situation where we can get here
raise RemoteProtocolError("Receive buffer too long",
error_status_hint=431)
if self._receive_buffer_closed:
# We're still trying to complete some event, but that's
# never going to happen because no more data is coming
raise RemoteProtocolError(
"peer unexpectedly closed connection")
return event
except BaseException as exc:
self._process_error(self.their_role)
if isinstance(exc, LocalProtocolError):
exc._reraise_as_remote_protocol_error()
else:
raise |
java | public BoxFile.Info uploadNewVersion(InputStream fileContent, String fileContentSHA1) {
return this.uploadNewVersion(fileContent, fileContentSHA1, null);
} |
python | def load_default_model_sentencizer(lang):
""" Load a generic spaCy model and add the sentencizer for sentence tokenization"""
loading_start = time.time()
lang_class = get_lang_class(lang)
nlp = lang_class()
nlp.add_pipe(nlp.create_pipe('sentencizer'))
loading_end = time.time()
loading_time = loading_end - loading_start
return nlp, loading_time, lang + "_default_" + 'sentencizer' |
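A hedged usage sketch; it assumes spaCy is installed and that `get_lang_class` and `time` are imported at module level as the snippet implies:

```python
# Hypothetical usage; requires spaCy
nlp, load_time, name = load_default_model_sentencizer("en")
doc = nlp("This is one sentence. This is another.")
print(name, [sent.text for sent in doc.sents])
```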
python | def applyPolicies(self, request):
'''
Apply relevant header policies to request. Call me where
appropriate in your render_* methods.
'''
for policy in self._actingPolicies[request.method]:
request = policy.apply(request)
return request |
java | public void restoreAfterSafeModeRestart() {
if (!clusterManager.safeMode) {
return;
}
for (Session session : sessions.values()) {
for (ResourceRequestInfo resourceRequestInfo :
session.idToRequest.values()) {
// The helper method to restore the ResourceRequestInfo instances
// is placed in NodeManager because it makes use of other members
// of NodeManager
clusterManager.nodeManager.
restoreResourceRequestInfo(resourceRequestInfo);
}
session.restoreAfterSafeModeRestart();
clusterManager.getScheduler().addSession(session.getSessionId(),
session);
}
clusterManager.getMetrics().setNumRunningSessions(sessions.size());
} |
java | public static SipApplicationSessionKey getSipApplicationSessionKey(final String applicationName, final String id, final String appGeneratedKey) {
if (logger.isDebugEnabled()){
logger.debug("getSipApplicationSessionKey - applicationName=" + applicationName + ", id=" + id + ", appGeneratedKey=" + appGeneratedKey);
}
if(applicationName == null) {
throw new NullPointerException("the application name cannot be null for sip application session key creation");
}
return new SipApplicationSessionKey(
id,
applicationName,
appGeneratedKey);
} |
python | def GetClass(self, *args, **kwargs):
"""Retrieve a CIM class from the local repository of this class.
For a description of the parameters, see
:meth:`pywbem.WBEMConnection.GetClass`.
"""
cname = args[0] if args else kwargs['ClassName']
try:
cc = self.classes[self.default_namespace][cname]
except KeyError:
if self.conn is None:
ce = CIMError(CIM_ERR_NOT_FOUND, cname)
raise ce
cc = self.conn.GetClass(*args, **kwargs)
try:
self.classes[self.default_namespace][cc.classname] = cc
except KeyError:
self.classes[self.default_namespace] = \
NocaseDict({cc.classname: cc})
if 'LocalOnly' in kwargs and not kwargs['LocalOnly']:
if cc.superclass:
try:
del kwargs['ClassName']
except KeyError:
pass
if args:
args = args[1:]
super_ = self.GetClass(cc.superclass, *args, **kwargs)
for prop in super_.properties.values():
if prop.name not in cc.properties:
cc.properties[prop.name] = prop
for meth in super_.methods.values():
if meth.name not in cc.methods:
cc.methods[meth.name] = meth
return cc |
java | @Override
public void execute()
throws MojoExecutionException, MojoFailureException
{
if ( !"pom".equals( project.getPackaging() ) )
{
long ts = System.currentTimeMillis();
try
{
internalExecute();
}
finally
{
long te = System.currentTimeMillis();
getLog().debug( String.format( "Mojo execution time: %d ms", te - ts ) );
}
}
} |
python | def _addLoggingOptions(addOptionFn):
"""Adds logging options
"""
##################################################
# BEFORE YOU ADD OR REMOVE OPTIONS TO THIS FUNCTION, KNOW THAT
# YOU MAY ONLY USE VARIABLES ACCEPTED BY BOTH optparse AND argparse
# FOR EXAMPLE, YOU MAY NOT USE default=%default OR default=%(default)s
##################################################
addOptionFn("--logOff", dest="logOff", action="store_true", default=False,
help="Turn off logging. (default is CRITICAL)")
addOptionFn(
"--logInfo", dest="logInfo", action="store_true", default=False,
help="Turn on logging at INFO level. (default is CRITICAL)")
addOptionFn(
"--logDebug", dest="logDebug", action="store_true", default=False,
help="Turn on logging at DEBUG level. (default is CRITICAL)")
addOptionFn(
"--logLevel", dest="logLevel", default='CRITICAL',
help=("Log at level (may be either OFF/INFO/DEBUG/CRITICAL). "
"(default is CRITICAL)"))
addOptionFn("--logFile", dest="logFile", help="File to log in")
addOptionFn(
"--rotatingLogging", dest="logRotating", action="store_true",
default=False, help=("Turn on rotating logging, which prevents log "
"files getting too big.")) |
python | def same_network(atree, btree) -> bool:
"""True if given trees share the same structure of powernodes,
independently of (power)node names,
and same edge topology between (power)nodes.
"""
return same_hierarchy(atree, btree) and same_topology(atree, btree) |
java | @Override
public void fit(Dataframe trainingData) {
Set<TypeInference.DataType> supportedYDataTypes = getSupportedYDataTypes();
if(supportedYDataTypes != null && !supportedYDataTypes.contains(trainingData.getYDataType())) {
throw new IllegalArgumentException("The response variable DataType of the Dataframe is not supported by this method.");
}
super.fit(trainingData);
} |
java | public static <Key, Value> Aggregation<Key, BigInteger, BigInteger> bigIntegerSum() {
return new AggregationAdapter(new BigIntegerSumAggregation<Key, Value>());
} |
java | public void write(OutputStream out)
throws IOException
{
Writer writer = new OutputStreamWriter(out);
write(writer);
writer.flush();
} |
java | public static FacesBackingBeanFactory get( ServletContext servletContext )
{
FacesBackingBeanFactory factory = ( FacesBackingBeanFactory ) servletContext.getAttribute( CONTEXT_ATTR );
assert factory != null
: FacesBackingBeanFactory.class.getName() + " was not found in ServletContext attribute " + CONTEXT_ATTR;
factory.reinit( servletContext );
return factory;
} |
python | def tr(self, *args, **kwargs):
'''
calc_fn.tr(...) yields a copy of calc_fn in which the afferent and efferent values of the
function have been translated. The translation is found from merging the list of 0 or more
dictionary arguments given left-to-right followed by the keyword arguments.
'''
d = merge(args, kwargs)
# make a copy
translation = copy.copy(self)
object.__setattr__(translation, 'afferents',
tuple(d[af] if af in d else af for af in self.afferents))
object.__setattr__(translation, 'efferents',
tuple(d[ef] if ef in d else ef for ef in self.efferents))
object.__setattr__(translation, 'afferent_docs',
ps.pmap({kk:ps.pmap({(d[k] if k in d else k):v
for (k,v) in six.iteritems(vv)})
for (kk,vv) in six.iteritems(self.afferent_docs)}))
object.__setattr__(translation, 'efferent_docs',
ps.pmap({kk:ps.pmap({(d[k] if k in d else k):v
for (k,v) in six.iteritems(vv)})
for (kk,vv) in six.iteritems(self.efferent_docs)}))
fn = self.function
def _tr_fn_wrapper(*args, **kwargs):
res = fn(*args, **kwargs)
if isinstance(res, colls.Mapping):
return {(d[k] if k in d else k):v for (k,v) in six.iteritems(res)}
else:
return res
object.__setattr__(translation, 'function', _tr_fn_wrapper)
return translation |
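The working core of tr is the closure at the end: when the wrapped function returns a mapping, its keys go through the same translation dict. A standalone runnable sketch of just that wrapper, using plain dicts instead of the library's pyrsistent maps:

def rekey_result(fn, mapping):
    # Re-key dict-shaped results through `mapping`; pass other results through.
    def wrapper(*args, **kwargs):
        res = fn(*args, **kwargs)
        if isinstance(res, dict):
            return {mapping.get(k, k): v for k, v in res.items()}
        return res
    return wrapper

f = rekey_result(lambda: {'x': 1, 'y': 2}, {'x': 'left'})
print(f())  # {'left': 1, 'y': 2}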
java | public List<VirtualMachineExtensionImageInner> listVersions(String location, String publisherName, String type, String filter, Integer top, String orderby) {
return listVersionsWithServiceResponseAsync(location, publisherName, type, filter, top, orderby).toBlocking().single().body();
} |
java | public static KickflipApiClient setup(Context context, String key, String secret) {
return setup(context, key, secret, null);
} |
java | public Collection<String> getEncryptTableNames() {
Set<String> result = new LinkedHashSet<>();
for (ShardingEncryptorStrategy each : shardingEncryptorStrategies.values()) {
result.addAll(each.getEncryptTableNames());
}
return result;
} |
python | def wait_running(self, timeout=None):
"""Wait for the thread to pass control to its routine.
Args:
timeout (float): The maximum amount of time to wait
"""
flag = self._running.wait(timeout)
if flag is False:
raise TimeoutExpiredError("Timeout waiting for thread to start running") |
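wait_running is one half of an Event handshake: the thread's routine sets _running once it takes control, and callers block until then. A hypothetical reconstruction of the other half (the real class and its TimeoutExpiredError live elsewhere in the library; the stdlib TimeoutError stands in here):

import threading

class Worker(threading.Thread):
    def __init__(self):
        super().__init__()
        self._running = threading.Event()

    def run(self):
        self._running.set()   # routine has control; unblocks wait_running callers
        # ... actual work would follow here

    def wait_running(self, timeout=None):
        if not self._running.wait(timeout):
            raise TimeoutError("Timeout waiting for thread to start running")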
python | def _log_function(self, handler):
"""Override Application.log_function so that what to log can be controlled.
"""
if handler.get_status() < 400:
log_method = request_log.info
elif handler.get_status() < 500:
log_method = request_log.warning
else:
log_method = request_log.error
for i in settings['LOGGING_IGNORE_URLS']:
if handler.request.uri.startswith(i):
log_method = request_log.debug
break
request_time = 1000.0 * handler.request.request_time()
log_method("%d %s %.2fms", handler.get_status(),
handler._request_summary(), request_time) |
java | public WeeklyAutoScalingSchedule withSunday(java.util.Map<String, String> sunday) {
setSunday(sunday);
return this;
} |
java | public static final AuthToken consumeToken(String encodedJson) throws IllegalArgumentException {
String json = StringUtils.newStringUtf8(Base64.decodeBase64(encodedJson));
AuthToken token = fromJSON(json);
validateToken(token);
return token;
} |
python | def selfSignCert(self, cert, pkey):
'''
Self-sign a certificate.
Args:
cert (OpenSSL.crypto.X509): The certificate to sign.
pkey (OpenSSL.crypto.PKey): The PKey with which to sign the certificate.
Examples:
Sign a given certificate with a given private key:
cdir.selfSignCert(mycert, myotherprivatekey)
Returns:
None
'''
cert.set_issuer(cert.get_subject())
cert.sign(pkey, self.signing_digest) |
java | private static List<Method> requestMappingMethods(Object object) {
return getAllMethods(object.getClass(), withModifier(Modifier.PUBLIC))
.stream()
// Lookup super classes just in case if the object is a proxy.
.filter(m -> getAnnotations(m, FindOption.LOOKUP_SUPER_CLASSES)
.stream()
.map(Annotation::annotationType)
.anyMatch(a -> a == Path.class ||
HTTP_METHOD_MAP.containsKey(a)))
.sorted(Comparator.comparingInt(AnnotatedHttpServiceFactory::order))
.collect(toImmutableList());
} |
python | def get_metadata(audio_filepaths):
""" Return a tuple of album, artist, has_embedded_album_art from a list of audio files. """
artist, album, has_embedded_album_art = None, None, None
for audio_filepath in audio_filepaths:
try:
mf = mutagen.File(audio_filepath)
except Exception:
continue
if mf is None:
continue
# artist
for key in ("albumartist", "artist", # ogg
"TPE1", "TPE2", # mp3
"aART", "\xa9ART"): # mp4
try:
val = mf.get(key, None)
except ValueError:
val = None
if val is not None:
artist = val[-1]
break
# album
for key in ("_album", "album", # ogg
"TALB", # mp3
"\xa9alb"): # mp4
try:
val = mf.get(key, None)
except ValueError:
val = None
if val is not None:
album = val[-1]
break
if artist and album:
# album art
if isinstance(mf, mutagen.ogg.OggFileType):
has_embedded_album_art = "metadata_block_picture" in mf
elif isinstance(mf, mutagen.mp3.MP3):
has_embedded_album_art = any(map(operator.methodcaller("startswith", "APIC:"), mf.keys()))
elif isinstance(mf, mutagen.mp4.MP4):
has_embedded_album_art = "covr" in mf
# stop at the first file that succeeds (for performance)
break
return artist, album, has_embedded_album_art |
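Usage sketch (the path is illustrative): pass every file from one album directory, since the loop stops at the first file that yields both an artist and an album.

import glob

paths = sorted(glob.glob("/music/some_album/*.mp3"))
artist, album, has_art = get_metadata(paths)
if artist and album and not has_art:
    print("Need cover art for %s - %s" % (artist, album))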
java | public InternalContext createSubContext(VariantIndexer[] indexers, InternalContext localContext, int varSize) {
Object[][] myParentScopes = this.parentScopes;
//cal the new-context's parent-scopes
Object[][] scopes;
if (myParentScopes == null) {
scopes = new Object[][]{this.vars};
} else {
scopes = new Object[myParentScopes.length + 1][];
scopes[0] = this.vars;
System.arraycopy(myParentScopes, 0, scopes, 1, myParentScopes.length);
}
InternalContext newContext = new InternalContext(template, localContext.out, Vars.EMPTY,
indexers, varSize, scopes);
newContext.localContext = localContext;
return newContext;
} |
python | def output(s):
"""
Parse, transform, and pretty-print the result.
"""
p = Parser()
t = ExpressionsTransformer()
ast = p.parse(s)
logging.debug(ast.pretty())
print(ast.pretty())
d = t.transform(ast)
print(json.dumps(d, indent=4))
return d |
python | def to_datetime(self, column):
'''
This function converts epoch timestamps to datetimes.
:param column: column to convert from current state -> datetime
'''
if column in self:
if self[column].dtype in NUMPY_NUMERICAL:
self[column] = pd.to_datetime(self[column], unit='s')
else:
self[column] = pd.to_datetime(self[column], utc=True) |
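Both branches reduce to plain pandas calls, so a standalone sketch on an ordinary DataFrame shows the effect (the snippet's class presumably wraps one):

import pandas as pd

df = pd.DataFrame({"ts": [1500000000, 1500003600],
                   "iso": ["2017-07-14T02:40:00Z", "2017-07-14T03:40:00Z"]})
df["ts"] = pd.to_datetime(df["ts"], unit="s")    # numeric column: epoch seconds
df["iso"] = pd.to_datetime(df["iso"], utc=True)  # string column: parsed as UTC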
java | public OvhPackAdsl packName_GET(String packName) throws IOException {
String qPath = "/pack/xdsl/{packName}";
StringBuilder sb = path(qPath, packName);
String resp = exec(qPath, "GET", sb.toString(), null);
return convertTo(resp, OvhPackAdsl.class);
} |
java | @Override
public RegisterAVSDeviceResult registerAVSDevice(RegisterAVSDeviceRequest request) {
request = beforeClientExecution(request);
return executeRegisterAVSDevice(request);
} |
python | def array_correlation(x, y, axis=0):
"""Column- or row-wise Pearson correlation between two arrays
Computes sample Pearson correlation between two 1D or 2D arrays (e.g.,
two n_TRs by n_voxels arrays). For 2D arrays, computes correlation
between each corresponding column (axis=0) or row (axis=1) where axis
indexes observations. If axis=0 (default), each column is considered to
be a variable and each row is an observation; if axis=1, each row is a
variable and each column is an observation (equivalent to transposing
the input arrays). Input arrays must be the same shape with corresponding
variables and observations. This is intended to be an efficient method
for computing correlations between two corresponding arrays with many
variables (e.g., many voxels).
Parameters
----------
x : 1D or 2D ndarray
Array of observations for one or more variables
y : 1D or 2D ndarray
Array of observations for one or more variables (same shape as x)
axis : int (0 or 1), default: 0
Correlation between columns (axis=0) or rows (axis=1)
Returns
-------
r : float or 1D ndarray
Pearson correlation values for input variables
"""
# Accommodate array-like inputs
if not isinstance(x, np.ndarray):
x = np.asarray(x)
if not isinstance(y, np.ndarray):
y = np.asarray(y)
# Check that inputs are same shape
if x.shape != y.shape:
raise ValueError("Input arrays must be the same shape")
# Transpose if axis=1 requested (to avoid broadcasting
# issues introduced by switching axis in mean and sum)
if axis == 1:
x, y = x.T, y.T
# Center (de-mean) input variables
x_demean = x - np.mean(x, axis=0)
y_demean = y - np.mean(y, axis=0)
# Compute summed product of centered variables
numerator = np.sum(x_demean * y_demean, axis=0)
# Compute sum squared error
denominator = np.sqrt(np.sum(x_demean ** 2, axis=0) *
np.sum(y_demean ** 2, axis=0))
return numerator / denominator |
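Usage sketch: with axis=0 (the default), each column of the two (n_observations, n_variables) arrays gets its own correlation value.

import numpy as np

rng = np.random.default_rng(0)
x = rng.standard_normal((100, 5))
y = x + 0.5 * rng.standard_normal((100, 5))
r = array_correlation(x, y)                    # shape (5,), one r per column
r_rows = array_correlation(x.T, y.T, axis=1)   # same values, computed row-wise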
java | public SIMPIterator getStreams() throws SIMPControllableNotFoundException
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(tc, "getStreams");
assertValidControllable();
Iterator it = null;
try
{
it = _streamSet.iterator();
}
catch (SIResourceException e)
{
FFDCFilter.processException(
e,
"com.ibm.ws.sib.processor.runtime.SourceStreamSetControl.getStreams",
"1:713:1.39",
this);
SIMPRuntimeOperationFailedException finalE =
new SIMPRuntimeOperationFailedException(
nls.getFormattedMessage(
"INTERNAL_MESSAGING_ERROR_CWSIP0002",
new Object[] {"SourceStreamSetControl.getStreams",
"1:721:1.39",
e},
null), e);
SibTr.exception(tc, finalE);
// if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "getStreams", finalE);
// throw finalE;
}
SIMPIterator returnIterator=new SourceStreamControllableIterator(it);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "getStreams", returnIterator);
return returnIterator;
} |
python | def event_return(events):
'''
Return events to a MongoDB server
'''
conn, mdb = _get_conn(ret=None)
if isinstance(events, list):
events = events[0]
if isinstance(events, dict):
log.debug(events)
if PYMONGO_VERSION > _LooseVersion('2.3'):
mdb.events.insert_one(events.copy())
else:
mdb.events.insert(events.copy()) |
python | def _operation_speak_as_digits(self, content, index, children):
"""
The operation method of _speak_as method for digits.
:param content: The text content of element.
:type content: str
:param index: The index of pattern in text content of element.
:type index: int
:param children: The children of element.
:type children: list(hatemile.util.html.htmldomelement.HTMLDOMElement)
"""
data_property_value = 'digits'
if index != 0:
children.append(self._create_content_element(
content[0:index],
data_property_value
))
children.append(self._create_aural_content_element(
' ',
data_property_value
))
children.append(self._create_content_element(
content[index:(index + 1)],
data_property_value
))
return children |
java | public static void parseXmlAndSetIconicsDrawables(@NonNull Context context,
int menuId,
@NonNull Menu menu) {
parseXmlAndSetIconicsDrawables(context, menuId, menu, false);
} |
python | def check_threats(**args):
"""
Check the input file type against the list of threat extensions.
"""
is_high_threat = False
for val in THREAT_EXTENSIONS.values():
if isinstance(val, list):
for el in val:
if args['file_type'] == el:
is_high_threat = True
break
else:
if args['file_type'] == val:
is_high_threat = True
break
return is_high_threat |
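Usage sketch: the helper takes keyword arguments and reads only args['file_type'], so a call looks like this (the result depends on whatever the module's THREAT_EXTENSIONS mapping contains):

for ext in ("exe", "txt"):
    if check_threats(file_type=ext):
        print("high-threat extension:", ext)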