Dataset schema:
- language: string (2 classes)
- func_code_string: string (length 63 to 466k)
---|---|
def p_var_decl(p):
    """ var_decl : DIM idlist typedef
    """
    # Declare each identifier from the idlist with the parsed type definition.
    # NOTE(review): vardata appears to be a (name, lineno) pair produced by
    # the idlist rule -- confirm against that rule's action.
    for vardata in p[2]:
        SYMBOL_TABLE.declare_variable(vardata[0], vardata[1], p[3])
    p[0] = None  # a declaration produces no AST node
/**
 * Replaces the client's connection pool with one sized and timed per the
 * given keep-alive configuration. A {@code null} configuration is a no-op,
 * leaving the existing pool untouched.
 */
public void setKeepAliveConfiguration(KeepAliveConfiguration keepAliveConfiguration)
{
    if(keepAliveConfiguration != null)
        client.setConnectionPool(new ConnectionPool(keepAliveConfiguration.getMaxIdleConnections(),
                keepAliveConfiguration.getKeepAliveDurationMs()));
}
/**
 * Stops the given Automation execution. The request is first passed through
 * the client's pre-execution hook, then dispatched.
 */
@Override
public StopAutomationExecutionResult stopAutomationExecution(StopAutomationExecutionRequest request) {
    request = beforeClientExecution(request);
    return executeStopAutomationExecution(request);
}
def _get_satellite_tile(self, x_tile, y_tile, z_tile):
    """Load up a single satellite image tile."""
    # Tiles are cached twice: on disk under mapscache/ and in memory in
    # self._tiles. NOTE(review): assumes the mapscache/ directory already
    # exists -- confirm it is created elsewhere.
    cache_file = "mapscache/{}.{}.{}.jpg".format(z_tile, x_tile, y_tile)
    if cache_file not in self._tiles:
        if not os.path.isfile(cache_file):
            # Not on disk either: download the tile image and persist it.
            url = _IMAGE_URL.format(z_tile, x_tile, y_tile, _KEY)
            data = requests.get(url).content
            with open(cache_file, 'wb') as f:
                f.write(data)
        self._tiles[cache_file] = [
            x_tile, y_tile, z_tile,
            ColourImageFile(self._screen, cache_file, height=_START_SIZE, dither=True,
                            uni=self._screen.unicode_aware),
            True]
        # Evict the oldest entry once over capacity; popitem(False) implies
        # self._tiles is an OrderedDict (FIFO eviction) -- confirm.
        if len(self._tiles) > _CACHE_SIZE:
            self._tiles.popitem(False)
    self._screen.force_update()
def parse_table(fq_table: str) -> Tuple[str, str]:
    """Parse a tablename into tuple(<schema>, <table>).

    Schema defaults to doc if the table name doesn't contain a schema.

    >>> parse_table('x.users')
    ('x', 'users')

    >>> parse_table('users')
    ('doc', 'users')

    :raises ValueError: if the name contains more than one '.'
    """
    parts = fq_table.split('.')
    if len(parts) == 1:
        return 'doc', parts[0]
    if len(parts) == 2:
        return parts[0], parts[1]
    # A bare ``raise ValueError`` gave callers no hint what was wrong.
    raise ValueError('Invalid table name: {!r}'.format(fq_table))
def getBox(box, pagesize):
    """
    Parse sizes by corners in the form:
    <X-Left> <Y-Upper> <Width> <Height>
    The last two values, when negative, are interpreted as offsets from
    the right and lower border.

    :param box: whitespace-separated string (or stringifiable value) of
        exactly four size tokens.
    :param pagesize: page dimensions forwarded to getCoords.
    :raises ValueError: if the box does not contain exactly four tokens.
    """
    box = str(box).split()
    if len(box) != 4:
        # ValueError is more precise than a bare Exception and stays
        # backward compatible for callers catching Exception.
        raise ValueError("box not defined right way")
    x, y, w, h = [getSize(pos) for pos in box]
    return getCoords(x, y, w, h, pagesize)
java | public static autoscaleprofile get(nitro_service service, String name) throws Exception{
autoscaleprofile obj = new autoscaleprofile();
obj.set_name(name);
autoscaleprofile response = (autoscaleprofile) obj.get_resource(service);
return response;
} |
def split_csp_str(val):
    """ Split comma separated string into unique values, keeping their order.
    :returns: list of splitted values
    """
    # Accept an already-split sequence as-is; otherwise split on commas.
    items = val if isinstance(val, (list, tuple)) else val.strip().split(',')
    result = []
    seen = set()
    for item in items:
        # Skip empty tokens and anything already emitted.
        if item and item not in seen:
            seen.add(item)
            result.append(item)
    return result
/** Resolves the chain entry for {@code key}, delegating with threshold 0. */
public ValueHolder<V> resolve(ServerStoreProxy.ChainEntry entry, K key, long now) {
    return resolve(entry, key, now, 0);
}
def snapshots(self, xml_bytes):
    """Parse the XML returned by the C{DescribeSnapshots} function.

    @param xml_bytes: XML bytes with a C{DescribeSnapshotsResponse} root
        element.
    @return: A list of L{Snapshot} instances.

    TODO: ownersSet, restorableBySet, ownerId, volumeSize, description,
    ownerAlias.
    """
    root = XML(xml_bytes)
    result = []
    for snapshot_data in root.find("snapshotSet"):
        snapshot_id = snapshot_data.findtext("snapshotId")
        volume_id = snapshot_data.findtext("volumeId")
        status = snapshot_data.findtext("status")
        start_time = snapshot_data.findtext("startTime")
        # Drop any sub-second/timezone suffix before parsing.
        start_time = datetime.strptime(
            start_time[:19], "%Y-%m-%dT%H:%M:%S")
        # Guard against a missing <progress> element: findtext() returns
        # None there and the original [:-1] slice raised TypeError.
        progress = (snapshot_data.findtext("progress") or "0%")[:-1]
        progress = float(progress or "0") / 100.
        snapshot = model.Snapshot(
            snapshot_id, volume_id, status, start_time, progress)
        result.append(snapshot)
    return result
def add_segmented_colorbar(da, colors, direction):
    """
    Add 'non-rastered' colorbar to DrawingArea

    One rectangle (quad) is drawn per color, stacked along the given
    direction ('vertical' stacks bottom-to-top, anything else left-to-right).
    """
    nbreak = len(colors)
    verts = []
    if direction == 'vertical':
        step = da.height / nbreak
        x1, x2 = 0, da.width
        for i in range(nbreak):
            y1 = i * step
            y2 = y1 + step
            verts.append(((x1, y1), (x1, y2), (x2, y2), (x2, y1)))
    else:
        step = da.width / nbreak
        y1, y2 = 0, da.height
        for i in range(nbreak):
            x1 = i * step
            x2 = x1 + step
            verts.append(((x1, y1), (x1, y2), (x2, y2), (x2, y1)))
    coll = mcoll.PolyCollection(verts,
                                facecolors=colors,
                                linewidth=0,
                                antialiased=False)
    da.add_artist(coll)
/**
 * Routes a trace record to the registered internal trace router, or queues
 * it in {@code earlierTraces} until a router is registered.
 *
 * @return {@code true} unless a router reported the record handled/suppressed.
 */
protected boolean invokeTraceRouters(RoutedMessage routedTrace) {

    boolean retMe = true;
    LogRecord logRecord = routedTrace.getLogRecord();
    /*
     * Avoid any feedback traces that are emitted after this point.
     * The first time the counter increments is the first pass-through.
     * The second time the counter increments is the second pass-through due
     * to trace emitted. We do not want any more pass-throughs.
     */
    try {
        if (!(counterForTraceRouter.incrementCount() > 2)) {
            if (logRecord != null) {
                Level level = logRecord.getLevel();
                int levelValue = level.intValue();
                // Only route records below INFO (i.e. trace-level records).
                if (levelValue < Level.INFO.intValue()) {
                    String levelName = level.getName();
                    if (!(levelName.equals("SystemOut") || levelName.equals("SystemErr"))) { //SystemOut/Err=700
                        WsTraceRouter internalTrRouter = internalTraceRouter.get();
                        if (internalTrRouter != null) {
                            retMe &= internalTrRouter.route(routedTrace);
                        } else if (earlierTraces != null) {
                            // Double-checked under the lock: earlierTraces may be
                            // nulled out concurrently once a router registers.
                            synchronized (this) {
                                if (earlierTraces != null) {
                                    earlierTraces.add(routedTrace);
                                }
                            }
                        }
                    }
                }
            }
        }
    } finally {
        counterForTraceRouter.decrementCount();
    }

    return retMe;
}
def archive(self, ostream, treeish=None, prefix=None, **kwargs):
    """Archive the tree at the given revision.

    :param ostream: file compatible stream object to which the archive will be written as bytes
    :param treeish: is the treeish name/id, defaults to active branch
    :param prefix: is the optional prefix to prepend to each filename in the archive
    :param kwargs: Additional arguments passed to git-archive

        * Use the 'format' argument to define the kind of format. Use
          specialized ostreams to write any format supported by python.
        * You may specify the special **path** keyword, which may either be a repository-relative
          path to a directory or file to place into the archive, or a list or tuple of multiple paths.

    :raise GitCommandError: in case something went wrong
    :return: self"""
    if treeish is None:
        # Default to the tip of the currently active branch.
        treeish = self.head.commit
    # An explicit 'prefix' kwarg wins over the parameter.
    if prefix and 'prefix' not in kwargs:
        kwargs['prefix'] = prefix
    kwargs['output_stream'] = ostream
    path = kwargs.pop('path', [])
    if not isinstance(path, (tuple, list)):
        path = [path]
    # end assure paths is list
    self.git.archive(treeish, *path, **kwargs)
    return self
/**
 * Marshalls the request's interconnect id into the given protocol marshaller.
 *
 * @throws SdkClientException if the request is null or marshalling fails
 */
public void marshall(DescribeInterconnectsRequest describeInterconnectsRequest, ProtocolMarshaller protocolMarshaller) {

    if (describeInterconnectsRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }

    try {
        protocolMarshaller.marshall(describeInterconnectsRequest.getInterconnectId(), INTERCONNECTID_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
def ridgecircle(self, x, expo=0.5):
    """happy cat by HG Beyer"""
    # f(x) = ((||x||^2 - n)^2)^(expo/2) + ||x||^2 / n + sum(x) / n
    dim = len(x)
    norm_sq = sum(x**2)
    return ((norm_sq - dim)**2)**(expo / 2) + norm_sq / dim + sum(x) / dim
/**
 * Escapes regexp metacharacters in {@code input} so it can be embedded
 * literally in a JavaScript RegExp; "$&" re-inserts each matched character
 * after the escape.
 */
public static String escapeStringForJsRegexp(String input) {
    JsString string = uncheckedCast(input);
    return string.replace(ESCAPE_JS_STRING_REGEXP, "\\$&");
}
def save_data(X, y, path):
    """Save data as a CSV, LibSVM or HDF5 file based on the file extension.

    Args:
        X (numpy or scipy sparse matrix): Data matrix
        y (numpy array): Target vector. If None, all zero vector will be saved.
        path (str): Path to the CSV, LibSVM or HDF5 file to save data.

    Raises:
        ValueError: If the file extension is not one of .csv, .sps or .h5.
    """
    catalog = {'.csv': save_csv, '.sps': save_libsvm, '.h5': save_hdf5}
    ext = os.path.splitext(path)[1]
    if ext not in catalog:
        # Fail with a clear message instead of a bare KeyError.
        raise ValueError('Unknown file extension {!r}; expected one of {}'.format(
            ext, sorted(catalog)))
    func = catalog[ext]
    if y is None:
        y = np.zeros((X.shape[0], ))
    func(X, y, path)
def set_window_close_callback(window, cbfun):
    """
    Sets the close callback for the specified window.

    Wrapper for:
        GLFWwindowclosefun glfwSetWindowCloseCallback(GLFWwindow* window, GLFWwindowclosefun cbfun);
    """
    # Use the window's pointer value as a dict key so the ctypes callback
    # object is kept alive for the window's lifetime.
    # NOTE(review): casting to POINTER(c_long) assumes pointers fit a C long,
    # which is not true on 64-bit Windows -- confirm upstream handling.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    if window_addr in _window_close_callback_repository:
        previous_callback = _window_close_callback_repository[window_addr]
    else:
        previous_callback = None
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowclosefun(cbfun)
    # Store both the Python callable and its ctypes wrapper to prevent GC.
    _window_close_callback_repository[window_addr] = (cbfun, c_cbfun)
    cbfun = c_cbfun
    _glfw.glfwSetWindowCloseCallback(window, cbfun)
    # Mirror the C API: return the previously installed callback, if any.
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
java | private static boolean nonEmptyIntersection(String[] a, String[] b) {
if (a == null || b == null || a.length == 0 || b.length == 0) {
return false;
}
for (String toFind : a) {
if (contains(b, toFind)) {
return true;
}
}
return false;
} |
def repo_groups(self, project_key, repo_key, limit=99999, filter_str=None):
    """
    Get repository Groups
    :param project_key:
    :param repo_key:
    :param limit: OPTIONAL: The limit of the number of groups to return, this may be restricted by
                fixed system limits. Default by built-in method: 99999
    :param filter_str: OPTIONAL: group filter string
    :return: list of group permission entries, or None if absent
    """
    url = ('rest/api/1.0/projects/{project_key}/repos/{repo_key}'
           '/permissions/groups').format(project_key=project_key,
                                         repo_key=repo_key)
    params = {}
    if limit:
        params['limit'] = limit
    if filter_str:
        params['filter'] = filter_str
    response = self.get(url, params=params) or {}
    return response.get('values')
/**
 * Mean of a normal(mu, sigma) distribution truncated from below at
 * {@code lowerBound}: mu + sigma * phi(alpha) / (1 - Phi(alpha)) with
 * alpha = (lowerBound - mu) / sigma.
 */
public static double meanTruncLower(double mu, double sigma, double lowerBound) {
    double alpha = (lowerBound - mu) / sigma;
    double phiAlpha = densityNonTrunc(alpha, 0, 1.0);
    double cPhiAlpha = cumulativeNonTrunc(alpha, 0, 1.0);
    return mu + sigma * phiAlpha / (1.0 - cPhiAlpha);
}
/**
 * Draws a vector between the given bounds aiming at the given target total.
 * Dimensions are visited in a shuffled order (to avoid positional bias) and
 * the result is mapped back to the original ordering.
 *
 * @throws IllegalArgumentException if the bound arrays differ in length
 */
public static double[] ttbd(double target, double[] lower, double[] upper, RandomGenerator randomGenerator) {
    // Check dimension match:
    if (lower.length != upper.length) {
        throw new IllegalArgumentException("Lower and upper bounds must be of same length.");
    }

    // Get indices and shuffle:
    final int[] indices = DMatrixUtils.shuffleIndices(lower.length, randomGenerator);

    // Call auxiliary function:
    final double[] ttbdValues = DMatrixUtils._ttbdInner(
            target,
            DMatrixUtils.applyIndices(lower, indices),
            DMatrixUtils.applyIndices(upper, indices),
            randomGenerator);

    // Reapply original indices and return:
    return DMatrixUtils.applyIndices(ttbdValues, DMatrixUtils.getOrder(indices));
}
def geo_length(arg, use_spheroid=None):
    """
    Compute length of a geo spatial data

    Parameters
    ----------
    arg : geometry or geography
    use_spheroid : default None

    Returns
    -------
    length : double scalar
    """
    # Build the expression node; evaluation is deferred to the backend.
    op = ops.GeoLength(arg, use_spheroid)
    return op.to_expr()
def validate_argsort_with_ascending(ascending, args, kwargs):
    """
    If 'Categorical.argsort' is called via the 'numpy' library, the
    first parameter in its signature is 'axis', which takes either
    an integer or 'None', so check if the 'ascending' parameter has
    either integer type or is None, since 'ascending' itself should
    be a boolean
    """
    if is_integer(ascending) or ascending is None:
        # 'ascending' actually received numpy's 'axis' argument: shift it
        # into the positional args and fall back to the default sort order.
        args = (ascending,) + args
        ascending = True
    validate_argsort_kind(args, kwargs, max_fname_arg_count=3)
    return ascending
def devices(self, timeout=None):
    """Executes adb devices -l and returns a list of objects describing attached devices.

    :param timeout: optional integer specifying the maximum time in
        seconds for any spawned adb process to complete before
        throwing an ADBTimeoutError. This timeout is per adb call. The
        total time spent may exceed this value. If it is not
        specified, the value set in the ADBHost constructor is used.
    :returns: list of dicts, one per device, with at least
        'device_serial' and 'state' keys plus any key:value pairs adb
        reports (usb, product, model, device, ...).
    :raises: * ADBTimeoutError
             * ADBError

    The output of adb devices -l ::

        $ adb devices -l
        List of devices attached
        b313b945 device usb:1-7 product:d2vzw model:SCH_I535 device:d2vzw

    is parsed and placed into an object as in

    [{'device_serial': 'b313b945', 'state': 'device', 'product': 'd2vzw',
      'usb': '1-7', 'device': 'd2vzw', 'model': 'SCH_I535' }]
    """
    # b313b945 device usb:1-7 product:d2vzw model:SCH_I535 device:d2vzw
    # from Android system/core/adb/transport.c statename()
    re_device_info = re.compile(r'([^\s]+)\s+(offline|bootloader|device|host|recovery|sideload|no permissions|unauthorized|unknown)')
    devices = []
    lines = self.command_output(["devices", "-l"], timeout=timeout).split('\n')
    for line in lines:
        # NOTE(review): exact-match including the trailing space is fragile
        # across adb versions -- confirm against supported adb output.
        if line == 'List of devices attached ':
            continue
        match = re_device_info.match(line)
        if match:
            device = {
                'device_serial': match.group(1),
                'state': match.group(2)
            }
            # Everything after the state is a list of key:value pairs.
            remainder = line[match.end(2):].strip()
            if remainder:
                try:
                    device.update(dict([j.split(':')
                                        for j in remainder.split(' ')]))
                except ValueError:
                    self._logger.warning('devices: Unable to parse '
                                         'remainder for device %s' % line)
            devices.append(device)
    return devices
def resample(grid, wl, flux):
    """ Resample spectrum onto desired grid """
    # Linear interpolation of (wl, flux) sampled at the new grid points.
    interpolator = interpolate.interp1d(wl, flux)
    return interpolator(grid)
/**
 * Initializes this executor from the SQL route result.
 * NOTE(review): execute groups are built from the {@code routeUnits} field,
 * not from the {@code routeResult} parameter -- confirm routeUnits is
 * populated before this is called.
 */
public void init(final SQLRouteResult routeResult) throws SQLException {
    setSqlStatement(routeResult.getSqlStatement());
    getExecuteGroups().addAll(obtainExecuteGroups(routeUnits));
}
/**
 * Builds a bicluster from the selected row and column bitsets: rows become
 * the cluster's DBIDs, columns its model.
 */
protected Cluster<BiclusterModel> defineBicluster(BitSet rows, BitSet cols) {
    ArrayDBIDs rowIDs = rowsBitsetToIDs(rows);
    int[] colIDs = colsBitsetToIDs(cols);
    return new Cluster<>(rowIDs, new BiclusterModel(colIDs));
}
def help_center_article_subscriptions(self, article_id, locale=None, **kwargs):
    "https://developer.zendesk.com/rest_api/docs/help_center/subscriptions#list-article-subscriptions"
    # Build the path once: the locale-scoped endpoint when a locale is
    # given, otherwise the default endpoint.
    if locale:
        api_path = "/api/v2/help_center/{locale}/articles/{article_id}/subscriptions.json".format(
            article_id=article_id, locale=locale)
    else:
        api_path = "/api/v2/help_center/articles/{article_id}/subscriptions.json".format(
            article_id=article_id)
    return self.call(api_path, **kwargs)
python | def _get_shift_matrix(self):
"""np.array: The Camera's lens-shift matrix."""
return np.array([[1., 0., self.x_shift, 0.],
[0., 1., self.y_shift, 0.],
[0., 0., 1., 0.],
[0., 0., 0., 1.]], dtype=np.float32) |
def validate(self, **kwargs):
    """
    Validate every entry (forwarding the provided arguments down to
    each one), after first resolving all cross-references between the
    entries.
    """
    self.check_crossrefs()
    for entry in self.values():
        entry.validate(**kwargs)
def child_(self, ctx):
    """
    If the root resource is requested, return the primary
    application's front page, if a primary application has been
    chosen. Otherwise return 'self', since this page can render a
    simple index.
    """
    if self.frontPageItem.defaultApplication is None:
        # No primary application configured: render the offerings index.
        return self.webViewer.wrapModel(
            _OfferingsFragment(self.frontPageItem))
    else:
        # Delegate to the primary application's share index at its root.
        return SharingIndex(self.frontPageItem.defaultApplication.open(),
                            self.webViewer).locateChild(ctx, [''])[0]
def tkvrsn(item):
    """
    Given an item such as the Toolkit or an entry point name, return
    the latest version string.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/tkvrsn_c.html

    :param item: Item for which a version string is desired.
    :type item: str
    :return: the latest version string.
    :rtype: str
    """
    # Convert to a C char pointer for the CSPICE call, then back to str.
    item = stypes.stringToCharP(item)
    return stypes.toPythonString(libspice.tkvrsn_c(item))
/**
 * Builds an HtmlPage from the stream's contents, wrapping IO failures in a
 * RuntimeException. NOTE(review): IOUtils.toString(InputStream) decodes with
 * the platform default charset -- confirm that is intended.
 */
public static HtmlPage toHtmlPage(InputStream inputStream) {
    try {
        return toHtmlPage(IOUtils.toString(inputStream));
    } catch (IOException e) {
        throw new RuntimeException("Error creating HtmlPage from InputStream.", e);
    }
}
def copy(self):
    """Adds menus to itself, required by ViewBox

    Builds a fresh QMenu mirroring this menu's submenus and actions,
    carrying over the title.
    """
    # copied from pyqtgraph ViewBoxMenu
    m = QtGui.QMenu()
    for sm in self.subMenus():
        # Submenus are added as menus, everything else as plain actions.
        if isinstance(sm, QtGui.QMenu):
            m.addMenu(sm)
        else:
            m.addAction(sm)
    m.setTitle(self.title())
    return m
/** Returns the absolute value of {@code number} converted to a decimal. */
public static BigDecimal abs(EvaluationContext ctx, Object number) {
    return Conversions.toDecimal(number, ctx).abs();
}
def escape_html(text, escape_quotes=False):
    """Escape all HTML tags, avoiding XSS attacks.

    < => &lt;
    > => &gt;
    & => &amp;

    @param text: text to be escaped from HTML tags
    @param escape_quotes: if True, escape any quote mark to its HTML entity:
                          " => &quot;
                          ' => &#39;
    """
    # '&' must be replaced first so already-replaced entities are not
    # double-escaped. (The previous version's replacements had been
    # entity-decoded into no-ops, e.g. replace('&', '&').)
    text = text.replace('&', '&amp;')
    text = text.replace('<', '&lt;')
    text = text.replace('>', '&gt;')
    if escape_quotes:
        text = text.replace('"', '&quot;')
        text = text.replace("'", '&#39;')
    return text
def getFileKeys(self):
    """
    Retrieve a list of file keys that have been read into the database.

    This is a utility method that can be used to programmatically access the GsshaPy file objects. Use these keys
    in conjunction with the dictionary returned by the getFileObjects method.

    Returns:
        list: List of keys representing file objects that have been read into the database.
    """
    files = self.getFileObjects()
    # dict.items() works on both Python 2 and 3; the previous iteritems()
    # call was Python 2 only.
    return [key for key, value in files.items() if value]
def extract_ids(text, extractors):
    """
    Uses `extractors` to extract citation identifiers from a text.

    :Parameters:
        text : str
            The text to process
        extractors : `list`(`extractor`)
            A list of extractors to apply to the text

    :Returns:
        `iterable` -- a generator of extracted identifiers
    """
    # Lazily chain every extractor's matches; also avoids shadowing the
    # builtin `id` that the old loop variable used.
    return (ident
            for extractor in extractors
            for ident in extractor.extract(text))
/**
 * An object without an indirection handler is considered materialized;
 * otherwise defer to the handler's state.
 */
public boolean isMaterialized(Object object)
{
    IndirectionHandler handler = getIndirectionHandler(object);
    return handler == null || handler.alreadyMaterialized();
}
/**
 * Resolves a slash-separated instance path (e.g. "/root/child/grandchild")
 * against the application's instance tree.
 *
 * @return the matching instance, or null if any segment does not match
 */
public static Instance findInstanceByPath( AbstractApplication application, String instancePath ) {

    Collection<Instance> currentList = new ArrayList<> ();
    if( application != null )
        currentList.addAll( application.getRootInstances());

    List<String> instanceNames = new ArrayList<> ();
    if( instancePath != null )
        instanceNames.addAll( Arrays.asList( instancePath.split( "/" )));

    // A leading "/" produces an empty first segment; drop it.
    if( instanceNames.size() > 0
            && Utils.isEmptyOrWhitespaces( instanceNames.get( 0 )))
        instanceNames.remove( 0 );

    // Every path segment points to an instance
    Instance result = null;
    for( String instanceName : instanceNames ) {
        result = null;
        for( Instance instance : currentList ) {
            if( instanceName.equals( instance.getName())) {
                result = instance;
                break;
            }
        }

        // The segment does not match any instance
        if( result == null )
            break;

        // Otherwise, prepare the next iteration
        currentList = result.getChildren();
    }

    return result;
}
/**
 * Inserts a child node, enforcing the document invariants of at most one
 * root element and at most one DOCTYPE.
 *
 * @throws DOMException if a second root element or DOCTYPE is inserted
 */
@Override public Node insertChildAt(Node toInsert, int index) {
    if (toInsert instanceof Element && getDocumentElement() != null) {
        throw new DOMException(DOMException.HIERARCHY_REQUEST_ERR,
                "Only one root element allowed");
    }
    if (toInsert instanceof DocumentType && getDoctype() != null) {
        throw new DOMException(DOMException.HIERARCHY_REQUEST_ERR,
                "Only one DOCTYPE element allowed");
    }
    return super.insertChildAt(toInsert, index);
}
/**
 * Lists V2 logging levels. The request is first passed through the client's
 * pre-execution hook, then dispatched.
 */
@Override
public ListV2LoggingLevelsResult listV2LoggingLevels(ListV2LoggingLevelsRequest request) {
    request = beforeClientExecution(request);
    return executeListV2LoggingLevels(request);
}
/** Sets the standard User-Agent and Accept headers for the given resource. */
protected <T extends AbstractResource> void addStandardHeaders(URLConnection con, T resource) {
    con.setRequestProperty("User-Agent", userAgent);
    con.setRequestProperty("Accept", resource.getAcceptedTypes());
}
/** Delegates updating the CP instance to the underlying service. */
public static com.liferay.commerce.product.model.CPInstance updateCPInstance(
    com.liferay.commerce.product.model.CPInstance cpInstance) {
    return getService().updateCPInstance(cpInstance);
}
java | private ConstraintNetwork[] samplingPeakCollection(HashMap<SymbolicVariableActivity, SpatialFluent> aTOsf) {
Vector<SymbolicVariableActivity> observation = new Vector<SymbolicVariableActivity>();
Vector<SymbolicVariableActivity> activities = new Vector<SymbolicVariableActivity>();
for (SymbolicVariableActivity act : aTOsf.keySet()) {
activities.add(act);
if(act.getTemporalVariable().getEST() == act.getTemporalVariable().getLST())
observation.add(act);
}
if (activities != null && !activities.isEmpty()) {
SymbolicVariableActivity[] groundVars = activities.toArray(new SymbolicVariableActivity[activities.size()]);
Arrays.sort(groundVars, new ActivityComparator(true));
Vector<ConstraintNetwork> ret = new Vector<ConstraintNetwork>();
Vector<Vector<SymbolicVariableActivity>> overlappingAll = new Vector<Vector<SymbolicVariableActivity>>();
// if an activity is spatially inconsistent even with itself
for (SymbolicVariableActivity act : activities) {
if (isConflicting(new SymbolicVariableActivity[] { act }, aTOsf)) {
ConstraintNetwork temp = new ConstraintNetwork(null);
temp.addVariable(act);
ret.add(temp);
}
}
// groundVars are ordered activities
for (int i = 0; i < groundVars.length; i++) {
Vector<SymbolicVariableActivity> overlapping = new Vector<SymbolicVariableActivity>();
overlapping.add(groundVars[i]);
long start = (groundVars[i]).getTemporalVariable().getEST();
long end = (groundVars[i]).getTemporalVariable().getEET();
Bounds intersection = new Bounds(start, end);
for (int j = 0; j < groundVars.length; j++) {
if (i != j) {
start = (groundVars[j]).getTemporalVariable().getEST();
end = (groundVars[j]).getTemporalVariable().getEET();
Bounds nextInterval = new Bounds(start, end);
// System.out.println("nextinterval: " + groundVars[j] + " " +nextInterval);
// System.out.println("____________________________________");
Bounds intersectionNew = intersection.intersectStrict(nextInterval);
if (intersectionNew != null) {
overlapping.add(groundVars[j]);
if (isConflicting(overlapping.toArray(new SymbolicVariableActivity[overlapping.size()]), aTOsf)) {
overlappingAll.add(overlapping);
if(overlapping.containsAll(observation))
break;
}
else
intersection = intersectionNew;
}
}
}
}
if(overlappingAll.size() > 0){
Vector<Vector<SymbolicVariableActivity>> retActivities = new Vector<Vector<SymbolicVariableActivity>>();
Vector<SymbolicVariableActivity> current = overlappingAll.get(0);
for (int i = 1; i < overlappingAll.size(); i++) {
if(!isEqual(current, overlappingAll.get(i))){
retActivities.add(current);
current = overlappingAll.get(i);
}
}
retActivities.add(current);
for (Vector<SymbolicVariableActivity> actVec : retActivities) {
ConstraintNetwork tmp = new ConstraintNetwork(null);
for (SymbolicVariableActivity act : actVec){
tmp.addVariable(act);
}
ret.add(tmp);
}
// System.out.println("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<");
// for (int i = 0; i < ret.size(); i++) {
// System.out.println("ret: " + ret);
// }
// System.out.println("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<");
// return new ConstraintNetwork[]{ret.size()};
return ret.toArray(new ConstraintNetwork[ret.size()]);
}
}
return (new ConstraintNetwork[0]);
} |
def play(self, sox_effects=()):
    """ Play the segment.

    Pipes the MP3 audio data into a sox player process, with small
    leading/trailing trims, plus any extra sox effects supplied.

    :param sox_effects: extra sox effect arguments appended to the command
    :raises RuntimeError: if the sox process exits with a non-zero status
    """
    audio_data = self.getAudioData()
    logging.getLogger().info("Playing speech segment (%s): '%s'" % (self.lang, self))
    cmd = ["sox", "-q", "-t", "mp3", "-"]
    if sys.platform.startswith("win32"):
        cmd.extend(("-t", "waveaudio"))
    cmd.extend(("-d", "trim", "0.1", "reverse", "trim", "0.07", "reverse"))  # "trim", "0.25", "-0.1"
    cmd.extend(sox_effects)
    logging.getLogger().debug("Start player process")
    p = subprocess.Popen(cmd,
                         stdin=subprocess.PIPE,
                         stdout=subprocess.DEVNULL)
    p.communicate(input=audio_data)
    if p.returncode != 0:
        # A bare RuntimeError() hid the actual failure cause.
        raise RuntimeError("sox player failed with exit code %d" % p.returncode)
    logging.getLogger().debug("Done playing")
/** Creates a PutResults wrapper around the given per-object results map. */
@NonNull
public static <T> PutResults<T> newInstance(@NonNull Map<T, PutResult> putResults) {
    return new PutResults<T>(putResults);
}
def get_lm_challenge_response(self):
    """
    [MS-NLMP] v28.0 2016-07-14

    3.3.1 - NTLM v1 Authentication
    3.3.2 - NTLM v2 Authentication

    This method returns the LmChallengeResponse key based on the ntlm_compatibility chosen
    and the target_info supplied by the CHALLENGE_MESSAGE. It is quite different from what
    is set in the document as it combines the NTLMv1, NTLM2 and NTLMv2 methods into one
    and calls separate methods based on the ntlm_compatibility flag chosen.

    :return: response (LmChallengeResponse) - The LM response to the server challenge. Computed by the client
    """
    # NTLM2 session security: LMv1 with the client challenge.
    if self._negotiate_flags & NegotiateFlags.NTLMSSP_NEGOTIATE_EXTENDED_SESSIONSECURITY and self._ntlm_compatibility < 3:
        response = ComputeResponse._get_LMv1_with_session_security_response(self._client_challenge)
    elif 0 <= self._ntlm_compatibility <= 1:
        response = ComputeResponse._get_LMv1_response(self._password, self._server_challenge)
    elif self._ntlm_compatibility == 2:
        # Based on the compatibility level we don't want to use LM responses, ignore the session_base_key as it is returned in nt
        response, ignore_key = ComputeResponse._get_NTLMv1_response(self._password, self._server_challenge)
    else:
        """
        [MS-NLMP] v28.0 page 45 - 2016-07-14

        3.1.5.12 Client Received a CHALLENGE_MESSAGE from the Server
        If NTLMv2 authentication is used and the CHALLENGE_MESSAGE TargetInfo field has an MsvAvTimestamp present,
        the client SHOULD NOT send the LmChallengeResponse and SHOULD send Z(24) instead.
        """
        response = ComputeResponse._get_LMv2_response(self._user_name, self._password, self._domain_name,
                                                      self._server_challenge,
                                                      self._client_challenge)
        if self._server_target_info is not None:
            timestamp = self._server_target_info[TargetInfo.MSV_AV_TIMESTAMP]
            if timestamp is not None:
                # Z(24): 24 zero bytes, per the spec excerpt above.
                response = b'\0' * 24

    return response
def getVariantAnnotationSet(self, id_):
    """
    Returns the AnnotationSet in this dataset with the specified 'id'
    """
    # EAFP: a single lookup instead of a membership test plus a lookup.
    try:
        return self._variantAnnotationSetIdMap[id_]
    except KeyError:
        raise exceptions.AnnotationSetNotFoundException(id_)
/** Entry point for fluent list operations over {@code target}. */
public static <T> Level0ListOperator<List<T>,T> on(final List<T> target) {
    return onList(target);
}
/**
 * Gets the qualification type. The request is first passed through the
 * client's pre-execution hook, then dispatched.
 */
@Override
public GetQualificationTypeResult getQualificationType(GetQualificationTypeRequest request) {
    request = beforeClientExecution(request);
    return executeGetQualificationType(request);
}
/**
 * Sets the login message text.
 *
 * @throws CmsIllegalArgumentException if messages are enabled and the text is blank
 */
public void setMessage(String message) {

    checkFrozen();
    // An enabled login message must not be empty.
    if (isEnabled() && CmsStringUtil.isEmptyOrWhitespaceOnly(message)) {
        throw new CmsIllegalArgumentException(Messages.get().container(Messages.ERR_LOGIN_MESSAGE_BAD_MESSAGE_0));
    }
    m_message = message;
}
def check_channel(fcn):
    """Decorator that ensures a valid channel passed in.

    Args:
        fcn (function): Function that has a ChannelResource as its second argument.

    Returns:
        (function): Wraps given function with one that checks for a valid channel.
    """
    # Local import keeps this block self-contained.
    import functools

    # functools.wraps preserves the wrapped function's name/docstring so
    # introspection and error messages stay meaningful.
    @functools.wraps(fcn)
    def wrapper(*args, **kwargs):
        if not isinstance(args[1], ChannelResource):
            raise RuntimeError('resource must be an instance of intern.resource.boss.ChannelResource.')
        if not args[1].cutout_ready:
            raise PartialChannelResourceError(
                'ChannelResource not fully initialized. Use intern.remote.BossRemote.get_channel({}, {}, {})'.format(
                    args[1].name, args[1].coll_name, args[1].exp_name))
        return fcn(*args, **kwargs)
    return wrapper
def message_handler(type_, from_):
    """
    Deprecated alias of :func:`.dispatcher.message_handler`.

    .. deprecated:: 0.9
    """
    # Imported lazily, presumably to avoid a circular import at module
    # load time -- confirm before moving to module level.
    import aioxmpp.dispatcher
    return aioxmpp.dispatcher.message_handler(type_, from_)
/**
 * Persists every locale whose translations changed back to its locked VFS
 * property-bundle file, keeping the file's content-encoding property in
 * sync with the encoding used to write it.
 *
 * @throws CmsException if reading/writing the VFS resources fails
 */
private void saveToPropertyVfsBundle() throws CmsException {

    for (Locale l : m_changedTranslations) {
        SortedProperties props = m_localizations.get(l);
        LockedFile f = m_lockedBundleFiles.get(l);
        if ((null != props) && (null != f)) {
            try {
                // Serialize the properties with the file's own encoding.
                ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
                Writer writer = new OutputStreamWriter(outputStream, f.getEncoding());
                props.store(writer, null);
                byte[] contentBytes = outputStream.toByteArray();
                CmsFile file = f.getFile();
                file.setContents(contentBytes);
                // Keep the content-encoding property consistent with the
                // encoding actually used above.
                String contentEncodingProperty = m_cms.readPropertyObject(
                    file,
                    CmsPropertyDefinition.PROPERTY_CONTENT_ENCODING,
                    false).getValue();
                if ((null == contentEncodingProperty) || !contentEncodingProperty.equals(f.getEncoding())) {
                    m_cms.writePropertyObject(
                        m_cms.getSitePath(file),
                        new CmsProperty(
                            CmsPropertyDefinition.PROPERTY_CONTENT_ENCODING,
                            f.getEncoding(),
                            f.getEncoding()));
                }
                m_cms.writeFile(file);
            } catch (IOException e) {
                LOG.error(
                    Messages.get().getBundle().key(
                        Messages.ERR_READING_FILE_UNSUPPORTED_ENCODING_2,
                        f.getFile().getRootPath(),
                        f.getEncoding()),
                    e);
            }
        }
    }
}
/**
 * Returns whether the tracking event's metadata marks it as a lineage event.
 */
public static boolean isLineageEvent(GobblinTrackingEvent event) {
    String eventType = event.getMetadata().get(EVENT_TYPE);
    return StringUtils.isNotEmpty(eventType) && eventType.equals(LINEAGE_EVENT_TYPE);
}
def get_edge_pathways(self, edge_id):
    """Get the pathways associated with an edge.

    Parameters
    -----------
    edge_id : tup(int, int)

    Returns
    -----------
    tup(str, str)|None, the edge as a pair of 2 pathways if the edge id
        is in this network
    """
    first_vertex, second_vertex = edge_id
    first_pathway = self.get_pathway_from_vertex_id(first_vertex)
    second_pathway = self.get_pathway_from_vertex_id(second_vertex)
    # Both endpoints must resolve for the edge to be in this network.
    if first_pathway and second_pathway:
        return (first_pathway, second_pathway)
    return None
java | @Override
public ExtendedSet<T> union(Collection<? extends T> other) {
ExtendedSet<T> clone = clone();
clone.addAll(other);
return clone;
} |
/**
 * Decorates the cache for best-effort reads: failures are silent and lock
 * acquisition does not wait.
 */
public static AdvancedCache failSilentReadCache(AdvancedCache cache) {
    return cache.withFlags(
        Flag.FAIL_SILENTLY,
        Flag.ZERO_LOCK_ACQUISITION_TIMEOUT
    );
}
def _create(self, rawtitle):
    """Create a page with this title, if it doesn't exist.

    This method first checks whether a page with the same slug
    (sanitized name) exists_on_disk. If it does, it doesn't do antyhing.
    Otherwise, the relevant attributes are created.

    Nothing is written to disc (to the source file). You must call
    the write_page method to do that. Doing it this way, after
    creation you can call a method to add random text, for example,
    before committing the page to disk.

    Raises ValueError if a page with this slug already exists on disk.
    """
    slug = util.make_slug(rawtitle)
    if self.site.page_exists_on_disk(slug):
        raise ValueError
        #print "Attempted to create a page which already exists."
        #return False
    # NOTE(review): unicode(..., "UTF-8") is Python 2 only -- confirm this
    # codebase still targets Python 2.
    self._title = unicode(rawtitle,"UTF-8")
    self._slug = slug
    # Source markdown lives at <source>/<slug>/<slug>.md; rendered HTML at
    # <www>/<slug>/index.html.
    self._dirs['source_dir'] = os.path.join(self.site.dirs['source'], slug)
    self._dirs['source_filename'] = os.path.join(self._dirs['source_dir'],
                                                 slug + '.md')
    self._dirs['www_dir'] = os.path.join(self.site.dirs['www'], slug)
    self._dirs['www_filename'] = os.path.join(self._dirs['www_dir'], \
                                              'index.html')
    self._config = self._create_config()
    return True
def compare(times_list=None,
            name=None,
            include_list=True,
            include_stats=True,
            delim_mode=False,
            format_options=None):
    """
    Produce a formatted comparison of timing datas.

    Notes:
        If no times_list is provided, produces comparison reports on all parallel
        subdivisions present at the root level of the current timer. To compare
        parallel subdivisions at a lower level, get the times data, navigate
        within it to the parallel list of interest, and provide that as input
        here. As with report(), any further parallel subdivisions encountered
        have only their member with the greatest total time reported on (no
        branching).

    Args:
        times_list (Times, optional): list or tuple of Times objects. If not
            provided, uses current root timer.
        name (any, optional): Identifier, passed through str().
        include_list (bool, optional): Display stamps hierarchy.
        include_stats (bool, optional): Display stamp comparison statistics.
        delim_mode (bool, optional): If True, format for spreadsheet.
        format_options (None, optional): Formatting options, see below.

    Formatting Keywords & Defaults:
        Human-readable Mode
            - 'stamp_name_width': 18
            - 'list_column_width': 12
            - 'list_tab_width': 2
            - 'stat_column_width': 8
            - 'stat_tab_width': 2
            - 'indent_symbol: ' ' (one space)
        Delimited Mode
            - 'delimiter': '\t' (tab)
            - 'ident_symbol': '+'

    Returns:
        str: Times data comparison as formatted string.

    Raises:
        TypeError: If any element of provided collection is not a Times object.
    """
    if times_list is None:
        # No explicit data: compare every parallel subdivision at the root.
        rep = ''
        for par_dict in itervalues(f.root.times.par_subdvsn):
            for par_name, par_list in iteritems(par_dict):
                rep += report_loc.compare(par_list,
                                          par_name,
                                          include_list,
                                          include_stats,
                                          delim_mode,
                                          format_options)
    else:
        # Validate the explicit collection before delegating.
        if not isinstance(times_list, (list, tuple)):
            raise TypeError("Expected a list/tuple of times instances for param 'times_list'.")
        if not all([isinstance(times, Times) for times in times_list]):
            raise TypeError("At least one member of param 'times_list' is not a Times object.")
        rep = report_loc.compare(times_list,
                                 name,
                                 include_list,
                                 include_stats,
                                 delim_mode,
                                 format_options)
    return rep
/**
 * Reads up to {@code length} bytes into the buffer, retrying the native
 * read until either data arrives or the effective timeout elapses.
 *
 * @param timeout read timeout in ms; negative means use the socket timeout
 * @return the native read result (bytes read, or a JniStream status code)
 * @throws ClientDisconnectException if the request deadline already passed
 */
public int read(byte []buffer, int offset, int length, long timeout)
    throws IOException
{
    if (length == 0) {
        throw new IllegalArgumentException();
    }

    // Enforce the per-request deadline before attempting any I/O.
    long requestExpireTime = _requestExpireTime;

    if (requestExpireTime > 0 && requestExpireTime < CurrentTime.currentTime()) {
        close();
        throw new ClientDisconnectException(L.l("{0}: request-timeout read",
                                                addressRemote()));
    }

    int result = 0;

    synchronized (_readLock) {
        long now = CurrentTime.getCurrentTimeActual();

        long expires;

        // gap is because getCurrentTimeActual() isn't exact
        long gap = 20;

        if (timeout >= 0)
            expires = timeout + now - gap;
        else
            expires = _socketTimeout + now - gap;

        // Retry on native timeouts until the computed deadline passes.
        do {
            result = readNative(_socketFd, buffer, offset, length, timeout);

            now = CurrentTime.getCurrentTimeActual();
            timeout = expires - now;
        } while (result == JniStream.TIMEOUT_EXN && timeout > 0);
    }

    return result;
}
java | public static boolean isValidJavaEncoding(String javaEncoding) {
if (javaEncoding != null) {
int length = javaEncoding.length();
if (length > 0) {
for (int i = 1; i < length; i++) {
char c = javaEncoding.charAt(i);
if ((c < 'A' || c > 'Z') && (c < 'a' || c > 'z') &&
(c < '0' || c > '9') && c != '.' && c != '_' &&
c != '-') {
return false;
}
}
return true;
}
}
return false;
} |
def vfr_hud_encode(self, airspeed, groundspeed, heading, throttle, alt, climb):
                '''
                Build (but do not send) a VFR_HUD message: the metrics
                typically displayed on a HUD for fixed wing aircraft.

                airspeed    : Current airspeed in m/s (float)
                groundspeed : Current ground speed in m/s (float)
                heading     : Current heading in degrees, in compass units (0..360, 0=north) (int16_t)
                throttle    : Current throttle setting in integer percent, 0 to 100 (uint16_t)
                alt         : Current altitude (MSL), in meters (float)
                climb       : Current climb rate in meters/second (float)

                Returns a MAVLink_vfr_hud_message ready to be sent.
                '''
                return MAVLink_vfr_hud_message(airspeed, groundspeed, heading, throttle, alt, climb)
java | public static Set<String> getTickets(Set<String> basedTickets, RoxableTest methodAnnotation, RoxableTestClass classAnnotation) {
Set<String> tickets;
if (basedTickets == null) {
tickets = new HashSet<>();
}
else {
tickets = populateTickets(basedTickets, new HashSet<String>());
}
if (classAnnotation != null && classAnnotation.tickets() != null) {
tickets = populateTickets(new HashSet<>(Arrays.asList(classAnnotation.tickets())), tickets);
}
if (methodAnnotation != null && methodAnnotation.tickets() != null) {
tickets = populateTickets(new HashSet<>(Arrays.asList(methodAnnotation.tickets())), tickets);
}
return tickets;
} |
def cut_selection(self):
        """
        Return a (:class:`.Document`, :class:`.ClipboardData`) tuple, where the
        document represents the new document when the selection is cut, and the
        clipboard data represents whatever has to be put on the clipboard.

        When there is no active selection, the document is returned unchanged
        together with empty clipboard data.
        """
        if self.selection:
            cut_parts = []
            remaining_parts = []
            new_cursor_position = self.cursor_position
            # Walk the selected ranges in order, splitting the text into the
            # pieces to keep (remaining_parts) and the pieces to cut.
            last_to = 0
            for from_, to in self.selection_ranges():
                # Remember the start of the first cut range: that is where
                # the cursor lands after the cut.
                if last_to == 0:
                    new_cursor_position = from_
                remaining_parts.append(self.text[last_to:from_])
                cut_parts.append(self.text[from_:to + 1])  # 'to' is inclusive
                last_to = to + 1
            remaining_parts.append(self.text[last_to:])
            # Multiple cut ranges are joined with newlines (block selection).
            cut_text = '\n'.join(cut_parts)
            remaining_text = ''.join(remaining_parts)
            # In case of a LINES selection, don't include the trailing newline.
            if self.selection.type == SelectionType.LINES and cut_text.endswith('\n'):
                cut_text = cut_text[:-1]
            return (Document(text=remaining_text, cursor_position=new_cursor_position),
                    ClipboardData(cut_text, self.selection.type))
        else:
            return self, ClipboardData('')
def get_last_traded_dt(self, asset, dt):
    """
    Return the latest minute on or before ``dt`` in which ``asset`` traded.

    The continuous-future ``asset`` is first resolved to the concrete
    contract that is "center" at ``dt`` (using the roll finder matching the
    asset's roll style); the underlying bar reader is then queried for that
    contract's last traded minute.

    Parameters
    ----------
    asset : zipline.asset.Asset
        The asset for which to get the last traded minute.
    dt : pd.Timestamp
        The minute at which to start searching for the last traded minute.

    Returns
    -------
    last_traded : pd.Timestamp
        The dt of the last trade for the given asset, using the input dt as
        a vantage point; ``pd.NaT`` when no contract is center at ``dt``.
    """
    roll_finder = self._roll_finders[asset.roll_style]
    contract_sid = roll_finder.get_contract_center(
        asset.root_symbol, dt, asset.offset)
    if contract_sid is None:
        return pd.NaT
    contract = roll_finder.asset_finder.retrieve_asset(contract_sid)
    return self._bar_reader.get_last_traded_dt(contract, dt)
def parse(argv, level=0):
    """
    Recursively parse sub-arguments delimited by ``[`` and ``]``.

    Examples
    --------
    ```
    >>> argv = ['--foo', 'bar', '--buz', '[', 'qux', '--quux', 'corge', ']']
    >>> subarg.parse(argv)
    ['--foo', 'bar', '--buz', ['qux', '--quux', 'corge']]
    ```

    Parameters
    ----------
    argv : list of strings
        List of argument strings like `sys.argv`.
    level : int
        Current bracket nesting depth (internal; leave at the default 0).

    Returns
    -------
    Nested list of argument strings.
    """
    parsed = []
    for pos, token in enumerate(argv):
        if token == '[':
            level += 1
            if level == 1:
                # Remember where the outermost sub-list begins.
                start = pos + 1
        elif token == ']':
            level -= 1
            sub = argv[start:pos]
            if level == 0:
                parsed.append(parse(sub, level))
        elif level == 0:
            parsed.append(token)
    return parsed
def do_add_item(self, args):
    """Echo back which item the user added (food, sport, or other)."""
    # First truthy category wins; with none set, report 'no items'.
    # This mirrors the original food -> sport -> other precedence.
    selected = args.food or args.sport or args.other or 'no items'
    self.poutput("You added {}".format(selected))
/**
 * Creates a writable channel for a new object at the given path, optionally
 * pinned to an existing generation id, and schedules a best-effort timestamp
 * update on the parent directories.
 *
 * @param path    the GCS path of the object to create
 * @param options creation options; a non-UNKNOWN existing generation id is
 *                carried into the resource id (for precondition checks)
 * @return a channel the caller writes the object contents to
 * @throws IOException if validation or creation fails
 */
WritableByteChannel createInternal(URI path, CreateFileOptions options)
    throws IOException {
  // Validate the given path. false == do not allow empty object name.
  StorageResourceId resourceId = pathCodec.validatePathAndGetId(path, false);
  if (options.getExistingGenerationId() != StorageResourceId.UNKNOWN_GENERATION_ID) {
    // Rebuild the id with the caller-supplied generation so the create is
    // conditional on that generation.
    resourceId = new StorageResourceId(
        resourceId.getBucketName(),
        resourceId.getObjectName(),
        options.getExistingGenerationId());
  }
  WritableByteChannel channel = gcs.create(resourceId, objectOptionsFromFileOptions(options));
  // Touch parent directories' timestamps (no deletions in this operation).
  tryUpdateTimestampsForParentDirectories(ImmutableList.of(path), ImmutableList.<URI>of());
  return channel;
}
def update_values(ims, image_id, iq_zeropt=True, comment=False, snr=False, commdict=None):
    """
    Update a row in ossuary.

    :param ims: an ImageQuery, contains image table and a connector
    :param image_id: the primary key of the row to be updated
    :param iq_zeropt: Keyword set if iq and zeropoint are to be checked for updating
    :param comment: Keyword set if image is to have a comment of Stephen's added
    :param snr: Keyword set if the signal-to-noise ratio is to be updated
    :param commdict: The dictionary parsed from Stephen's file of comments
    :return: No return, just updates ossuary.

    NOTE(review): the flags are not additive -- each enabled flag REPLACES
    updating_params, so when several are set only the last one (snr, then
    comment, then iq_zeropt in reverse priority) takes effect.
    """
    updating_params = {}
    if iq_zeropt:
        updating_params = get_iq_and_zeropoint(image_id, {})
    if comment:
        # presumably commdict keys are stringified image ids -- confirm
        updating_params = {'comment': commdict[str(image_id)]}
    if snr:
        updating_params = get_snr(image_id, {})
    ss = ims.images.update(ims.images.c.image_id == image_id)
    ims.conn.execute(ss, updating_params)
    return
/**
 * Loads a model of the given type from the resolved resource location.
 *
 * @param clzz     the model class to load
 * @param location the (unresolved) resource location
 * @return the loaded model instance
 * @throws MojoExecutionException if the model cannot be loaded
 */
private <T> T loadRequiredModel(Class<T> clzz, String location) throws MojoExecutionException {
    return ModelLoaderUtils.loadModel(clzz, getResourceLocation(location));
}
def update_letter(self, letter_id, letter_dict):
        """
        Updates a letter by issuing a PUT request against the letters
        resource of the Billomat API.

        :param letter_id: the letter id
        :param letter_dict: dict of letter fields to update
        :return: dict with the parsed API response
        """
        return self._create_put_request(
            resource=LETTERS,
            billomat_id=letter_id,
            send_data=letter_dict
        )
def get_objects_dex(self):
    """
    Yield every analyzed dex object together with its Analysis object.

    :returns: tuples of (sha256, DalvikVMFormat, Analysis)
    """
    # TODO: there is no variant like get_objects_apk
    for sha256, dex_obj in self.analyzed_dex.items():
        yield sha256, dex_obj, self.analyzed_vms[sha256]
def keyPressEvent(self, ev):
        """Handle a key press event.

        Pure modifier presses are ignored (returns False); anything else is
        forwarded to the current mode, returning whatever the mode's
        keyPressEvent returns. While the mode handler runs,
        ``_processingKeyPress`` is set so re-entrant handling can be detected.
        """
        if ev.key() in (Qt.Key_Shift, Qt.Key_Control,
                        Qt.Key_Meta, Qt.Key_Alt,
                        Qt.Key_AltGr, Qt.Key_CapsLock,
                        Qt.Key_NumLock, Qt.Key_ScrollLock):
            return False  # ignore modifier pressing. Will process key pressing later
        self._processingKeyPress = True
        try:
            ret = self._mode.keyPressEvent(ev)
        finally:
            # Always clear the flag, even if the mode handler raises.
            self._processingKeyPress = False
        return ret
/**
 * Records the start date of a shard for a sharded table by writing a column
 * (shard number -> start time in millis, stored as a string) into the
 * shards row of the table's terms store.
 *
 * @param tableDef    the sharded table; must have isSharded() == true
 * @param shardNumber the shard number; must be > 0
 * @param startDate   the shard's start date
 */
public void addShardStart(TableDefinition tableDef, int shardNumber, Date startDate) {
    assert tableDef.isSharded() && shardNumber > 0;
    addColumn(SpiderService.termsStoreName(tableDef),
              SHARDS_ROW_KEY,
              Integer.toString(shardNumber),
              Utils.toBytes(Long.toString(startDate.getTime())));
}
def run(self):
    """Thread entry point: issue the requested IPMI command.

    The pseudo-command "pxer" first sets PXE boot and, when the status is
    0 or unset, falls through to a reboot. Any exception is captured on
    ``self.error`` instead of propagating out of the thread.
    """
    try:
        if self.command == "pxer":
            self.ipmi_method(command="pxe")
            if self.status not in (0, None):
                return
            self.command = "reboot"
        self.ipmi_method(self.command)
    except Exception as exc:
        self.error = str(exc)
/**
 * Adds a single attribute value to the history identified by the given key
 * and returns the current history as JSON, or null when no history entry
 * is configured for the key/value.
 *
 * @param pKey       history key identifying the MBean/attribute
 * @param pAttrName  attribute name to record
 * @param pValue     the value to add
 * @param pTimestamp timestamp (epoch seconds) of the sample
 * @return the accumulated history as a JSONObject, or null
 */
private JSONObject addAttributeFromSingleValue(HistoryKey pKey, String pAttrName, Object pValue, long pTimestamp) {
    HistoryEntry entry = getEntry(pKey,pValue,pTimestamp);
    return entry != null ?
            addToHistoryEntryAndGetCurrentHistory(new JSONObject(), entry, pAttrName, pValue, pTimestamp) :
            null;
}
def bfd_parse(data):
    """
    Parse a raw frame and return the BFD class from the packet library.

    The frame is expected to be Ethernet / IPv4 / UDP with a BFD payload;
    each protocol layer is asserted in order before the remaining payload
    is handed to the BFD parser.
    """
    layers = iter(packet.Packet(data))
    assert isinstance(next(layers), ethernet.ethernet)
    assert isinstance(next(layers), ipv4.ipv4)
    assert isinstance(next(layers), udp.udp)
    payload = next(layers)
    return bfd.bfd.parser(payload)[0]
/**
 * Notifies all registered user event listeners that the given user is about
 * to be deleted. Listeners are invoked in registration order; the first
 * listener that throws aborts the notification chain.
 *
 * @param user the user about to be deleted
 * @throws Exception propagated from any listener
 */
private void preDelete(User user) throws Exception
{
    for (UserEventListener listener : listeners)
    {
        listener.preDelete(user);
    }
}
def assign_reads_to_database(query, database_fasta, out_path, params=None):
    """Assign a set of query sequences to a reference database with BWA.

    query: absolute file path to query sequences
    database_fasta: absolute file path to the reference database
    out_path: absolute file path of the file to be output
    params: dict of BWA specific parameters.
        * Specify which algorithm to use (bwa-short or bwasw) using the
          dict key "algorithm"
        * if algorithm is bwasw, specify params for the bwa bwasw
          subcommand
        * if algorithm is bwa-short, specify params for the bwa samse
          subcommand
        * if algorithm is bwa-short, must also specify params to use with
          bwa aln, which is used to get the sai file necessary to run samse.
          bwa aln params should be passed in using dict key "aln_params" and
          the associated value should be a dict of params for the bwa aln
          subcommand
        * if a temporary directory is not specified in params using dict
          key "temp_dir", it will be assumed to be /tmp

    This method returns an open file object (SAM format).

    NOTE(review): uses dict.iteritems(), i.e. this is Python 2 code.
    """
    if params is None:
        params = {}
    # set the output path
    params['-f'] = out_path
    # if the algorithm is not specified in the params dict, or the algorithm
    # is not recognized, raise an exception
    if 'algorithm' not in params:
        raise InvalidArgumentApplicationError("Must specify which algorithm to"
                                              " use ('bwa-short' or 'bwasw')")
    elif params['algorithm'] not in ('bwa-short', 'bwasw'):
        raise InvalidArgumentApplicationError("Unknown algorithm '%s' Please "
                                              "enter either 'bwa-short' or "
                                              "'bwasw'." % params['algorithm'])
    # if the temp directory is not specified, assume /tmp
    if 'temp_dir' not in params:
        params['temp_dir'] = '/tmp'
    # if the algorithm is bwa-short, we must build use bwa aln to get an sai
    # file before calling bwa samse on that sai file, so we need to know how
    # to run bwa aln. Therefore, we must ensure there's an entry containing
    # those parameters
    if params['algorithm'] == 'bwa-short':
        if 'aln_params' not in params:
            raise InvalidArgumentApplicationError("With bwa-short, need to "
                                                  "specify a key 'aln_params' "
                                                  "and its value, a dictionary"
                                                  " to pass to bwa aln, since"
                                                  " bwa aln is an intermediate"
                                                  " step when doing "
                                                  "bwa-short.")
    # we have this params dict, with "algorithm" and "temp_dir", etc which are
    # not for any of the subcommands, so make a new params dict that is the
    # same as the original minus these addendums
    subcommand_params = {}
    for k, v in params.iteritems():
        if k not in ('algorithm', 'temp_dir', 'aln_params'):
            subcommand_params[k] = v
    # build index from database_fasta
    # get a temporary file name that is not in use
    _, index_prefix = mkstemp(dir=params['temp_dir'], suffix='')
    create_bwa_index_from_fasta_file(database_fasta, {'-p': index_prefix})
    # if the algorithm is bwasw, things are pretty simple. Just instantiate
    # the proper controller and set the files
    if params['algorithm'] == 'bwasw':
        bwa = BWA_bwasw(params=subcommand_params)
        files = {'prefix': index_prefix, 'query_fasta': query}
    # if the algorithm is bwa-short, it's not so simple
    elif params['algorithm'] == 'bwa-short':
        # we have to call bwa_aln to get the sai file needed for samse
        # use the aln_params we ensured we had above
        bwa_aln = BWA_aln(params=params['aln_params'])
        aln_files = {'prefix': index_prefix, 'fastq_in': query}
        # get the path to the sai file
        sai_file_path = bwa_aln(aln_files)['output'].name
        # we will use that sai file to run samse
        bwa = BWA_samse(params=subcommand_params)
        files = {'prefix': index_prefix, 'sai_in': sai_file_path,
                 'fastq_in': query}
    # run which ever app controller we decided was correct on the files
    # we set up
    result = bwa(files)
    # they both return a SAM file, so return that
    return result['output']
/**
 * Frees this task/session: detaches it from its frame, frees the help view,
 * sub-screens and record owners, removes it from the application's task
 * list, and — when this was the root applet — promotes another BaseApplet
 * task to root. In stand-alone mode with no class finder, exits the JVM.
 */
public void free()
{
    if (BaseApplet.getSharedInstance() != null)
        if (BaseApplet.getSharedInstance().getApplet() == null)
            if (this.getParent() != null)
                this.getParent().remove(this); // Remove from frame
    if (this.getHelpView() != null)
        this.getHelpView().free();
    this.freeSubComponents(this); // Free all the sub-screens.
    if (m_recordOwnerCollection != null)
        m_recordOwnerCollection.free();
    m_recordOwnerCollection = null;
    boolean bEmptyTaskList = true;
    if (m_application != null)
        bEmptyTaskList = m_application.removeTask(this); // Remove this session from the list
    if (bEmptyTaskList)
        this.quit();
    if (Application.getRootApplet() == this)
    {
        Application.setRootApplet(null);
        // NOTE(review): m_application is dereferenced here without a null
        // check, unlike the removeTask() call above — confirm it cannot be
        // null when this task is the root applet.
        if (m_application.getTaskList() != null)
        {
            // Promote any remaining BaseApplet task to be the new root.
            for (Task task : m_application.getTaskList().keySet())
            {
                if (task instanceof BaseApplet)
                    Application.setRootApplet((BaseApplet)task); // New main applet
            }
        }
    }
    m_application = null;
    if (bEmptyTaskList)
        if (gbStandAlone)
            if (ClassServiceUtility.getClassService().getClassFinder(null) == null)
                System.exit(0); // Hack - Can't figure out how to get this to applet to quit every time.
}
/**
 * Rotates a single point by the given angle, delegating to the list-based
 * overload of {@code spin} and returning its only result.
 *
 * @param point the point to rotate
 * @param angle the rotation angle
 * @return the rotated point
 */
public static Point spin(Point point, double angle) {
    return spin(Collections.singletonList(point), angle).get(0);
}
/**
 * Registers a mapped class: installs its converters, optionally validates
 * it (interfaces are never validated), and indexes it both by class name
 * and by collection name.
 *
 * @param mc       the mapped class to register
 * @param validate whether to run mapping validation
 * @return the same mapped class, for chaining
 */
private MappedClass addMappedClass(final MappedClass mc, final boolean validate) {
    addConverters(mc);
    if (validate && !mc.isInterface()) {
        mc.validate(this);
    }
    mappedClasses.put(mc.getClazz().getName(), mc);
    Set<MappedClass> mcs = mappedClassesByCollection.get(mc.getCollectionName());
    if (mcs == null) {
        mcs = new CopyOnWriteArraySet<MappedClass>();
        // putIfAbsent handles the race where another thread registered the
        // collection first: keep the winner's set.
        final Set<MappedClass> temp = mappedClassesByCollection.putIfAbsent(mc.getCollectionName(), mcs);
        if (temp != null) {
            mcs = temp;
        }
    }
    mcs.add(mc);
    return mc;
}
def username(elk, user_number):
    """Return the name of a user.

    Numbers inside the configured user range resolve to the stored user
    name; 201-203 are pseudo-users reported by the panel; anything else
    yields an empty string.
    """
    if 0 <= user_number < elk.users.max_elements:
        return elk.users[user_number].name
    pseudo_users = {201: "*Program*", 202: "*Elk RP*", 203: "*Quick arm*"}
    return pseudo_users.get(user_number, "")
def compile_results(self):
        """Compile all results for the current test.

        Builds the result dataframes, counts the total number of raw
        transactions, and initializes the start/end dates from the data.
        """
        self._init_dataframes()
        # One transaction per raw result row.
        self.total_transactions = len(self.main_results['raw'])
        self._init_dates()
/**
 * Resolves the sub-resource for an individual tunnel belonging to the
 * current session.
 *
 * @param tunnelUUID the UUID of the tunnel to look up
 * @return a resource wrapping the matching tunnel
 * @throws GuacamoleResourceNotFoundException if the session has no tunnel
 *         with the given UUID
 * @throws GuacamoleException on other lookup failures
 */
@Path("{tunnel}")
public TunnelResource getTunnel(@PathParam("tunnel") String tunnelUUID)
        throws GuacamoleException {
    Map<String, UserTunnel> tunnels = session.getTunnels();
    // Pull tunnel with given UUID
    final UserTunnel tunnel = tunnels.get(tunnelUUID);
    if (tunnel == null)
        throw new GuacamoleResourceNotFoundException("No such tunnel.");
    // Return corresponding tunnel resource
    return tunnelResourceFactory.create(tunnel);
}
def setup_dns(endpoint):
    """Setup site domain to route to static site.

    :param endpoint: name of the hosting target (e.g. "s3"); used to look up
        the matching ``hosting.<target>`` section of the site config.
    :raises ValueError: if the site name or the hosting config is missing.

    NOTE(review): the ``endpoint`` parameter is shadowed below by the
    hosting-config dict of the same name; consider renaming one of them.
    """
    print("Setting up DNS...")
    yass = Yass(CWD)
    target = endpoint.lower()
    sitename = yass.sitename
    if not sitename:
        raise ValueError("Missing site name")
    # From here on, 'endpoint' is the hosting config dict, not the argument.
    endpoint = yass.config.get("hosting.%s" % target)
    if not endpoint:
        raise ValueError(
            "%s endpoint is missing in the hosting config" % target.upper())
    if target == "s3":
        p = publisher.S3Website(sitename=sitename,
                                aws_access_key_id=endpoint.get("aws_access_key_id"),
                                aws_secret_access_key=endpoint.get("aws_secret_access_key"),
                                region=endpoint.get("aws_region"))
        print("Setting AWS Route53 for: %s ..." % p.sitename)
        p.setup_dns()
        print("")
        print("Yass! Route53 setup successfully!")
        print("You can now visit the site at :")
        print(p.sitename_endpoint)
    footer()
/**
 * Closes the recovery log. While the log is healthy, a keypoint is forced
 * first so the on-disk state is current; the underlying log handle is only
 * really closed when the number of outstanding opens drops to zero, at
 * which point all in-memory state is reset. Any failure marks the log as
 * failed before the exception is rethrown.
 */
@Override
public void closeLog() throws InternalLogException
{
    if (tc.isEntryEnabled())
        Tr.entry(tc, "closeLog", this);
    if (_logHandle != null)
    {
        // Only try to keypoint the recovery log if its in a valid state. If the service
        // is closing the log in response to a failure, we do not want to make things worse
        // by performing a keypoint operation and corrupting the log. Same applies to
        // the compatibility test.
        if (!failed() && !incompatible())
        {
            try
            {
                keypoint();
            } catch (LogClosedException exc)
            {
                // The log is already closed so absorb the exception.
                FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.MultiScopeRecoveryLog.closeLog", "944", this);
            } catch (InternalLogException exc)
            {
                FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.MultiScopeRecoveryLog.closeLog", "948", this);
                markFailed(exc); /* @MD19484C */
                if (tc.isEntryEnabled())
                    Tr.exit(tc, "closeLog", exc);
                throw exc;
            } catch (Throwable exc)
            {
                FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.MultiScopeRecoveryLog.closeLog", "955", this);
                markFailed(exc); /* @MD19484C */
                if (tc.isEntryEnabled())
                    Tr.exit(tc, "closeLog", "InternalLogException");
                throw new InternalLogException(exc);
            }
        }
        synchronized (this)
        {
            // Balance a prior open; the physical close happens only when
            // every open has been matched by a close.
            _closesRequired--;
            if (_closesRequired <= 0)
            {
                try
                {
                    _logHandle.closeLog();
                } catch (InternalLogException exc)
                {
                    FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.MultiScopeRecoveryLog.closeLog", "974", this);
                    markFailed(exc); /* @MD19484C */
                    if (tc.isEntryEnabled())
                        Tr.exit(tc, "closeLog", exc);
                    throw exc;
                } catch (Throwable exc)
                {
                    FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.MultiScopeRecoveryLog.closeLog", "981", this);
                    markFailed(exc); /* @MD19484C */
                    if (tc.isEntryEnabled())
                        Tr.exit(tc, "closeLog", "InternalLogException");
                    throw new InternalLogException(exc);
                }
                // Reset the internal state so that a subsequent open operation does not
                // occurs with a "clean" environment.
                _logHandle = null;
                _recoverableUnits = null;
                _closesRequired = 0;
                _unwrittenDataSize.set(0);
                _totalDataSize = 0;
                _failed = false;
            }
        }
    }
    if (tc.isDebugEnabled())
        Tr.debug(tc, "Closes required: " + _closesRequired);
    if (tc.isEntryEnabled())
        Tr.exit(tc, "closeLog");
}
/**
 * Reports whether this destination/entity is a system one, as recorded in
 * the {@code _isSystem} flag. Trace entry/exit is emitted when enabled.
 *
 * @return true if this is a system entity
 */
@Override
public boolean isSystem()
{
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
    {
        SibTr.entry(tc, "isSystem");
        SibTr.exit(tc, "isSystem", Boolean.valueOf(_isSystem));
    }
    return _isSystem;
}
/**
 * Returns the JDK primitive-specialized unary operator functional type for
 * the given primitive type: IntUnaryOperator, LongUnaryOperator or
 * DoubleUnaryOperator, with the matching applyAs* method.
 *
 * @param type the primitive type; must be INT, LONG or DOUBLE
 * @return the corresponding functional type descriptor
 * @throws IllegalArgumentException for any other primitive kind
 */
public static FunctionalType primitiveUnaryOperator(PrimitiveType type) {
    switch (type.getKind()) {
        case INT:
            return new FunctionalType(
                Type.from(IntUnaryOperator.class),
                "applyAsInt",
                ImmutableList.of(type),
                type);
        case LONG:
            return new FunctionalType(
                Type.from(LongUnaryOperator.class),
                "applyAsLong",
                ImmutableList.of(type),
                type);
        case DOUBLE:
            return new FunctionalType(
                Type.from(DoubleUnaryOperator.class),
                "applyAsDouble",
                ImmutableList.of(type),
                type);
        default:
            throw new IllegalArgumentException("No primitive unary operator exists for " + type);
    }
}
/**
 * Attempts to match the given schedulable session to a node, trying each
 * needed locality level in order. Levels better than the session's last
 * achieved locality are skipped (they already failed this scheduler pass),
 * and levels the session has not waited long enough for stop the search.
 *
 * @param schedulable the session wrapper being scheduled
 * @param now         current time, used for locality-wait bookkeeping
 * @param nodeWait    how long to wait for node-local placement
 * @param rackWait    how long to wait for rack-local placement
 * @return the matched session/node pair, or null if no match was possible
 */
private MatchedPair doMatch(
    SessionSchedulable schedulable, long now, long nodeWait, long rackWait) {
  schedulable.adjustLocalityRequirement(now, nodeWait, rackWait);
  for (LocalityLevel level : neededLocalityLevels) {
    if (level.isBetterThan(schedulable.getLastLocality())) {
      /**
       * This means that the last time we tried to schedule this session
       * we could not achieve the current LocalityLevel level.
       * Since this is the same iteration of the scheduler we do not need to
       * try this locality level.
       * The last locality level of the shcedulable is getting reset on every
       * iteration of the scheduler, so we will retry the better localities
       * in the next run of the scheduler.
       */
      continue;
    }
    if (needLocalityCheck(level, nodeWait, rackWait) &&
        !schedulable.isLocalityGoodEnough(level)) {
      break;
    }
    Session session = schedulable.getSession();
    synchronized (session) {
      if (session.isDeleted()) {
        return null;
      }
      int pendingRequestCount = session.getPendingRequestCountForType(type);
      MatchedPair matchedPair = null;
      // Iterate over whichever side is smaller: the session's pending
      // requests or the runnable hosts in the snapshot.
      if (nodeSnapshot == null ||
          pendingRequestCount < nodeSnapshot.getRunnableHostCount()) {
        matchedPair = matchNodeForSession(session, level);
      } else {
        matchedPair = matchSessionForNode(session, level);
      }
      if (matchedPair != null) {
        schedulable.setLocalityLevel(level);
        return matchedPair;
      }
    }
  }
  // No match: start (or continue) the locality wait clock.
  schedulable.startLocalityWait(now);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Could not find a node for " +
        schedulable.getSession().getHandle());
  }
  return null;
}
def _init(self):
        """
        Convert model metadata to class attributes.

        Initializes the per-element bookkeeping lists (``u``, ``name``,
        ``idx``, ``uid``), defaults the unit-name lists for algebraic and
        state variables, and creates one empty list attribute for every
        declared data field, AC/DC interface variable, state, algebraic
        variable and service variable. Finally clears the allocation flags.

        This function is called automatically after ``define()`` in new
        versions.

        :return: None
        """
        assert self._name
        assert self._group
        # self.n = 0
        self.u = []
        self.name = []
        self.idx = []
        self.uid = {}
        # Default the y-variable display names to the algebraic variable
        # names when none were provided (and likewise for x/states below).
        if not self._unamey:
            self._unamey = self._algebs
        else:
            assert len(self._unamey) == len(self._algebs)
        if not self._unamex:
            self._unamex = self._states
        else:
            assert len(self._unamex) == len(self._states)
        # One empty list per declared parameter/variable name.
        for item in self._data.keys():
            self.__dict__[item] = []
        for bus in self._ac.keys():
            for var in self._ac[bus]:
                self.__dict__[var] = []
        for node in self._dc.keys():
            for var in self._dc[node]:
                self.__dict__[var] = []
        for var in self._states + self._algebs + self._service:
            self.__dict__[var] = []
        # Model starts out un-allocated and un-addressed.
        self._flags['sysbase'] = False
        self._flags['allocate'] = False
        self._flags['address'] = False
def calculate_covariance_matrix(X):
    """Calculate the sample variance-covariance matrix of the data.

    Uses the unbiased estimator (normalizing by ``m - 1``) and computes the
    centered Gram product in one vectorized step instead of the original
    per-row Python loop. Also accepts any array-like input, not just
    ndarrays, via ``np.asarray``.

    Parameters
    ----------
    X : array-like, shape (m, n)
        The data: m observations of n features.

    Returns
    -------
    numpy.ndarray, shape (n, n)
        The sample covariance matrix.
    """
    X = np.asarray(X, dtype=float)
    centered = X - X.mean(axis=0)
    # (n, m) @ (m, n) -> (n, n); equivalent to summing v @ v.T over rows.
    return centered.T @ centered / (X.shape[0] - 1)
/**
 * Performs the final evaluation of an authentication transaction: fails if
 * no handler succeeded, then builds the authentication and checks it
 * against the configured authentication policies, publishing the
 * appropriate failure events and rethrowing on policy rejection.
 *
 * @param builder                the accumulated authentication builder
 * @param transaction            the transaction being evaluated
 * @param authenticationHandlers the handlers that took part
 * @throws AuthenticationException when no handler succeeded or a policy failed
 */
protected void evaluateFinalAuthentication(final AuthenticationBuilder builder,
                                           final AuthenticationTransaction transaction,
                                           final Set<AuthenticationHandler> authenticationHandlers) throws AuthenticationException {
    if (builder.getSuccesses().isEmpty()) {
        publishEvent(new CasAuthenticationTransactionFailureEvent(this, builder.getFailures(), transaction.getCredentials()));
        throw new AuthenticationException(builder.getFailures(), builder.getSuccesses());
    }
    val authentication = builder.build();
    val failures = evaluateAuthenticationPolicies(authentication, transaction, authenticationHandlers);
    // Key is the overall pass/fail flag; value holds the policy exceptions.
    if (!failures.getKey()) {
        publishEvent(new CasAuthenticationPolicyFailureEvent(this, builder.getFailures(), transaction, authentication));
        failures.getValue().forEach(e -> handleAuthenticationException(e, e.getClass().getSimpleName(), builder));
        throw new AuthenticationException(builder.getFailures(), builder.getSuccesses());
    }
}
def parseEntityRef(self):
        """Parse ENTITY reference declarations.

        [68] EntityRef ::= '&' Name ';'  [ WFC: Entity Declared ]

        In a document without any DTD, a document with only an internal DTD
        subset which contains no parameter entity references, or a document
        with "standalone='yes'", the Name given in the entity reference must
        match that in an entity declaration, except that well-formed
        documents need not declare any of the following entities: amp, lt,
        gt, apos, quot. The declaration of a parameter entity must precede
        any reference to it. Similarly, the declaration of a general entity
        must precede any reference to it which appears in a default value in
        an attribute-list declaration. Note that if entities are declared in
        the external subset or in external parameter entities, a
        non-validating processor is not obligated to read and process their
        declarations; for such documents, the rule that an entity must be
        declared is a well-formedness constraint only if standalone='yes'.

        [ WFC: Parsed Entity ] An entity reference must not contain the name
        of an unparsed entity.

        Raises parserError on failure; returns the parsed xmlEntity wrapper.
        """
        ret = libxml2mod.xmlParseEntityRef(self._o)
        if ret is None:raise parserError('xmlParseEntityRef() failed')
        __tmp = xmlEntity(_obj=ret)
        return __tmp
/**
 * Applies the configured default order-by clause to the query, but only
 * when the caller has not already specified an ordering of their own.
 *
 * @param query the find query to (possibly) decorate
 */
protected void defaultFindOrderBy(Query<MODEL> query) {
    if (StringUtils.isNotBlank(defaultFindOrderBy)) {
        // see if we should use the default orderBy clause
        OrderBy<MODEL> orderBy = query.orderBy();
        if (orderBy.isEmpty()) {
            query.orderBy(defaultFindOrderBy);
        }
    }
}
java | public static boolean is_pavargadi(String str)
{
String s1 = VarnaUtil.getAdiVarna(str);
if (is_pavarga(s1)) return true;
return false;
} |
/**
 * Issues an API request and parses the result into an ApiResponse. On an
 * HTTP error, the error body is parsed into an ApiResponse instead of
 * propagating the exception, and any error/exception details it carries
 * are logged.
 *
 * @param method   the HTTP method to use
 * @param params   query parameters; may be null
 * @param data     request body payload; may be null
 * @param segments path segments appended to the base URL
 * @return the parsed response, possibly carrying an error; may be null if
 *         the error body could not be parsed
 */
public ApiResponse apiRequest(HttpMethod method,
        Map<String, Object> params, Object data, String... segments) {
    ApiResponse response = null;
    try {
        response = httpRequest(method, ApiResponse.class, params, data,
                segments);
        log.info("Client.apiRequest(): Response: " + response);
    } catch (HttpClientErrorException e) {
        log.error("Client.apiRequest(): HTTP error: "
                + e.getLocalizedMessage());
        // Error bodies are still ApiResponse JSON; surface their details.
        response = parse(e.getResponseBodyAsString(), ApiResponse.class);
        if ((response != null) && !isEmpty(response.getError())) {
            log.error("Client.apiRequest(): Response error: "
                    + response.getError());
            if (!isEmpty(response.getException())) {
                log.error("Client.apiRequest(): Response exception: "
                        + response.getException());
            }
        }
    }
    return response;
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.