language (stringclasses, 2 values) | func_code_string (stringlengths, 63 to 466k) |
---|---|
java | public boolean feed_publishTemplatizedAction(CharSequence titleTemplate,
Map<String, CharSequence> titleData,
CharSequence bodyTemplate,
Map<String, CharSequence> bodyData,
CharSequence bodyGeneral,
Collection<Integer> targetIds,
Collection<IFeedImage> images,
Long pageActorId)
throws FacebookException, IOException {
assert null != titleTemplate && !"".equals(titleTemplate);
FacebookMethod method = FacebookMethod.FEED_PUBLISH_TEMPLATIZED_ACTION;
ArrayList<Pair<String, CharSequence>> params =
new ArrayList<Pair<String, CharSequence>>(method.numParams());
params.add(new Pair<String, CharSequence>("title_template", titleTemplate));
if (null != titleData && !titleData.isEmpty()) {
JSONObject titleDataJson = new JSONObject();
titleDataJson.putAll(titleData);
params.add(new Pair<String, CharSequence>("title_data", titleDataJson.toString()));
}
if (null != bodyTemplate && !"".equals(bodyTemplate)) {
params.add(new Pair<String, CharSequence>("body_template", bodyTemplate));
if (null != bodyData && !bodyData.isEmpty()) {
JSONObject bodyDataJson = new JSONObject();
bodyDataJson.putAll(bodyData);
params.add(new Pair<String, CharSequence>("body_data", bodyDataJson.toString()));
}
}
if (null != bodyGeneral && !"".equals(bodyGeneral)) {
params.add(new Pair<String, CharSequence>("body_general", bodyGeneral));
}
if (null != targetIds && !targetIds.isEmpty()) {
params.add(new Pair<String, CharSequence>("target_ids", delimit(targetIds)));
}
if (null != pageActorId) {
params.add(new Pair<String, CharSequence>("page_actor_id", pageActorId.toString()));
}
handleFeedImages(params, images);
return extractBoolean(this.callMethod(method, params));
} |
java | public void doProcess(InputStream in, PrintWriter out, Map<String, Object> properties)
throws RemoteException
{
String strCommand = this.getProperty(REMOTE_COMMAND, properties);
try {
if (OPEN.equals(strCommand))
{
String strKeyArea = this.getNextStringParam(in, KEY, properties);
int iOpenMode = this.getNextIntParam(in, OPEN_MODE, properties);
boolean bDirection = this.getNextBooleanParam(in, DIRECTION, properties);
String strFields = this.getNextStringParam(in, FIELDS, properties);
Object objInitialKey = this.getNextObjectParam(in, INITIAL_KEY, properties);
Object objEndKey = this.getNextObjectParam(in, END_KEY, properties);
byte[] byBehaviorData = (byte[])this.getNextObjectParam(in, BEHAVIOR_DATA, properties);
((RemoteTable)m_remoteObject).open(strKeyArea, iOpenMode, bDirection, strFields, objInitialKey, objEndKey, byBehaviorData);
}
else if (ADD.equals(strCommand))
{
Object objData = this.getNextObjectParam(in, DATA, properties);
int iOpenMode = this.getNextIntParam(in, OPEN_MODE, properties);
Object objReturn = ((RemoteTable)m_remoteObject).add(objData, iOpenMode);
this.setReturnObject(out, objReturn);
}
else if (EDIT.equals(strCommand))
{
int iOpenMode = this.getNextIntParam(in, OPEN_MODE, properties);
int iErrorCode = ((RemoteTable)m_remoteObject).edit(iOpenMode);
this.setReturnObject(out, new Integer(iErrorCode));
}
else if (SET.equals(strCommand))
{
Object objData = this.getNextObjectParam(in, DATA, properties);
int iOpenMode = this.getNextIntParam(in, OPEN_MODE, properties);
((RemoteTable)m_remoteObject).set(objData, iOpenMode);
}
else if (REMOVE.equals(strCommand))
{
Object objData = this.getNextObjectParam(in, DATA, properties);
int iOpenMode = this.getNextIntParam(in, OPEN_MODE, properties);
((RemoteTable)m_remoteObject).remove(objData, iOpenMode);
}
else if (DO_MOVE.equals(strCommand))
{
int iRelPosition = this.getNextIntParam(in, POSITION, properties);
int iCount = this.getNextIntParam(in, COUNT, properties);
Object objReturn = ((RemoteTable)m_remoteObject).doMove(iRelPosition, iCount);
this.setReturnObject(out, objReturn);
}
else if (SEEK.equals(strCommand))
{
String strSeekSign = this.getNextStringParam(in, SIGN, properties);
int iOpenMode = this.getNextIntParam(in, OPEN_MODE, properties);
String strKeyArea = this.getNextStringParam(in, KEY, properties);
String strFields = this.getNextStringParam(in, FIELDS, properties);
Object objKeyData = this.getNextObjectParam(in, KEY_DATA, properties);
Object objReturn = ((RemoteTable)m_remoteObject).seek(strSeekSign, iOpenMode, strKeyArea, strFields, objKeyData);
this.setReturnObject(out, objReturn);
}
else if (DO_SET_HANDLE.equals(strCommand))
{
Object bookmark = this.getNextObjectParam(in, BOOKMARK, properties);
int iOpenMode = this.getNextIntParam(in, OPEN_MODE, properties);
String strFields = this.getNextStringParam(in, FIELDS, properties);
int iHandleType = this.getNextIntParam(in, TYPE, properties);
Object objReturn = ((RemoteTable)m_remoteObject).doSetHandle(bookmark, iOpenMode, strFields, iHandleType);
this.setReturnObject(out, objReturn);
}
else if (GET_LAST_MODIFIED.equals(strCommand))
{
int iHandleType = this.getNextIntParam(in, TYPE, properties);
Object objReturn = ((RemoteTable)m_remoteObject).getLastModified(iHandleType);
this.setReturnObject(out, objReturn);
}
else if (GET.equals(strCommand))
{
int iRowIndex = this.getNextIntParam(in, INDEX, properties);
int iRowCount = this.getNextIntParam(in, COUNT, properties);
Object objReturn = ((RemoteTable)m_remoteObject).get(iRowIndex, iRowCount);
this.setReturnObject(out, objReturn);
}
else if (SET_REMOTE_PROPERTY.equals(strCommand))
{
String strProperty = this.getNextStringParam(in, KEY, properties);
String strValue = this.getNextStringParam(in, VALUE, properties);
((RemoteTable)m_remoteObject).setRemoteProperty(strProperty, strValue);
}
else if (MAKE_FIELD_LIST.equals(strCommand))
{
String strFieldsToInclude = this.getNextStringParam(in, FIELDS, properties);
Object objReturn = ((RemoteTable)m_remoteObject).makeFieldList(strFieldsToInclude);
this.setReturnObject(out, objReturn);
}
else if (GET_REMOTE_DATABASE.equals(strCommand))
{
Map<String,Object> propIn = this.getNextPropertiesParam(in, PROPERTIES, properties);
if (propIn != null)
properties.putAll(propIn);
RemoteDatabase remoteDatabase = ((RemoteTable)m_remoteObject).getRemoteDatabase(properties);
// First, see if this is in my list already?
String strDBID = this.find(remoteDatabase);
if (strDBID == null)
{
strDBID = this.add(new DatabaseHolder((TaskHolder)this.getParentHolder(), remoteDatabase));
}
this.setReturnString(out, strDBID);
}
else
super.doProcess(in, out, properties);
} catch (DBException ex) {
this.setReturnObject(out, ex);
} catch (Exception ex) {
this.setReturnObject(out, ex);
}
} |
java | public void setUtc(String utc) {
if (utc == null) {
this.utc = null;
} else {
if (utc.length() != 20) {
throw new IllegalArgumentException("Must be of the form YYYYMMDDTHHMMSS.sssZ");
}
this.utc = utc; // keep the validated value
}
} |
python | def single_line_stdout(cmd, expected_errors=(), shell=True, sudo=False, quiet=False):
"""
Runs a command and returns, as a string, the first line of the result that would be written to `stdout`.
The output itself can be suppressed.
:param cmd: Command to run.
:type cmd: unicode
:param expected_errors: If the return code is non-zero, but found in this tuple, it will be ignored. ``None`` is
returned in this case.
:type expected_errors: tuple
:param shell: Use a shell.
:type shell: bool
:param sudo: Use `sudo`.
:type sudo: bool
:param quiet: If set to ``True``, does not show any output.
:type quiet: bool
:return: The result of the command as would be written to `stdout`.
:rtype: unicode
"""
return single_line(stdout_result(cmd, expected_errors, shell, sudo, quiet)) |
python | def make_pipeline(context):
"""
Create our pipeline.
"""
# Filter for primary share equities. IsPrimaryShare is a built-in filter.
primary_share = IsPrimaryShare()
# Not when-issued equities.
not_wi = ~IEXCompany.symbol.latest.endswith('.WI')
# Equities without LP in their name, .matches does a match using a regular
# expression
not_lp_name = ~IEXCompany.companyName.latest.matches('.* L[. ]?P.?$')
# Equities whose most recent Morningstar market cap is not null have
# fundamental data and therefore are not ETFs.
have_market_cap = IEXKeyStats.marketcap.latest >= 1
# At least a certain price
price = USEquityPricing.close.latest
AtLeastPrice = (price >= context.MyLeastPrice)
AtMostPrice = (price <= context.MyMostPrice)
# Filter for stocks that pass all of our previous filters.
tradeable_stocks = (
primary_share
& not_wi
& not_lp_name
& have_market_cap
& AtLeastPrice
& AtMostPrice
)
LowVar = 6
HighVar = 40
log.info(
'''
Algorithm initialized variables:
context.MaxCandidates %s
LowVar %s
HighVar %s''' %
(context.MaxCandidates, LowVar, HighVar))
# High dollar volume filter.
base_universe = AverageDollarVolume(
window_length=20,
mask=tradeable_stocks
).percentile_between(LowVar, HighVar)
# Short close price average.
ShortAvg = SimpleMovingAverage(
inputs=[USEquityPricing.close],
window_length=3,
mask=base_universe
)
# Long close price average.
LongAvg = SimpleMovingAverage(
inputs=[USEquityPricing.close],
window_length=45,
mask=base_universe
)
percent_difference = (ShortAvg - LongAvg) / LongAvg
# Filter to select securities to long.
stocks_worst = percent_difference.bottom(context.MaxCandidates)
securities_to_trade = (stocks_worst)
return Pipeline(
columns={
'stocks_worst': stocks_worst
},
screen=(securities_to_trade),
) |
java | public int fetchEntries(int tableIndex, int size, List<Map.Entry<K, V>> entries) {
final long now = Clock.currentTimeMillis();
final Segment<K, V> segment = segments[0];
final HashEntry<K, V>[] currentTable = segment.table;
int nextTableIndex;
if (tableIndex >= 0 && tableIndex < segment.table.length) {
nextTableIndex = tableIndex;
} else {
nextTableIndex = currentTable.length - 1;
}
int counter = 0;
while (nextTableIndex >= 0 && counter < size) {
HashEntry<K, V> nextEntry = currentTable[nextTableIndex--];
while (nextEntry != null) {
if (nextEntry.key() != null) {
final V value = nextEntry.value();
if (isValidForFetching(value, now)) {
K key = nextEntry.key();
entries.add(new AbstractMap.SimpleEntry<K, V>(key, value));
counter++;
}
}
nextEntry = nextEntry.next;
}
}
return nextTableIndex;
} |
java | private ImmutableMap<Predicate,ImmutableList<TermType>> extractCastTypeMap(
Multimap<Predicate, CQIE> ruleIndex, List<Predicate> predicatesInBottomUp,
ImmutableMap<CQIE, ImmutableList<Optional<TermType>>> termTypeMap, DBMetadata metadata) {
// Append-only
Map<Predicate,ImmutableList<TermType>> mutableCastMap = Maps.newHashMap();
for (Predicate predicate : predicatesInBottomUp) {
ImmutableList<TermType> castTypes = inferCastTypes(predicate, ruleIndex.get(predicate), termTypeMap,
mutableCastMap,metadata);
mutableCastMap.put(predicate, castTypes);
}
return ImmutableMap.copyOf(mutableCastMap);
} |
python | def needs_invalidation(self, requirement, cache_file):
"""
Check whether a cached binary distribution needs to be invalidated.
:param requirement: A :class:`.Requirement` object.
:param cache_file: The pathname of a cached binary distribution (a string).
:returns: :data:`True` if the cached binary distribution needs to be
invalidated, :data:`False` otherwise.
"""
if self.config.trust_mod_times:
return requirement.last_modified > os.path.getmtime(cache_file)
else:
checksum = self.recall_checksum(cache_file)
return checksum and checksum != requirement.checksum |
python | def request_frame(self):
"""Construct initiating frame."""
self.session_id = get_new_session_id()
return FrameCommandSendRequest(node_ids=[self.node_id], parameter=self.parameter, session_id=self.session_id) |
java | private void _createZeroArgsMethodExpression(MethodExpression methodExpression)
{
ExpressionFactory expressionFactory = getFacesContext().getApplication().getExpressionFactory();
this.methodExpressionZeroArg = expressionFactory.createMethodExpression(getElContext(),
methodExpression.getExpressionString(), Void.class, EMPTY_CLASS_ARRAY);
} |
python | async def scp_to(self, source, destination, user='ubuntu', proxy=False,
scp_opts=''):
"""Transfer files to this machine.
:param str source: Local path of file(s) to transfer
:param str destination: Remote destination of transferred files
:param str user: Remote username
:param bool proxy: Proxy through the Juju API server
:param scp_opts: Additional options to the `scp` command
:type scp_opts: str or list
"""
if proxy:
raise NotImplementedError('proxy option is not implemented')
address = self.dns_name
destination = '%s@%s:%s' % (user, address, destination)
await self._scp(source, destination, scp_opts) |
java | public void poppush( int n, Frame ary, String key) {
addRef(ary);
for( int i=0; i<n; i++ ) {
assert _sp > 0;
_sp--;
_fcn[_sp] = subRef(_fcn[_sp]);
_ary[_sp] = subRef(_ary[_sp], _key[_sp]);
}
push(1); _ary[_sp-1] = ary; _key[_sp-1] = key;
assert check_all_refcnts();
} |
python | def make_module_to_builder_dict(datasets=None):
"""Get all builders organized by module in nested dicts."""
# pylint: disable=g-long-lambda
# dict to hold tfds->image->mnist->[builders]
module_to_builder = collections.defaultdict(
lambda: collections.defaultdict(
lambda: collections.defaultdict(list)))
# pylint: enable=g-long-lambda
if datasets:
builders = [tfds.builder(name) for name in datasets]
else:
builders = [
tfds.builder(name)
for name in tfds.list_builders()
if name not in BUILDER_BLACKLIST
] + [tfds.builder("image_label_folder", dataset_name="image_label_folder")]
for builder in builders:
mod_name = builder.__class__.__module__
modules = mod_name.split(".")
if "testing" in modules:
continue
current_mod_ctr = module_to_builder
for mod in modules:
current_mod_ctr = current_mod_ctr[mod]
current_mod_ctr.append(builder)
module_to_builder = module_to_builder["tensorflow_datasets"]
return module_to_builder |
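The nested `collections.defaultdict` construction above lets the loop index straight into `module_to_builder[mod]` without creating intermediate dictionaries by hand. A minimal standalone sketch of that pattern (module names taken from the comment `tfds->image->mnist`, builder value illustrative):

```python
import collections

# Three nested levels: package -> subpackage -> module -> [builders].
tree = collections.defaultdict(
    lambda: collections.defaultdict(
        lambda: collections.defaultdict(list)))

node = tree
for mod in ["tensorflow_datasets", "image"]:
    node = node[mod]                   # each lookup materialises the next level
node["mnist"].append("mnist_builder")  # stand-in for a builder object

print(tree["tensorflow_datasets"]["image"]["mnist"])  # ['mnist_builder']
```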
python | def pad_matrix(self, matrix, pad_value=0):
"""
Pad a possibly non-square matrix to make it square.
:Parameters:
matrix : list of lists
matrix to pad
pad_value : int
value to use to pad the matrix
:rtype: list of lists
:return: a new, possibly padded, matrix
"""
max_columns = 0
total_rows = len(matrix)
for row in matrix:
max_columns = max(max_columns, len(row))
total_rows = max(max_columns, total_rows)
new_matrix = []
for row in matrix:
row_len = len(row)
new_row = row[:]
if total_rows > row_len:
# Row too short. Pad it.
new_row += [pad_value] * (total_rows - row_len)
new_matrix += [new_row]
while len(new_matrix) < total_rows:
new_matrix += [[pad_value] * total_rows]
return new_matrix |
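For reference, a standalone sketch of the same padding behaviour (the function name is hypothetical; it is not the class method above):

```python
def pad_matrix_sketch(matrix, pad_value=0):
    """Pad a possibly ragged, non-square list-of-lists matrix to a square one."""
    n = max(len(matrix), max((len(row) for row in matrix), default=0))
    padded = [row + [pad_value] * (n - len(row)) for row in matrix]
    padded += [[pad_value] * n for _ in range(n - len(padded))]
    return padded

print(pad_matrix_sketch([[1, 2, 3], [4, 5]]))
# [[1, 2, 3], [4, 5, 0], [0, 0, 0]]
```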
java | public BinaryDecoder createBinaryDecoder(final Encoding encoding)
throws UnsupportedEncodingException {
if (Encoding.QUOTED_PRINTABLE.equals(encoding)) {
return new QuotedPrintableCodec();
}
else if (Encoding.BASE64.equals(encoding)) {
return new Base64();
}
throw new UnsupportedEncodingException(MessageFormat.format(UNSUPPORTED_ENCODING_MESSAGE,
encoding));
} |
python | def from_clause(cls, clause):
""" Factory method """
(field, operator, val) = clause
return cls(field, operator, field_or_value(val)) |
java | @BetaApi
public final ListDisksPagedResponse listDisks(ProjectZoneName zone) {
ListDisksHttpRequest request =
ListDisksHttpRequest.newBuilder().setZone(zone == null ? null : zone.toString()).build();
return listDisks(request);
} |
java | public DrawerProfile setRoundedAvatar(Context context, Bitmap image) {
return setAvatar(new RoundedAvatarDrawable(new BitmapDrawable(context.getResources(), image).getBitmap()));
} |
python | def read(self):
"""Return a single byte from the output buffer
"""
if self._output_buffer:
b, self._output_buffer = (self._output_buffer[0:1],
self._output_buffer[1:])
return b
return b'' |
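The `[0:1]` slice matters here: on Python 3 it keeps the `bytes` type, whereas indexing with `[0]` would return an `int`. A self-contained sketch of the same pattern (class name is illustrative):

```python
class ByteBuffer:
    """Minimal stand-in for an object holding an output buffer of bytes."""

    def __init__(self, data=b''):
        self._output_buffer = data

    def read(self):
        # Return one byte (as bytes) from the front of the buffer, or b''.
        if self._output_buffer:
            b, self._output_buffer = (self._output_buffer[0:1],
                                      self._output_buffer[1:])
            return b
        return b''

buf = ByteBuffer(b'ab')
print(buf.read(), buf.read(), buf.read())  # b'a' b'b' b''
```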
java | SearchResult search(@NonNull ByteArraySegment key, int startPos) {
// Positions here are not indices into "source", rather they are entry positions, which is why we always need
// to adjust by using entryLength.
int endPos = getCount();
Preconditions.checkArgument(startPos <= endPos, "startPos must be non-negative and smaller than the number of items.");
while (startPos < endPos) {
// Locate the Key in the middle.
int midPos = startPos + (endPos - startPos) / 2;
// Compare it to the sought key.
int c = KEY_COMPARATOR.compare(key.array(), key.arrayOffset(),
this.data.array(), this.data.arrayOffset() + midPos * this.config.entryLength, this.config.keyLength);
if (c == 0) {
// Exact match.
return new SearchResult(midPos, true);
} else if (c < 0) {
// Search again to the left.
endPos = midPos;
} else {
// Search again to the right.
startPos = midPos + 1;
}
}
// Return an inexact search result with the position where the sought key would have been.
return new SearchResult(startPos, false);
} |
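The same halving search, restated as a Python sketch over a plain sorted list instead of fixed-length byte entries (names and types are illustrative, not the class above):

```python
def search_sketch(entries, key, start_pos=0):
    """Return (position, exact_match); on a miss, position is the insertion point."""
    end_pos = len(entries)
    assert 0 <= start_pos <= end_pos
    while start_pos < end_pos:
        mid_pos = start_pos + (end_pos - start_pos) // 2
        if entries[mid_pos] == key:
            return mid_pos, True       # exact match
        elif key < entries[mid_pos]:
            end_pos = mid_pos          # search the left half
        else:
            start_pos = mid_pos + 1    # search the right half
    return start_pos, False

print(search_sketch([10, 20, 30, 40], 30))  # (2, True)
print(search_sketch([10, 20, 30, 40], 25))  # (2, False)
```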
java | public org.osmdroid.views.overlay.Polygon toCurvePolygon(CurvePolygon curvePolygon) {
org.osmdroid.views.overlay.Polygon polygonOptions = new org.osmdroid.views.overlay.Polygon();
List<GeoPoint> pts = new ArrayList<>();
List<Curve> rings = curvePolygon.getRings();
List<List<GeoPoint>> holes = new ArrayList<>();
if (!rings.isEmpty()) {
Double z = null;
// Add the polygon points
Curve curve = rings.get(0);
if (curve instanceof CompoundCurve) {
CompoundCurve compoundCurve = (CompoundCurve) curve;
for (LineString lineString : compoundCurve.getLineStrings()) {
for (Point point : lineString.getPoints()) {
GeoPoint latLng = toLatLng(point);
pts.add(latLng);
}
}
} else if (curve instanceof LineString) {
LineString lineString = (LineString) curve;
for (Point point : lineString.getPoints()) {
GeoPoint latLng = toLatLng(point);
pts.add(latLng);
}
} else {
throw new GeoPackageException("Unsupported Curve Type: "
+ curve.getClass().getSimpleName());
}
// Add the holes
for (int i = 1; i < rings.size(); i++) {
Curve hole = rings.get(i);
List<GeoPoint> holeLatLngs = new ArrayList<GeoPoint>();
if (hole instanceof CompoundCurve) {
CompoundCurve holeCompoundCurve = (CompoundCurve) hole;
for (LineString holeLineString : holeCompoundCurve.getLineStrings()) {
for (Point point : holeLineString.getPoints()) {
GeoPoint latLng = toLatLng(point);
holeLatLngs.add(latLng);
}
}
} else if (hole instanceof LineString) {
LineString holeLineString = (LineString) hole;
for (Point point : holeLineString.getPoints()) {
GeoPoint latLng = toLatLng(point);
holeLatLngs.add(latLng);
if (point.hasZ()) {
z = (z == null) ? point.getZ() : Math.max(z,
point.getZ());
}
}
} else {
throw new GeoPackageException("Unsupported Curve Hole Type: "
+ hole.getClass().getSimpleName());
}
holes.add(holeLatLngs);
}
}
polygonOptions.setHoles(holes);
polygonOptions.setPoints(pts);
return polygonOptions;
} |
java | static void makeTypeInfoBits(ErrorCorrectionLevel ecLevel, int maskPattern, BitArray bits)
throws WriterException {
if (!QRCode.isValidMaskPattern(maskPattern)) {
throw new WriterException("Invalid mask pattern");
}
int typeInfo = (ecLevel.getBits() << 3) | maskPattern;
bits.appendBits(typeInfo, 5);
int bchCode = calculateBCHCode(typeInfo, TYPE_INFO_POLY);
bits.appendBits(bchCode, 10);
BitArray maskBits = new BitArray();
maskBits.appendBits(TYPE_INFO_MASK_PATTERN, 15);
bits.xor(maskBits);
if (bits.getSize() != 15) { // Just in case.
throw new WriterException("should not happen but we got: " + bits.getSize());
}
} |
java | static Object coerceTypeImpl(Class<?> type, Object value)
{
if (value != null && value.getClass() == type) {
return value;
}
switch (getJSTypeCode(value)) {
case JSTYPE_NULL:
// raise error if type.isPrimitive()
if (type.isPrimitive()) {
reportConversionError(value, type);
}
return null;
case JSTYPE_UNDEFINED:
if (type == ScriptRuntime.StringClass ||
type == ScriptRuntime.ObjectClass) {
return "undefined";
}
reportConversionError("undefined", type);
break;
case JSTYPE_BOOLEAN:
// Under LC3, only JS Booleans can be coerced into a Boolean value
if (type == Boolean.TYPE ||
type == ScriptRuntime.BooleanClass ||
type == ScriptRuntime.ObjectClass) {
return value;
}
else if (type == ScriptRuntime.StringClass) {
return value.toString();
}
else {
reportConversionError(value, type);
}
break;
case JSTYPE_NUMBER:
if (type == ScriptRuntime.StringClass) {
return ScriptRuntime.toString(value);
}
else if (type == ScriptRuntime.ObjectClass) {
Context context = Context.getCurrentContext();
if(context.hasFeature(Context.FEATURE_INTEGER_WITHOUT_DECIMAL_PLACE)) {
//to process numbers like 2.0 as 2 without decimal place
long roundedValue = Math.round(toDouble(value));
if(roundedValue == toDouble(value)) {
return coerceToNumber(Long.TYPE, value);
}
}
return coerceToNumber(Double.TYPE, value);
}
else if ((type.isPrimitive() && type != Boolean.TYPE) ||
ScriptRuntime.NumberClass.isAssignableFrom(type)) {
return coerceToNumber(type, value);
}
else {
reportConversionError(value, type);
}
break;
case JSTYPE_STRING:
if (type == ScriptRuntime.StringClass || type.isInstance(value)) {
return value.toString();
}
else if (type == Character.TYPE
|| type == ScriptRuntime.CharacterClass)
{
// Special case for converting a single char string to a
// character
// Placed here because it applies *only* to JS strings,
// not other JS objects converted to strings
if (((CharSequence)value).length() == 1) {
return Character.valueOf(((CharSequence)value).charAt(0));
}
return coerceToNumber(type, value);
}
else if ((type.isPrimitive() && type != Boolean.TYPE)
|| ScriptRuntime.NumberClass.isAssignableFrom(type))
{
return coerceToNumber(type, value);
}
else {
reportConversionError(value, type);
}
break;
case JSTYPE_JAVA_CLASS:
if (value instanceof Wrapper) {
value = ((Wrapper)value).unwrap();
}
if (type == ScriptRuntime.ClassClass ||
type == ScriptRuntime.ObjectClass) {
return value;
}
else if (type == ScriptRuntime.StringClass) {
return value.toString();
}
else {
reportConversionError(value, type);
}
break;
case JSTYPE_JAVA_OBJECT:
case JSTYPE_JAVA_ARRAY:
if (value instanceof Wrapper) {
value = ((Wrapper)value).unwrap();
}
if (type.isPrimitive()) {
if (type == Boolean.TYPE) {
reportConversionError(value, type);
}
return coerceToNumber(type, value);
}
if (type == ScriptRuntime.StringClass) {
return value.toString();
}
if (type.isInstance(value)) {
return value;
}
reportConversionError(value, type);
break;
case JSTYPE_OBJECT:
if (type == ScriptRuntime.StringClass) {
return ScriptRuntime.toString(value);
}
else if (type.isPrimitive()) {
if (type == Boolean.TYPE) {
reportConversionError(value, type);
}
return coerceToNumber(type, value);
}
else if (type.isInstance(value)) {
return value;
}
else if (type == ScriptRuntime.DateClass
&& value instanceof NativeDate)
{
double time = ((NativeDate)value).getJSTimeValue();
// XXX: This will replace NaN by 0
return new Date((long)time);
}
else if (type.isArray() && value instanceof NativeArray) {
// Make a new java array, and coerce the JS array components
// to the target (component) type.
NativeArray array = (NativeArray) value;
long length = array.getLength();
Class<?> arrayType = type.getComponentType();
Object Result = Array.newInstance(arrayType, (int)length);
for (int i = 0 ; i < length ; ++i) {
try {
Array.set(Result, i, coerceTypeImpl(
arrayType, array.get(i, array)));
}
catch (EvaluatorException ee) {
reportConversionError(value, type);
}
}
return Result;
}
else if (value instanceof Wrapper) {
value = ((Wrapper)value).unwrap();
if (type.isInstance(value))
return value;
reportConversionError(value, type);
}
else if (type.isInterface() && (value instanceof NativeObject
|| value instanceof NativeFunction)) {
// Try to use function/object as implementation of Java interface.
return createInterfaceAdapter(type, (ScriptableObject) value);
} else {
reportConversionError(value, type);
}
break;
}
return value;
} |
java | public static String printToUnicodeString(final UnknownFieldSet fields) {
try {
final StringBuilder text = new StringBuilder();
UNICODE_PRINTER.printUnknownFields(fields, new TextGenerator(text));
return text.toString();
} catch (IOException e) {
throw new IllegalStateException(e);
}
} |
python | def add_mpl_colorscale(fig, heatmap_gs, ax_map, params, title=None):
"""Add colour scale to heatmap."""
# Set tick intervals
cbticks = [params.vmin + e * params.vdiff for e in (0, 0.25, 0.5, 0.75, 1)]
if params.vmax > 10:
exponent = int(floor(log10(params.vmax))) - 1
cbticks = [int(round(e, -exponent)) for e in cbticks]
scale_subplot = gridspec.GridSpecFromSubplotSpec(
1, 3, subplot_spec=heatmap_gs[0, 0], wspace=0.0, hspace=0.0
)
scale_ax = fig.add_subplot(scale_subplot[0, 1])
cbar = fig.colorbar(ax_map, scale_ax, ticks=cbticks)
if title:
cbar.set_label(title, fontsize=6)
cbar.ax.yaxis.set_ticks_position("left")
cbar.ax.yaxis.set_label_position("left")
cbar.ax.tick_params(labelsize=6)
cbar.outline.set_linewidth(0)
return cbar |
java | private void extractIndirectionTables(DescriptorRepository model, Database schema)
{
HashMap indirectionTables = new HashMap();
// first we gather all participants for each m:n relationship
for (Iterator classDescIt = model.getDescriptorTable().values().iterator(); classDescIt.hasNext();)
{
ClassDescriptor classDesc = (ClassDescriptor)classDescIt.next();
for (Iterator collDescIt = classDesc.getCollectionDescriptors().iterator(); collDescIt.hasNext();)
{
CollectionDescriptor collDesc = (CollectionDescriptor)collDescIt.next();
String indirTable = collDesc.getIndirectionTable();
if ((indirTable != null) && (indirTable.length() > 0))
{
Set columns = (Set)indirectionTables.get(indirTable);
if (columns == null)
{
columns = new HashSet();
indirectionTables.put(indirTable, columns);
}
columns.addAll(Arrays.asList(collDesc.getFksToThisClass()));
columns.addAll(Arrays.asList(collDesc.getFksToItemClass()));
}
}
}
if (indirectionTables.isEmpty())
{
// nothing to do
return;
}
for (Iterator it = indirectionTables.keySet().iterator(); it.hasNext();)
{
String tableName = (String)it.next();
Set columns = (Set)indirectionTables.get(tableName);
String elementName = tableName;
for (Iterator classDescIt = model.getDescriptorTable().values().iterator(); classDescIt.hasNext();)
{
ClassDescriptor classDesc = (ClassDescriptor)classDescIt.next();
if (tableName.equals(classDesc.getFullTableName()))
{
elementName = getElementName(classDesc);
FieldDescriptor[] fieldDescs = classDesc.getFieldDescriptions();
if (fieldDescs != null)
{
for (int idx = 0; idx < fieldDescs.length; idx++)
{
columns.remove(fieldDescs[idx].getColumnName());
}
}
}
}
Table mappedTable = getTableFor(elementName);
Map columnsMap = getColumnsFor(elementName);
Map requiredAttributes = getRequiredAttributes(elementName);
if (mappedTable == null)
{
mappedTable = schema.findTable(elementName);
if (mappedTable == null)
{
continue;
}
columnsMap = new TreeMap();
requiredAttributes = new HashMap();
_elementToTable.put(elementName, mappedTable);
_elementToColumnMap.put(elementName, columnsMap);
_elementToRequiredAttributesMap.put(elementName, requiredAttributes);
}
for (Iterator columnIt = columns.iterator(); columnIt.hasNext();)
{
String columnName = (String)columnIt.next();
Column column = mappedTable.findColumn(columnName);
if (column != null)
{
columnsMap.put(columnName, column);
requiredAttributes.put(columnName, Boolean.TRUE);
}
}
}
} |
python | def ellipse(self, x,y,w,h,style=''):
"Draw a ellipse"
if(style=='F'):
op='f'
elif(style=='FD' or style=='DF'):
op='B'
else:
op='S'
cx = x + w/2.0
cy = y + h/2.0
rx = w/2.0
ry = h/2.0
lx = 4.0/3.0*(math.sqrt(2)-1)*rx
ly = 4.0/3.0*(math.sqrt(2)-1)*ry
self._out(sprintf('%.2f %.2f m %.2f %.2f %.2f %.2f %.2f %.2f c',
(cx+rx)*self.k, (self.h-cy)*self.k,
(cx+rx)*self.k, (self.h-(cy-ly))*self.k,
(cx+lx)*self.k, (self.h-(cy-ry))*self.k,
cx*self.k, (self.h-(cy-ry))*self.k))
self._out(sprintf('%.2f %.2f %.2f %.2f %.2f %.2f c',
(cx-lx)*self.k, (self.h-(cy-ry))*self.k,
(cx-rx)*self.k, (self.h-(cy-ly))*self.k,
(cx-rx)*self.k, (self.h-cy)*self.k))
self._out(sprintf('%.2f %.2f %.2f %.2f %.2f %.2f c',
(cx-rx)*self.k, (self.h-(cy+ly))*self.k,
(cx-lx)*self.k, (self.h-(cy+ry))*self.k,
cx*self.k, (self.h-(cy+ry))*self.k))
self._out(sprintf('%.2f %.2f %.2f %.2f %.2f %.2f c %s',
(cx+lx)*self.k, (self.h-(cy+ry))*self.k,
(cx+rx)*self.k, (self.h-(cy+ly))*self.k,
(cx+rx)*self.k, (self.h-cy)*self.k,
op)) |
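The factor `4.0/3.0*(math.sqrt(2)-1)` used for `lx`/`ly` above is the usual control-point offset for approximating a quarter arc with a single cubic Bézier curve; as a quick check:

```python
import math

kappa = 4.0 / 3.0 * (math.sqrt(2) - 1)  # control-point offset as a fraction of the radius
print(round(kappa, 6))  # 0.552285
```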
java | protected void reportFailure (Throwable caught)
{
java.util.logging.Logger.getLogger("PagedWidget").warning("Failure to page: " + caught);
} |
java | public void write(Model project, Document document, String encoding, OutputStreamWriter writer)
throws java.io.IOException
{
Format format = Format.getRawFormat().setEncoding(encoding).setLineSeparator(LS);
write(project, document, writer, format);
} |
java | private String getInternalName(Klass k) {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
k.printValueOn(new PrintStream(bos));
// '*' is used to denote VM internal klasses.
return "* " + bos.toString();
} |
java | public Photos getPhotos(String galleryId, EnumSet<JinxConstants.PhotoExtras> extras) throws JinxException {
JinxUtils.validateParams(galleryId);
Map<String, String> params = new TreeMap<>();
params.put("method", "flickr.galleries.getPhotos");
params.put("gallery_id", galleryId);
if (!JinxUtils.isNullOrEmpty(extras)) {
params.put("extras", JinxUtils.buildCommaDelimitedList(extras));
}
return jinx.flickrGet(params, Photos.class);
} |
python | def convert_logistic_regression_output(node, **kwargs):
"""Map MXNet's SoftmaxOutput operator attributes to onnx's Softmax operator
and return the created node.
"""
name = node["name"]
input1_idx = kwargs["index_lookup"][node["inputs"][0][0]]
input1 = kwargs["proc_nodes"][input1_idx]
sigmoid_node = onnx.helper.make_node(
"Sigmoid",
[input1.name],
[name],
name=name
)
return [sigmoid_node] |
java | private void put(PrintStream p, String s, int column, int colSpan, BandElement bandElement) {
if (s == null) {
// nl();
put(p, "", column, colSpan, bandElement);
return;
}
int size = 0;
if (colSpan > 1) {
for (int i=column; i<column+colSpan; i++) {
size += columnWidth[i];
}
} else {
size = columnWidth[column];
}
if ((bandElement != null) && bandElement.getHorizontalAlign() == BandElement.RIGHT) {
p.print(String.format("%" + size + "s", s));
} else {
p.print(String.format("%-" + size + "s", s));
}
} |
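The `String.format("%" + size + "s", s)` / `"%-" + size + "s"` pair right- or left-pads the cell text to the column width; the same idea in a quick Python sketch (the width is illustrative):

```python
size = 10
print('[%*s]' % (size, 'total'))   # [     total]  right-aligned, like "%10s"
print('[%-*s]' % (size, 'total'))  # [total     ]  left-aligned, like "%-10s"
```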
python | def view_task_info(token, dstore):
"""
Display statistical information about the tasks performance.
It is possible to get full information about a specific task
with a command like this one, for a classical calculation::
$ oq show task_info:classical
"""
args = token.split(':')[1:] # called as task_info:task_name
if args:
[task] = args
array = dstore['task_info/' + task].value
rduration = array['duration'] / array['weight']
data = util.compose_arrays(rduration, array, 'rduration')
data.sort(order='duration')
return rst_table(data)
data = ['operation-duration mean stddev min max outputs'.split()]
for task in dstore['task_info']:
val = dstore['task_info/' + task]['duration']
if len(val):
data.append(stats(task, val))
if len(data) == 1:
return 'Not available'
return rst_table(data) |
java | @Override
public void doSessionCreated(ManagementContext managementContext,
ServiceManagementBean serviceBean,
IoSessionEx session,
ManagementSessionType managementSessionType) throws Exception {
SessionManagementBean sessionBean = managementContext.addSessionManagementBean(serviceBean, session);
serviceBean.storeSessionManagementBean(sessionBean);
managementContext.getManagementSessionStrategy().doSessionCreated(sessionBean);
super.doSessionCreated(managementContext, serviceBean, session, managementSessionType);
} |
java | private void unlockInherited(final String absoluteResourcename) throws CmsException {
CmsObject cms = getCms();
CmsLock parentLock = getParentLock(absoluteResourcename);
if (!parentLock.isNullLock()) {
if (parentLock.isInherited()) {
unlockInherited(parentLock.getResourceName());
} else {
if (!parentLock.isLockableBy(cms.getRequestContext().getCurrentUser())) {
cms.changeLock(cms.getRequestContext().removeSiteRoot(parentLock.getResourceName()));
}
cms.unlockResource(cms.getRequestContext().removeSiteRoot(parentLock.getResourceName()));
}
}
} |
python | def uid(self):
"""Return the user id that the process will run as
:rtype: int
"""
if not self._uid:
if self.config.daemon.user:
self._uid = pwd.getpwnam(self.config.daemon.user).pw_uid
else:
self._uid = os.getuid()
return self._uid |
java | public ServiceFuture<List<BlobContainerInner>> listStorageContainersAsync(final String resourceGroupName, final String accountName, final String storageAccountName, final ListOperationCallback<BlobContainerInner> serviceCallback) {
return AzureServiceFuture.fromPageResponse(
listStorageContainersSinglePageAsync(resourceGroupName, accountName, storageAccountName),
new Func1<String, Observable<ServiceResponse<Page<BlobContainerInner>>>>() {
@Override
public Observable<ServiceResponse<Page<BlobContainerInner>>> call(String nextPageLink) {
return listStorageContainersNextSinglePageAsync(nextPageLink);
}
},
serviceCallback);
} |
python | def quartus_prop(self, buff: List[str], intfName: str, name: str, value,
escapeStr=True):
"""
Set property on interface in Quartus TCL
:param buff: line buffer for output
:param intfName: name of interface to set property on
:param name: property name
:param value: property value
:param escapeStr: flag; if True, wrap string property values in extra ""
"""
if escapeStr and isinstance(value, str):
value = '"%s"' % value
elif isinstance(value, bool):
value = str(value).lower()
else:
value = str(value)
buff.append("set_interface_property %s %s %s" %
(intfName, name, value)) |
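The value handling above can be summarised in a small standalone helper (hypothetical name, shown only to illustrate the formatting rules):

```python
def format_tcl_value(value, escape_str=True):
    # Strings may be wrapped in double quotes; booleans become "true"/"false";
    # everything else is passed through str().
    if escape_str and isinstance(value, str):
        return '"%s"' % value
    elif isinstance(value, bool):
        return str(value).lower()
    return str(value)

print(format_tcl_value("AXI4"))  # "AXI4"
print(format_tcl_value(True))    # true
print(format_tcl_value(32))      # 32
```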
python | def create_domain(self, domain, ipaddr, params=None):
''' /v1/dns/create_domain
POST - account
Create a domain name in DNS
Link: https://www.vultr.com/api/#dns_create_domain
'''
params = update_params(params, {
'domain': domain,
'ip': ipaddr
})
return self.request('/v1/dns/create_domain', params, 'POST') |
java | public QueryBuilder populateFilterBuilder(Expression condtionalExp, EntityMetadata m)
{
log.info("Populating filter for expression: " + condtionalExp);
QueryBuilder filter = null;
if (condtionalExp instanceof SubExpression)
{
filter = populateFilterBuilder(((SubExpression) condtionalExp).getExpression(), m);
}
else if (condtionalExp instanceof ComparisonExpression)
{
filter = getFilter(populateFilterClause((ComparisonExpression) condtionalExp), m);
}
else if (condtionalExp instanceof BetweenExpression)
{
filter = populateBetweenFilter((BetweenExpression) condtionalExp, m);
}
else if (condtionalExp instanceof LogicalExpression)
{
filter = populateLogicalFilterBuilder(condtionalExp, m);
}
else if (condtionalExp instanceof LikeExpression)
{
filter = populateLikeQuery((LikeExpression) condtionalExp, m);
}
else if (condtionalExp instanceof InExpression)
{
filter = populateInQuery((InExpression) condtionalExp, m);
}
else
{
log.error(condtionalExp.toParsedText() + " found in where clause. Not supported in elasticsearch.");
throw new KunderaException(condtionalExp.toParsedText() + " not supported in ElasticSearch");
}
log.debug("Following is the populated filter for required query: " + filter);
return filter;
} |
python | def linspace_pix(self, start=None, stop=None, pixel_step=1, y_vs_x=None):
"""Return x,y values evaluated with a given pixel step.
The returned values are computed within the corresponding
bounding box of the line.
Parameters
----------
start : float
Minimum pixel coordinate to evaluate the independent
variable.
stop : float
Maximum pixel coordinate to evaluate the independent
variable.
pixel_step : float
Pixel step employed to evaluate the independent variable.
y_vs_x : bool
If True, the polynomial fit is assumed to be Y vs X.
Otherwise, X vs Y is employed.
Returns
-------
x : 1d numpy array
X coordinates.
y : 1d numpy array
Y coordinates.
"""
if y_vs_x:
if start is None:
xmin = self.bb_nc1_orig
else:
xmin = start
if stop is None:
xmax = self.bb_nc2_orig
else:
xmax = stop
num = int(float(xmax-xmin+1)/float(pixel_step)+0.5)
x = np.linspace(start=xmin, stop=xmax, num=num)
y = self.poly_funct(x)
else:
if start is None:
ymin = self.bb_ns1_orig
else:
ymin = start
if stop is None:
ymax = self.bb_ns2_orig
else:
ymax = stop
num = int(float(ymax-ymin+1)/float(pixel_step)+0.5)
y = np.linspace(start=ymin, stop=ymax, num=num)
x = self.poly_funct(y)
return x, y |
java | @Indexable(type = IndexableType.REINDEX)
@Override
public CommerceNotificationTemplateUserSegmentRel updateCommerceNotificationTemplateUserSegmentRel(
CommerceNotificationTemplateUserSegmentRel commerceNotificationTemplateUserSegmentRel) {
return commerceNotificationTemplateUserSegmentRelPersistence.update(commerceNotificationTemplateUserSegmentRel);
} |
java | protected AqlQueryOptions mergeQueryOptions(final AqlQueryOptions oldStatic, final AqlQueryOptions newDynamic) {
if (oldStatic == null) {
return newDynamic;
}
if (newDynamic == null) {
return oldStatic;
}
final Integer batchSize = newDynamic.getBatchSize();
if (batchSize != null) {
oldStatic.batchSize(batchSize);
}
final Integer maxPlans = newDynamic.getMaxPlans();
if (maxPlans != null) {
oldStatic.maxPlans(maxPlans);
}
final Integer ttl = newDynamic.getTtl();
if (ttl != null) {
oldStatic.ttl(ttl);
}
final Boolean cache = newDynamic.getCache();
if (cache != null) {
oldStatic.cache(cache);
}
final Boolean count = newDynamic.getCount();
if (count != null) {
oldStatic.count(count);
}
final Boolean fullCount = newDynamic.getFullCount();
if (fullCount != null) {
oldStatic.fullCount(fullCount);
}
final Boolean profile = newDynamic.getProfile();
if (profile != null) {
oldStatic.profile(profile);
}
final Collection<String> rules = newDynamic.getRules();
if (rules != null) {
oldStatic.rules(rules);
}
return oldStatic;
} |
python | def CreateSms(self, MessageType, *TargetNumbers):
"""Creates an SMS message.
:Parameters:
MessageType : `enums`.smsMessageType*
Message type.
TargetNumbers : str
One or more target SMS numbers.
:return: An sms message object.
:rtype: `SmsMessage`
"""
return SmsMessage(self, chop(self._DoCommand('CREATE SMS %s %s' % (MessageType, ', '.join(TargetNumbers))), 2)[1]) |
python | def replace(self, seq):
'''
Performs search and replace on the given input string `seq` using
the values stored in this trie. This method uses a O(n**2)
chart-parsing algorithm to find the optimal way of replacing
matches in the input.
Arguments:
- `seq`:
'''
# #1: seq must be stored in a container with a len() function
seq = list(seq)
# chart is a (n-1) X (n) table
# chart[0] represents all matches of length (0+1) = 1
# chart[n-1] represents all matches/rewrites of length (n-1+1) = n
# chart[0][0] represents a match of length 1 starting at character 0
# chart[0][n-1] represents a match of length 1 starting at character n-1
# cells in the chart are tuples:
# (score, list)
# we initialise chart by filling in row 0:
# each cell gets assigned (0, char), where char is the character at
# the corresponding position in the input string
chart = [ [None for _i in range(len(seq)) ] for _i in range(len(seq)) ]
chart[0] = [(0, char) for char in seq]
# now we fill in the chart using the results from the aho-corasick
# string matches
for (begin, length, value) in self.find_all(seq):
chart[length-1][begin] = (length, value)
# now we need to fill in the chart row by row, starting with row 1
for row in range(1, len(chart)):
# each row is 1 cell shorter than the last
for col in range(len(seq) - row):
# the entry in [row][col] is the choice with the highest score; to
# find this, we must search the possible partitions of the cell
#
# things on row 2 have only one possible partition: 1 + 1
# things on row 3 have two: 1 + 2, 2 + 1
# things on row 4 have three: 1+3, 3+1, 2+2
#
# we assume that any pre-existing entry found by aho-corasick
# in a cell is already optimal
#print('scanning [{}][{}]'.format(row, col))
if chart[row][col] is not None:
continue
# chart[1][2] is the cell of matches of length 2 starting at
# character position 2;
# it can only be composed of chart[0][2] + chart[0][3]
#
# partition_point is the length of the first of the two parts
# of the cell
#print('cell[{}][{}] => '.format(row, col))
best_score = -1
best_value = None
for partition_point in range(row):
# the two cells will be [partition_point][col] and
# [row - partition_point - 2][col+partition_point+1]
x1 = partition_point
y1 = col
x2 = row - partition_point - 1
y2 = col + partition_point + 1
#print(' [{}][{}] + [{}][{}]'.format(x1, y1, x2, y2))
s1, v1 = chart[x1][y1]
s2, v2 = chart[x2][y2]
# compute the score
score = s1 + s2
#print(' = {} + {}'.format((s1, v1), (s2, v2)))
#print(' = score {}'.format(score))
if best_score < score:
best_score = score
best_value = v1 + v2
chart[row][col] = (best_score, best_value)
#print(' sets new best score with value {}'.format(
# best_value))
# now the optimal solution is stored at the top of the chart
return chart[len(seq)-1][0][1] |
java | public static void resetBean(MethodSpec.Builder methodBuilder, TypeName beanClass, String beanName,
ModelProperty property, String cursorName, String indexName) {
SQLTransform transform = lookup(property.getElement().asType());
if (transform == null) {
throw new IllegalArgumentException("Transform of " + property.getElement().asType() + " not supported");
}
transform.generateResetProperty(methodBuilder, beanClass, beanName, property, cursorName, indexName);
} |
java | protected <T extends CSSProperty> boolean genericProperty(Class<T> type,
TermIdent term, boolean avoidInherit,
Map<String, CSSProperty> properties, String propertyName) {
T property = genericPropertyRaw(type, null, term);
if (property == null || (avoidInherit && property.equalsInherit()))
return false;
properties.put(propertyName, property);
return true;
} |
java | private String getContentFromPath(String sourcePath,
HostsSourceType sourceType) throws IOException {
String res = "";
if (sourceType == HostsSourceType.LOCAL_FILE) {
res = PcFileNetworkIoUtils.readFileContentToString(sourcePath);
} else if (sourceType == HostsSourceType.URL) {
res = PcFileNetworkIoUtils.readStringFromUrlGeneric(sourcePath);
}
return res;
} |
python | def p_constant_def(t):
"""constant_def : CONST ID EQUALS constant SEMI"""
global name_dict
id = t[2]
value = t[4]
lineno = t.lineno(1)
if id_unique(id, 'constant', lineno):
name_dict[id] = const_info(id, value, lineno) |
java | public void validateNonce(ConsumerDetails consumerDetails, long timestamp, String nonce) throws AuthenticationException {
long nowSeconds = (System.currentTimeMillis() / 1000);
if ((nowSeconds - timestamp) > getValidityWindowSeconds()) {
throw new CredentialsExpiredException("Expired timestamp.");
}
} |
java | public QueryBuilder parentIds(final Set<Integer> ids) {
parentIds = new HashSet<Integer>();
if (ids != null) {
parentIds.addAll(ids);
}
return this;
} |
java | @Override
public CProduct findByUuid_Last(String uuid,
OrderByComparator<CProduct> orderByComparator)
throws NoSuchCProductException {
CProduct cProduct = fetchByUuid_Last(uuid, orderByComparator);
if (cProduct != null) {
return cProduct;
}
StringBundler msg = new StringBundler(4);
msg.append(_NO_SUCH_ENTITY_WITH_KEY);
msg.append("uuid=");
msg.append(uuid);
msg.append("}");
throw new NoSuchCProductException(msg.toString());
} |
java | public static String getSettingForApp(App app, String key, String defaultValue) {
if (app != null) {
Map<String, Object> settings = app.getSettings();
if (settings.containsKey(key)) {
return String.valueOf(settings.getOrDefault(key, defaultValue));
} else if (app.isRootApp()) {
return Config.getConfigParam(key, defaultValue);
}
}
return defaultValue;
} |
python | def handle_aliases_in_init_files(name, import_alias_mapping):
"""Returns either None or the handled alias.
Used in add_module.
"""
for key, val in import_alias_mapping.items():
# e.g. Foo == Foo
# e.g. Foo.Bar startswith Foo.
if name == val or \
name.startswith(val + '.'):
# Replace val with key in name
# e.g. StarbucksVisitor.Tea -> Eataly.Tea because
# "from .nested_folder import StarbucksVisitor as Eataly"
return name.replace(val, key)
return None |
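A usage sketch, assuming `handle_aliases_in_init_files` from above is importable; the mapping mirrors the example in its comments (alias as key, original name as value):

```python
import_alias_mapping = {'Eataly': 'StarbucksVisitor'}

print(handle_aliases_in_init_files('StarbucksVisitor.Tea', import_alias_mapping))
# Eataly.Tea
print(handle_aliases_in_init_files('Unrelated.Name', import_alias_mapping))
# None
```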
java | public void setReplicationTasks(java.util.Collection<ReplicationTask> replicationTasks) {
if (replicationTasks == null) {
this.replicationTasks = null;
return;
}
this.replicationTasks = new java.util.ArrayList<ReplicationTask>(replicationTasks);
} |
python | def register(self, resource=None, **kwargs):
""" Register resource for currnet API.
:param resource: Resource to be registered
:type resource: jsonapi.resource.Resource or None
:return: resource
:rtype: jsonapi.resource.Resource
.. versionadded:: 0.4.1
:param kwargs: Extra meta parameters
"""
if resource is None:
def wrapper(resource):
return self.register(resource, **kwargs)
return wrapper
for key, value in kwargs.items():
setattr(resource.Meta, key, value)
if resource.Meta.name in self.resource_map:
raise ValueError('Resource {} already registered'.format(
resource.Meta.name))
if resource.Meta.name_plural in self.resource_map:
raise ValueError(
'Resource plural name {} conflicts with registered resource'.
format(resource.Meta.name))
resource_plural_names = {
r.Meta.name_plural for r in self.resource_map.values()
}
if resource.Meta.name in resource_plural_names:
raise ValueError(
'Resource name {} conflicts with other resource plural name'.
format(resource.Meta.name)
)
resource.Meta.api = self
self._resources.append(resource)
return resource |
python | def doesnt_have(self, relation, boolean='and', extra=None):
"""
Add a relationship count to the query.
:param relation: The relation to count
:type relation: str
:param boolean: The boolean value
:type boolean: str
:param extra: The extra query
:type extra: Builder or callable
:rtype: Builder
"""
return self.has(relation, '<', 1, boolean, extra) |
python | def _graph_connected_component(graph, node_id):
"""
Find the largest connected component of the graph that contains a
given node
Parameters
----------
graph : array-like, shape: (n_samples, n_samples)
adjacency matrix of the graph, non-zero weight means an edge
between the nodes
node_id : int
The index of the query node of the graph
Returns
-------
connected_components : array-like, shape: (n_samples,)
An array of bool value indicates the indexes of the nodes
belong to the largest connected components of the given query
node
"""
connected_components = np.zeros(shape=(graph.shape[0]), dtype=bool)  # np.bool was a deprecated alias for the builtin bool
connected_components[node_id] = True
n_node = graph.shape[0]
for i in range(n_node):
last_num_component = connected_components.sum()
_, node_to_add = np.where(graph[connected_components] != 0)
connected_components[node_to_add] = True
if last_num_component >= connected_components.sum():
break
return connected_components |
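A usage sketch, assuming `_graph_connected_component` from above is in scope; the adjacency matrix is a made-up 3-node graph with nodes 0 and 1 connected and node 2 isolated:

```python
import numpy as np

graph = np.array([[0, 1, 0],
                  [1, 0, 0],
                  [0, 0, 0]])
print(_graph_connected_component(graph, 0))  # [ True  True False]
print(_graph_connected_component(graph, 2))  # [False False  True]
```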
java | public Observable<PolicyDefinitionInner> getAsync(String policyDefinitionName) {
return getWithServiceResponseAsync(policyDefinitionName).map(new Func1<ServiceResponse<PolicyDefinitionInner>, PolicyDefinitionInner>() {
@Override
public PolicyDefinitionInner call(ServiceResponse<PolicyDefinitionInner> response) {
return response.body();
}
});
} |
python | def get_declared_items(self):
""" Get the members that were set in the enamldef block for this
Declaration. Layout keys are grouped together until the end so as
to avoid triggering multiple updates.
Returns
-------
result: List of (k,v) pairs that were defined for this widget in enaml
List of keys and values
"""
d = self.declaration
engine = d._d_engine
if engine:
layout = {}
for k, h in engine._handlers.items():
# Handlers with read operations
if not h.read_pair:
continue
v = getattr(d, k)
if k in LAYOUT_KEYS:
layout[k] = v
continue
yield (k, v)
if layout:
yield ('layout', layout) |
java | private void handleUndeliverableMessage(
DestinationHandler destinationHandler,
LinkHandler linkHandler,
SIMPMessage msg,
int exceptionReason,
String[] exceptionInserts,
TransactionCommon tran) throws SIResourceException
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(
tc,
"handleUndeliverableMessage",
new Object[] { destinationHandler, linkHandler, msg, new Integer(exceptionReason), exceptionInserts,
tran });
// Destination exception destination handler
ExceptionDestinationHandlerImpl destExceptionDestination = null;
// Link exception destination handler
ExceptionDestinationHandlerImpl linkExceptionDestination = null;
// Create handlers associated with the destination and link handlers
if (destinationHandler == null)
{
if (linkHandler == null)
{
// Where both handlers are null retain the behaviour that uses the default exception destination.
destExceptionDestination = new ExceptionDestinationHandlerImpl(null, _messageProcessor);
}
// if the linkHandler is non null, then we'll establish a linkExceptionDestination below
}
else
{
// destinationHandler is non null
destExceptionDestination = new ExceptionDestinationHandlerImpl(destinationHandler);
} // eof destinationHandler != null
UndeliverableReturnCode rc = UndeliverableReturnCode.OK;
// Pass the undeliverable message to the destination exception destination if it is not null
if (destExceptionDestination != null)
{
rc =
destExceptionDestination.handleUndeliverableMessage(
msg,
tran,
exceptionReason,
exceptionInserts);
}
// If no destination exception destination was established or if we got a BLOCK return from the use
// of the destination exception destination, then we drive the link exception destination
if (destExceptionDestination == null || rc == UndeliverableReturnCode.BLOCK)
{
// Get the Link Exception Destination
if (linkHandler != null)
{
linkExceptionDestination = new ExceptionDestinationHandlerImpl(linkHandler);
rc =
linkExceptionDestination.handleUndeliverableMessage(
msg,
tran,
exceptionReason,
exceptionInserts);
}
}
// If the ExceptionDestinationHandlerImpl.handleUndeliverableMessage() call returned either a
// "BLOCK" or an "ERROR" return then we throw an exception. BLOCK will be returned if the exception
// destination is full or if a null or empty exception destination was configured.
//
// In the error case we generate an FFDC but in both cases we throw an exception to get the caller to
// fail the attempt to store the message, forcing it to either return the error or retry at it's discretion.
if (rc == UndeliverableReturnCode.BLOCK)
{
// Throw an exception
SIResourceException e = new SIResourceException(
nls.getFormattedMessage(
"INTERNAL_MESSAGING_ERROR_CWSIP0005",
new Object[] {
"com.ibm.ws.sib.processor.impl.PtoPInputHandler",
"1:3103:1.323",
rc },
null));
SibTr.exception(tc, e);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "handleUndeliverableMessage", e);
throw e;
}
// An ERROR just isn't acceptable
else if (rc == UndeliverableReturnCode.ERROR)
{
//We cannot put the message to the exception destination. Throw an
//exception and trace FFST.
SIErrorException e = new SIErrorException(
nls.getFormattedMessage(
"INTERNAL_MESSAGING_ERROR_CWSIP0005",
new Object[] {
"com.ibm.ws.sib.processor.impl.PtoPInputHandler",
"1:3124:1.323",
rc },
null));
FFDCFilter.processException(
e,
"com.ibm.ws.sib.processor.impl.PtoPInputHandler.handleUndeliverableMessage",
"1:3131:1.323",
this);
SibTr.exception(tc, e);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "handleUndeliverableMessage", e);
throw e;
}
// otherwise a DISCARD is equivalent to an OK so let it go
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "handleUndeliverableMessage");
return;
} |
python | def list(self, host_rec=None, service_rec=None, hostfilter=None):
"""
Returns a list of vulnerabilities based on t_hosts.id or t_services.id.
If neither is set, statistical results are added
:param host_rec: db.t_hosts.id
:param service_rec: db.t_services.id
:param hostfilter: Valid hostfilter or None
:return: [(vulndata) ...] if host_rec or service_rec set
:return: [(vulndata, vuln_cnt, [vuln_ip, ...], [services ...]) ...] if nothing sent
"""
return self.send.vuln_list(host_rec, service_rec, hostfilter) |
python | def _partialParseDateStr(self, s, sourceTime):
"""
test if given C{s} matched CRE_DATE3, used by L{parse()}
@type s: string
@param s: date/time text to evaluate
@type sourceTime: struct_time
@param sourceTime: C{struct_time} value to use as the base
@rtype: tuple
@return: tuple of remaining date/time text, datetime object and
a boolean value describing whether a match was found
"""
parseStr = None
chunk1 = chunk2 = ''
m = self.ptc.CRE_DATE3.search(s)
# NO LONGER NEEDED, THE REGEXP HANDLED MTHNAME NOW
# for match in self.ptc.CRE_DATE3.finditer(s):
# to prevent "HH:MM(:SS) time strings" expressions from
# triggering this regex, we checks if the month field
# exists in the searched expression, if it doesn't exist,
# the date field is not valid
# if match.group('mthname'):
# m = self.ptc.CRE_DATE3.search(s, match.start())
# valid_date = True
# break
# String date format
if m is not None:
if (m.group('date') != s):
# capture remaining string
mStart = m.start('date')
mEnd = m.end('date')
# we need to check that anything following the parsed
# date is a time expression because it is often picked
# up as a valid year if the hour is 2 digits
fTime = False
mm = self.ptc.CRE_TIMEHMS2.search(s)
# "February 24th 1PM" doesn't get caught
# "February 24th 12PM" does
mYear = m.group('year')
if mm is not None and mYear is not None:
fTime = True
else:
# "February 24th 12:00"
mm = self.ptc.CRE_TIMEHMS.search(s)
if mm is not None and mYear is None:
fTime = True
if fTime:
hoursStart = mm.start('hours')
if hoursStart < m.end('year'):
mEnd = hoursStart
parseStr = s[mStart:mEnd]
chunk1 = s[:mStart]
chunk2 = s[mEnd:]
s = '%s %s' % (chunk1, chunk2)
else:
parseStr = s
s = ''
if parseStr:
debug and log.debug(
'found (date3) [%s][%s][%s]', parseStr, chunk1, chunk2)
sourceTime = self._evalDateStr(parseStr, sourceTime)
return s, sourceTime, bool(parseStr) |
java | public void addFilterAt(IRuleFilter filter, Class<? extends IRuleFilter> atFilter) {
int index = getIndexOfClass(filters, atFilter);
if (index == -1) {
throw new FilterAddException("filter " + atFilter.getSimpleName() + " has not been added");
}
filters.remove(index);
filters.add(index, filter);
} |
java | public static Object[] extract(String xmltext) throws AesException {
Object[] result = new Object[3];
try {
DocumentBuilder db = Wxs.xmls();
StringReader sr = new StringReader(xmltext);
InputSource is = new InputSource(sr);
Document document = db.parse(is);
Element root = document.getDocumentElement();
NodeList nodelist1 = root.getElementsByTagName("Encrypt");
NodeList nodelist2 = root.getElementsByTagName("ToUserName");
result[0] = 0;
result[1] = nodelist1.item(0).getTextContent();
result[2] = nodelist2.item(0).getTextContent();
return result;
} catch (Exception e) {
e.printStackTrace();
throw new AesException(AesException.ParseXmlError);
}
} |
java | public void writeUnmodifiedUTF (String str)
throws IOException
{
// byte[] bytes = str.getBytes(Charsets.UTF_8); // TODO Java 6 (Charsets is from guava)
byte[] bytes = str.getBytes("UTF-8");
writeShort(bytes.length);
write(bytes);
} |
java | private static int[] filter(int[] org, int skip1, int skip2) {
int n = 0;
int[] dest = new int[org.length - 2];
for (int w : org) {
if (w != skip1 && w != skip2) dest[n++] = w;
}
return dest;
} |
java | public static Img createRemoteImg(BufferedImage bimg){
int type = bimg.getRaster().getDataBuffer().getDataType();
if(type != DataBuffer.TYPE_INT){
throw new IllegalArgumentException(
String.format("cannot create Img as remote of provided BufferedImage!%n"
+ "Need BufferedImage with DataBuffer of type TYPE_INT (%d). Provided type: %d",
DataBuffer.TYPE_INT, type));
}
Img img = new Img(
new Dimension(bimg.getWidth(),bimg.getHeight()),
((DataBufferInt)bimg.getRaster().getDataBuffer()).getData()
);
return img;
} |
java | public void releaseLock(Object key) {
ReentrantReadWriteLock lock = getLock(key);
if (lock.isWriteLockedByCurrentThread()) {
lock.writeLock().unlock();
if (trace) log.tracef("WL released for '%s'", key);
} else {
lock.readLock().unlock();
if (trace) log.tracef("RL released for '%s'", key);
}
} |
python | def is_handler(cls, name, value):
"""Detect an handler and return its wanted signal name."""
signal_name = False
config = None
if callable(value) and hasattr(value, SPEC_CONTAINER_MEMBER_NAME):
spec = getattr(value, SPEC_CONTAINER_MEMBER_NAME)
if spec['kind'] == 'handler':
signal_name = spec['name']
config = spec['config']
return signal_name, config |
java | public Response bind(String name, Object model) {
getLocals().put(name, model);
return this;
} |
java | private MessageML parseMessageML(String messageML, String version) throws InvalidInputException, ProcessingException {
validateMessageText(messageML);
org.w3c.dom.Element docElement = parseDocument(messageML);
validateEntities(docElement, entityJson);
switch (docElement.getTagName()) {
case MessageML.MESSAGEML_TAG:
this.messageFormat = FormatEnum.MESSAGEML;
if (StringUtils.isBlank(version)) {
version = MessageML.MESSAGEML_VERSION;
}
break;
case MessageML.PRESENTATIONML_TAG:
this.messageFormat = FormatEnum.PRESENTATIONML;
break;
default:
throw new InvalidInputException("Root tag must be <" + MessageML.MESSAGEML_TAG + ">"
+ " or <" + MessageML.PRESENTATIONML_TAG + ">");
}
MessageML result = new MessageML(messageFormat, version);
result.buildAll(this, docElement);
result.validate();
return result;
} |
python | def append_position(path, position, separator=''):
"""
Concatenate a path and a position,
between the filename and the extension.
"""
filename, extension = os.path.splitext(path)
return ''.join([filename, separator, str(position), extension]) |
python | def memory(self):
"""Memory information in bytes
Example:
>>> print(ctx.device(0).memory())
{'total': 4238016512L, 'used': 434831360L, 'free': 3803185152L}
Returns:
total/used/free memory in bytes
"""
class GpuMemoryInfo(Structure):
_fields_ = [
('total', c_ulonglong),
('free', c_ulonglong),
('used', c_ulonglong),
]
c_memory = GpuMemoryInfo()
_check_return(_NVML.get_function(
"nvmlDeviceGetMemoryInfo")(self.hnd, byref(c_memory)))
return {'total': c_memory.total, 'free': c_memory.free, 'used': c_memory.used} |
python | def set_total_deposit(
self,
given_block_identifier: BlockSpecification,
channel_identifier: ChannelID,
total_deposit: TokenAmount,
partner: Address,
):
""" Set channel's total deposit.
`total_deposit` has to be monotonically increasing, this is enforced by
the `TokenNetwork` smart contract. This is done for the same reason why
the balance proofs have a monotonically increasing transferred amount,
it simplifies the analysis of bad behavior and the handling code of
out-dated balance proofs.
Races to `set_total_deposit` are handled by the smart contract, where
        the largest total deposit wins. The end balance of the funding accounts is
undefined. E.g.
- Acc1 calls set_total_deposit with 10 tokens
- Acc2 calls set_total_deposit with 13 tokens
        - If Acc2's transaction is mined first, then Acc1's token supply is left intact.
- If Acc1's transaction is mined first, then Acc2 will only move 3 tokens.
        Races for the same account don't have any unexpected side-effect.
Raises:
            DepositMismatch: If the newly requested total deposit is lower than the
existing total deposit on-chain for the `given_block_identifier`.
            RaidenRecoverableError: If the channel was closed while the
deposit was in transit.
            RaidenUnrecoverableError: If the transaction was successful and the
deposit_amount is not as large as the requested value.
RuntimeError: If the token address is empty.
ValueError: If an argument is of the invalid type.
"""
if not isinstance(total_deposit, int):
raise ValueError('total_deposit needs to be an integer number.')
token_address = self.token_address()
token = Token(
jsonrpc_client=self.client,
token_address=token_address,
contract_manager=self.contract_manager,
)
checking_block = self.client.get_checking_block()
error_prefix = 'setTotalDeposit call will fail'
with self.channel_operations_lock[partner], self.deposit_lock:
previous_total_deposit = self._detail_participant(
channel_identifier=channel_identifier,
participant=self.node_address,
partner=partner,
block_identifier=given_block_identifier,
).deposit
amount_to_deposit = TokenAmount(total_deposit - previous_total_deposit)
log_details = {
'token_network': pex(self.address),
'channel_identifier': channel_identifier,
'node': pex(self.node_address),
'partner': pex(partner),
'new_total_deposit': total_deposit,
'previous_total_deposit': previous_total_deposit,
}
try:
self._deposit_preconditions(
channel_identifier=channel_identifier,
total_deposit=total_deposit,
partner=partner,
token=token,
previous_total_deposit=previous_total_deposit,
log_details=log_details,
block_identifier=given_block_identifier,
)
except NoStateForBlockIdentifier:
# If preconditions end up being on pruned state skip them. Estimate
# gas will stop us from sending a transaction that will fail
pass
            # If there are channels being set up concurrently either the
# allowance must be accumulated *or* the calls to `approve` and
# `setTotalDeposit` must be serialized. This is necessary otherwise
# the deposit will fail.
#
# Calls to approve and setTotalDeposit are serialized with the
# deposit_lock to avoid transaction failure, because with two
# concurrent deposits, we may have the transactions executed in the
# following order
#
# - approve
# - approve
# - setTotalDeposit
# - setTotalDeposit
#
# in which case the second `approve` will overwrite the first,
# and the first `setTotalDeposit` will consume the allowance,
# making the second deposit fail.
token.approve(
allowed_address=Address(self.address),
allowance=amount_to_deposit,
)
gas_limit = self.proxy.estimate_gas(
checking_block,
'setTotalDeposit',
channel_identifier=channel_identifier,
participant=self.node_address,
total_deposit=total_deposit,
partner=partner,
)
if gas_limit:
gas_limit = safe_gas_limit(gas_limit, GAS_REQUIRED_FOR_SET_TOTAL_DEPOSIT)
error_prefix = 'setTotalDeposit call failed'
log.debug('setTotalDeposit called', **log_details)
transaction_hash = self.proxy.transact(
'setTotalDeposit',
gas_limit,
channel_identifier=channel_identifier,
participant=self.node_address,
total_deposit=total_deposit,
partner=partner,
)
self.client.poll(transaction_hash)
receipt_or_none = check_transaction_threw(self.client, transaction_hash)
transaction_executed = gas_limit is not None
if not transaction_executed or receipt_or_none:
if transaction_executed:
block = receipt_or_none['blockNumber']
else:
block = checking_block
self.proxy.jsonrpc_client.check_for_insufficient_eth(
transaction_name='setTotalDeposit',
transaction_executed=transaction_executed,
required_gas=GAS_REQUIRED_FOR_SET_TOTAL_DEPOSIT,
block_identifier=block,
)
error_type, msg = self._check_why_deposit_failed(
channel_identifier=channel_identifier,
partner=partner,
token=token,
amount_to_deposit=amount_to_deposit,
total_deposit=total_deposit,
transaction_executed=transaction_executed,
block_identifier=block,
)
error_msg = f'{error_prefix}. {msg}'
if error_type == RaidenRecoverableError:
log.warning(error_msg, **log_details)
else:
log.critical(error_msg, **log_details)
raise error_type(error_msg)
log.info('setTotalDeposit successful', **log_details) |
python | def get_user(self, username="", ext_collections=False, ext_galleries=False):
"""Get user profile information
:param username: username to lookup profile of
:param ext_collections: Include collection folder info
:param ext_galleries: Include gallery folder info
"""
if not username and self.standard_grant_type == "authorization_code":
response = self._req('/user/whoami')
u = User()
u.from_dict(response)
else:
if not username:
raise DeviantartError("No username defined.")
else:
response = self._req('/user/profile/{}'.format(username), {
'ext_collections' : ext_collections,
'ext_galleries' : ext_galleries
})
u = User()
u.from_dict(response['user'])
return u |
java | JmsConnectionImpl instantiateConnection(JmsJcaConnection jcaConnection, Map<String, String> _passThruProps) throws JMSException {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(this, tc, "instantiateConnection", jcaConnection);
JmsConnectionImpl jmsConnection = new JmsConnectionImpl(jcaConnection, isManaged(), _passThruProps);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(this, tc, "instantiateConnection", jmsConnection);
return jmsConnection;
} |
python | def _GetFileMappingsByPath(self, key_path_upper):
"""Retrieves the Windows Registry file mappings for a specific path.
Args:
key_path_upper (str): Windows Registry key path, in upper case with
a resolved root key alias.
Yields:
WinRegistryFileMapping: Windows Registry file mapping.
"""
candidate_mappings = []
for mapping in self._REGISTRY_FILE_MAPPINGS_NT:
if key_path_upper.startswith(mapping.key_path_prefix.upper()):
candidate_mappings.append(mapping)
# Sort the candidate mappings by longest (most specific) match first.
candidate_mappings.sort(
key=lambda mapping: len(mapping.key_path_prefix), reverse=True)
for mapping in candidate_mappings:
yield mapping |
java | public synchronized CuratorFramework getLocalConnection() throws IOException
{
if ( localConnection == null )
{
CuratorFrameworkFactory.Builder builder = CuratorFrameworkFactory.builder()
.connectString("localhost:" + configManager.getConfig().getInt(IntConfigs.CLIENT_PORT))
.sessionTimeoutMs(arguments.connectionTimeOutMs * 10)
.connectionTimeoutMs(arguments.connectionTimeOutMs)
.retryPolicy(new ExponentialBackoffRetry(1000, 3));
if ( arguments.aclProvider != null )
{
builder = builder.aclProvider(arguments.aclProvider);
}
localConnection = builder.build();
localConnection.start();
}
return localConnection;
} |
python | def delete_firewall_rule(self, server_uuid, firewall_rule_position):
"""
Delete a firewall rule based on a server uuid and rule position.
"""
url = '/server/{0}/firewall_rule/{1}'.format(server_uuid, firewall_rule_position)
return self.request('DELETE', url) |
python | def bbox(self):
"""BBox"""
return self.left, self.top, self.right, self.bottom |
java | public Member register(RegistrationRequest registrationRequest) throws RegistrationException {
Member member = getMember(registrationRequest.getEnrollmentID());
member.register(registrationRequest);
return member;
} |
java | @Transactional
public <T> List<T> findProperty(Class<T> propertyType, E entity, SearchParameters sp, List<Attribute<?, ?>> attributes) {
if (sp.hasNamedQuery()) {
return byNamedQueryUtil.findByNamedQuery(sp);
}
CriteriaBuilder builder = entityManager.getCriteriaBuilder();
CriteriaQuery<T> criteriaQuery = builder.createQuery(propertyType);
if (sp.getDistinct()) {
criteriaQuery.distinct(true);
}
Root<E> root = criteriaQuery.from(type);
Path<T> path = jpaUtil.getPath(root, attributes);
criteriaQuery.select(path);
// predicate
Predicate predicate = getPredicate(criteriaQuery, root, builder, entity, sp);
if (predicate != null) {
criteriaQuery = criteriaQuery.where(predicate);
}
// fetches
fetches(sp, root);
// order by
// we do not want to follow order by specified in search parameters
criteriaQuery.orderBy(builder.asc(path));
TypedQuery<T> typedQuery = entityManager.createQuery(criteriaQuery);
applyCacheHints(typedQuery, sp);
jpaUtil.applyPagination(typedQuery, sp);
List<T> entities = typedQuery.getResultList();
log.fine("Returned " + entities.size() + " elements");
return entities;
} |
python | def _ingest_string(self, input_string, path_to_root):
'''
a helper method for ingesting a string
:return: valid_string
'''
valid_string = ''
try:
valid_string = self._validate_string(input_string, path_to_root)
except:
            rules_path_to_root = re.sub(r'\[\d+\]', '[0]', path_to_root)
if 'default_value' in self.keyMap[rules_path_to_root]:
valid_string = self.keyMap[rules_path_to_root]['default_value']
return valid_string |
python | def connection_cache(func: callable):
"""Connection cache for SSH sessions. This is to prevent opening a
new, expensive connection on every command run."""
cache = dict()
lock = RLock()
@wraps(func)
def func_wrapper(host: str, username: str, *args, **kwargs):
key = "{h}-{u}".format(h=host, u=username)
if key in cache:
# connection exists, check if it is still valid before
# returning it.
conn = cache[key]
if conn and conn.is_active() and conn.is_authenticated():
return conn
else:
# try to close a bad connection and remove it from
# the cache.
if conn:
try_close(conn)
del cache[key]
# key is not in the cache, so try to recreate it
# it may have been removed just above.
if key not in cache:
conn = func(host, username, *args, **kwargs)
if conn is not None:
cache[key] = conn
return conn
# not sure how to reach this point, but just in case.
return None
def get_cache() -> dict:
return cache
def purge(key: str=None):
with lock:
if key is None:
conns = [(k, v) for k, v in cache.items()]
elif key in cache:
conns = ((key, cache[key]), )
else:
conns = list()
for k, v in conns:
try_close(v)
del cache[k]
func_wrapper.get_cache = get_cache
func_wrapper.purge = purge
return func_wrapper |
java | public void store(Object key, Object value) {
getJdbcTemplate().update(getMergeSql(),
new BeanPropertySqlParameterSource(value));
} |
java | public void end() {
ProfilingTimerNode currentNode = current.get();
if (currentNode != null) {
currentNode.stop();
current.set(currentNode.parent);
}
} |
python | def get_sections(self, s, base,
sections=['Parameters', 'Other Parameters']):
"""
Method that extracts the specified sections out of the given string if
(and only if) the docstring follows the numpy documentation guidelines
        [1]_. Note that each section must appear in either the
        :attr:`param_like_sections` or the :attr:`text_sections` attribute.
Parameters
----------
s: str
Docstring to split
base: str
base to use in the :attr:`sections` attribute
sections: list of str
sections to look for. Each section must be followed by a newline
character ('\\n') and a bar of '-' (following the numpy (napoleon)
docstring conventions).
Returns
-------
str
The replaced string
References
----------
.. [1] https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt
See Also
--------
delete_params, keep_params, delete_types, keep_types, delete_kwargs:
For manipulating the docstring sections
save_docstring:
for saving an entire docstring
"""
params = self.params
# Remove the summary and dedent the rest
s = self._remove_summary(s)
for section in sections:
key = '%s.%s' % (base, section.lower().replace(' ', '_'))
params[key] = self._get_section(s, section)
return s |
java | public RunList<R> node(final Node node) {
return filter(new Predicate<R>() {
public boolean apply(R r) {
return (r instanceof AbstractBuild) && ((AbstractBuild)r).getBuiltOn()==node;
}
});
} |
python | def info(name):
'''Show the information of the given virtual folder.
NAME: Name of a virtual folder.
'''
with Session() as session:
try:
result = session.VFolder(name).info()
print('Virtual folder "{0}" (ID: {1})'
.format(result['name'], result['id']))
print('- Owner:', result['is_owner'])
print('- Permission:', result['permission'])
print('- Number of files: {0}'.format(result['numFiles']))
except Exception as e:
print_error(e)
sys.exit(1) |
java | @edu.umd.cs.findbugs.annotations.SuppressWarnings(
value="NP_PARAMETER_MUST_BE_NONNULL_BUT_MARKED_AS_NULLABLE",
justification="Checked in precondition")
static Path pathForDataset(Path root, @Nullable String namespace, @Nullable String name) {
Preconditions.checkNotNull(namespace, "Namespace cannot be null");
Preconditions.checkNotNull(name, "Dataset name cannot be null");
// Why replace '.' here? Is this a namespacing hack?
return new Path(root, new Path(namespace, name.replace('.', Path.SEPARATOR_CHAR)));
} |
java | public NamespaceContext buildContext(Message receivedMessage, Map<String, String> namespaces) {
SimpleNamespaceContext simpleNamespaceContext = new SimpleNamespaceContext();
//first add default namespace definitions
if (namespaceMappings.size() > 0) {
simpleNamespaceContext.setBindings(namespaceMappings);
}
Map<String, String> dynamicBindings = XMLUtils.lookupNamespaces(receivedMessage.getPayload(String.class));
if (!CollectionUtils.isEmpty(namespaces)) {
//dynamic binding of namespaces declarations in root element of received message
for (Entry<String, String> binding : dynamicBindings.entrySet()) {
//only bind namespace that is not present in explicit namespace bindings
if (!namespaces.containsValue(binding.getValue())) {
simpleNamespaceContext.bindNamespaceUri(binding.getKey(), binding.getValue());
}
}
//add explicit namespace bindings
simpleNamespaceContext.setBindings(namespaces);
} else {
simpleNamespaceContext.setBindings(dynamicBindings);
}
return simpleNamespaceContext;
} |
java | public void setDefault() {
try {
TelephonyManager telephonyManager = (TelephonyManager) getContext().getSystemService(Context.TELEPHONY_SERVICE);
String phone = telephonyManager.getLine1Number();
if (phone != null && !phone.isEmpty()) {
this.setNumber(phone);
} else {
String iso = telephonyManager.getNetworkCountryIso();
setEmptyDefault(iso);
}
} catch (SecurityException e) {
setEmptyDefault();
}
} |
python | def get_minimum_needs(self):
"""Get the minimum needed information about the minimum needs.
That is the resource and the amount.
:returns: minimum needs
:rtype: OrderedDict
"""
minimum_needs = OrderedDict()
for resource in self.minimum_needs['resources']:
if resource['Unit abbreviation']:
name = '%s [%s]' % (
tr(resource['Resource name']),
resource['Unit abbreviation']
)
else:
name = tr(resource['Resource name'])
amount = resource['Default']
minimum_needs[name] = amount
return OrderedDict(minimum_needs) |
python | def to_array(self):
"""
Serializes this InputMediaAnimation to a dictionary.
:return: dictionary representation of this object.
:rtype: dict
"""
array = super(InputMediaAnimation, self).to_array()
# 'type' given by superclass
# 'media' given by superclass
if self.thumb is not None:
if isinstance(self.thumb, InputFile):
array['thumb'] = None # type InputFile
elif isinstance(self.thumb, str):
array['thumb'] = u(self.thumb) # py2: type unicode, py3: type str
else:
raise TypeError('Unknown type, must be one of InputFile, str.')
# end if
# 'caption' given by superclass
# 'parse_mode' given by superclass
if self.width is not None:
array['width'] = int(self.width) # type int
if self.height is not None:
array['height'] = int(self.height) # type int
if self.duration is not None:
array['duration'] = int(self.duration) # type int
return array |
python | def p_rule(self, rule):
'''rule : GUIDELINE
| REGULATION'''
if len(rule[1]) == 4:
# This is a guideline
rule[0] = Guideline(rule[1][1], rule[1][2], rule[1][3])
else:
# This is a regulation
indentsize = rule[1][0]
number = rule[1][1]
text = rule[1][2]
parent = None
# If we just "un"nested, shrink the current rule to our level
if self.prev_indent > indentsize:
self.current_rule = self.current_rule[0:indentsize+1]
# We just added a nested level, the parent is the list's last elem
if self.prev_indent < indentsize:
parent = self.current_rule[-1]
# Else, if we are nested the parent is the one before the last elem
elif len(self.current_rule) > 1:
parent = self.current_rule[-2]
# Else if we are not nested, then we are a root rule and parent is none
# (do nothing as parent is initialized to none)
# Create the regulation node
reg = Regulation(number, text, parent)
# Let our parent knows he has a new child, if we don't have a parent
# let's create an item in the article rules list
if parent:
parent.add_child(reg)
else:
rule[0] = reg
# Unless we nested, pop and replace the last rule by ourself
# If we added a nesting level, we just need to add ourself
if self.prev_indent >= indentsize:
self.current_rule.pop()
self.current_rule.append(reg)
self.prev_indent = indentsize |
python | def offset_to_line(self, offset):
"""
Converts 0-based character offset to pair (line, col) of 1-based line and 0-based column
numbers.
"""
offset = max(0, min(self._text_len, offset))
line_index = bisect.bisect_right(self._line_offsets, offset) - 1
return (line_index + 1, offset - self._line_offsets[line_index]) |
python | def heartbeat(self):
"""Heartbeat request to keep session alive.
"""
unique_id = self.new_unique_id()
message = {
'op': 'heartbeat',
'id': unique_id,
}
self._send(message)
return unique_id |
java | @JsonIgnore
public void setUnknownFields(final Map<String,Object> unknownFields) {
this.unknownFields.clear();
this.unknownFields.putAll(unknownFields);
} |