language (stringclasses, 2 values) | func_code_string (stringlengths, 63 to 466k) |
---|---|
java | public AlgoResponse pipe(Object input) throws APIException {
if (input instanceof String) {
return pipeRequest((String)input,ContentType.Text);
} else if (input instanceof byte[]) {
return pipeBinaryRequest((byte[])input);
} else {
return pipeRequest(gson.toJsonTree(input).toString(),ContentType.Json);
}
} |
java | static String getArrayElementStringValue(Node n) {
return (NodeUtil.isNullOrUndefined(n) || n.isEmpty())
? "" : getStringValue(n);
} |
java | public Collection<Monitor> list(String name, String type)
{
return list(name, type, 0, -1);
} |
python | def output(self) -> None:
"""Pretty print travel times."""
print("%s - %s" % (self.station, self.now))
print(self.products_filter)
for j in sorted(self.journeys, key=lambda k: k.real_departure)[
: self.max_journeys
]:
print("-------------")
print(f"{j.product}: {j.number} ({j.train_id})")
print(f"Richtung: {j.direction}")
print(f"Abfahrt in {j.real_departure} min.")
print(f"Abfahrt {j.departure.time()} (+{j.delay})")
print(f"Nächste Haltestellen: {([s['station'] for s in j.stops])}")
if j.info:
print(f"Hinweis: {j.info}")
print(f"Hinweis (lang): {j.info_long}")
print(f"Icon: {j.icon}") |
java | @Override
public EClass getFile() {
if (fileEClass == null) {
fileEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(StorePackage.eNS_URI).getEClassifiers().get(47);
}
return fileEClass;
} |
java | public synchronized void updateMasterConf(PropertyKey key, @Nullable String value) {
mMasters.forEach(master -> master.updateConf(key, value));
} |
python | def change_return_type(f):
"""
Converts the returned value of the wrapped function to the type of the
first arg, or to the type specified by the 'return_type' keyword argument.
"""
@wraps(f)
def wrapper(*args, **kwargs):
if 'return_type' in kwargs:
return_type = kwargs['return_type']
kwargs.pop('return_type')
return return_type(f(*args, **kwargs))
elif len(args) > 0:
return_type = type(args[0])
return return_type(f(*args, **kwargs))
else:
return f(*args, **kwargs)
return wrapper |
python | def _legacy_api_registration_check(self):
'''
Check registration status through API
'''
logger.debug('Checking registration status...')
machine_id = generate_machine_id()
try:
url = self.api_url + '/v1/systems/' + machine_id
net_logger.info("GET %s", url)
res = self.session.get(url, timeout=self.config.http_timeout)
except requests.ConnectionError:
# can't connect, run connection test
logger.error('Connection timed out. Running connection test...')
self.test_connection()
return False
# had to do a quick bugfix changing this around,
# which makes the None-False-True dichotomy seem weird
# TODO: reconsider what gets returned, probably this:
# True for registered
# False for unregistered
# None for system 404
try:
# check the 'unregistered_at' key of the response
unreg_status = json.loads(res.content).get('unregistered_at', 'undefined')
# set the global account number
self.config.account_number = json.loads(res.content).get('account_number', 'undefined')
except ValueError:
# bad response, no json object
return False
if unreg_status == 'undefined':
# key not found, machine not yet registered
return None
elif unreg_status is None:
# unregistered_at = null, means this machine IS registered
return True
else:
# machine has been unregistered, this is a timestamp
return unreg_status |
python | def _cnvkit_fix(cnns, background_cnn, items, ckouts):
"""Normalize samples, correcting sources of bias.
"""
return [_cnvkit_fix_base(cnns, background_cnn, items, ckouts)] |
python | def write_output(self):
"""Write all stored output data to storage."""
for data in self.output_data.values():
self.create_output(data.get('key'), data.get('value'), data.get('type')) |
python | def to_utf8(self, data):
"""
Convert unicode values to strings even if they belong to lists or dicts.
:param data: an object.
:return: The object with all unicode values converted to string.
"""
# if this is a unicode string, return its string representation
if isinstance(data, unicode):
return data.encode('utf-8')
# if this is a list of values, return list of string values
if isinstance(data, list):
return [self.to_utf8(item) for item in data]
# if this is a dictionary, return dictionary of string keys and values
if isinstance(data, dict):
return {
self.to_utf8(key): self.to_utf8(value)
for key, value in data.iteritems()
}
# if it's anything else, return it in its original form
return data |
java | public PersistentTranId getPersistentTranId()
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
{
SibTr.entry(this, tc, "getPersistentTranId");
SibTr.exit(this, tc, "getPersistentTranId", "return="+_ptid);
}
return _ptid;
} |
python | def load(self):
"""Load proxy list from configured proxy source"""
self._list = self._source.load()
self._list_iter = itertools.cycle(self._list) |
python | def init(**kw):
""" Initialize an instance of :class:`fedmsg.core.FedMsgContext`.
The config is loaded with :func:`fedmsg.config.load_config` and updated
by any keyword arguments. This config is used to initialize the context
object.
The object is stored in a thread local as
:data:`fedmsg.__local.__context`.
"""
if getattr(__local, '__context', None):
raise ValueError("fedmsg already initialized")
# Read config from CLI args and a config file
config = fedmsg.config.load_config([], None)
# Override the defaults with whatever the user explicitly passes in.
config.update(kw)
__local.__context = fedmsg.core.FedMsgContext(**config)
return __local.__context |
java | @Override
public void contains(JsonElement expectedJson) {
assertTrue("Expected to find " + GSON.toJson(expectedJson), checkContains(expectedJson));
} |
java | public void setAWSAccountIds(java.util.Collection<String> aWSAccountIds) {
if (aWSAccountIds == null) {
this.aWSAccountIds = null;
return;
}
this.aWSAccountIds = new com.amazonaws.internal.SdkInternalList<String>(aWSAccountIds);
} |
python | def _listdir(self, root):
"List directory 'root' appending the path separator to subdirs."
res = []
for name in os.listdir(root):
path = os.path.join(root, name)
if os.path.isdir(path):
name += os.sep
res.append(name)
return res |
python | def send_request(req_cat, con, req_str, kwargs):
"""
Sends request to facebook graph
Returns the facebook-json response converted to python object
"""
try:
kwargs = parse.urlencode(kwargs) #python3x
except:
kwargs = urllib.urlencode(kwargs) #python2x
"""
Wrapper to keep TCP connection ESTABLISHED. Rather the connection go to
CLOSE_WAIT and raise errors CannotSendRequest or the server reply with
empty and it raise BadStatusLine
"""
try:
con.request(req_cat, req_str, kwargs) #send request to facebook graph
except httplib.CannotSendRequest:
con = create()
con.request(req_cat, req_str, kwargs)
try:
res = con.getresponse().read() #read response
except (IOError, httplib.BadStatusLine):
con = create()
con.request(req_cat, req_str, kwargs)
res = con.getresponse().read()
# decode the raw bytes returned by read() so json.loads gets text
if isinstance(res, bytes):
    res = res.decode()
return json.loads(res) |
java | public java.util.List<String> getApprovedPatches() {
if (approvedPatches == null) {
approvedPatches = new com.amazonaws.internal.SdkInternalList<String>();
}
return approvedPatches;
} |
java | public static <T> Set<T> toSet(Enumeration<T> self) {
Set<T> answer = new HashSet<T>();
while (self.hasMoreElements()) {
answer.add(self.nextElement());
}
return answer;
} |
java | public Observable<ServiceResponse<Page<RemoteLoginInformationInner>>> listRemoteLoginInformationNextWithServiceResponseAsync(final String nextPageLink) {
return listRemoteLoginInformationNextSinglePageAsync(nextPageLink)
.concatMap(new Func1<ServiceResponse<Page<RemoteLoginInformationInner>>, Observable<ServiceResponse<Page<RemoteLoginInformationInner>>>>() {
@Override
public Observable<ServiceResponse<Page<RemoteLoginInformationInner>>> call(ServiceResponse<Page<RemoteLoginInformationInner>> page) {
String nextPageLink = page.body().nextPageLink();
if (nextPageLink == null) {
return Observable.just(page);
}
return Observable.just(page).concatWith(listRemoteLoginInformationNextWithServiceResponseAsync(nextPageLink));
}
});
} |
java | @NonNull
public static CreateTypeStart createType(@Nullable String keyspace, @NonNull String typeName) {
return createType(
keyspace == null ? null : CqlIdentifier.fromCql(keyspace), CqlIdentifier.fromCql(typeName));
} |
python | def SetValue(self, value, raise_on_error=True):
"""Receives a value and fills it into a DataBlob.
Args:
value: value to set
raise_on_error: if True, raise if we can't serialize. If False, set the
key to an error string.
Returns:
self
Raises:
TypeError: if the value can't be serialized and raise_on_error is True
"""
type_mappings = [(Text, "string"), (bytes, "data"), (bool, "boolean"),
(int, "integer"), (long, "integer"), (dict, "dict"),
(float, "float")]
if value is None:
self.none = "None"
elif isinstance(value, rdfvalue.RDFValue):
self.rdf_value.data = value.SerializeToString()
self.rdf_value.age = int(value.age)
self.rdf_value.name = value.__class__.__name__
elif isinstance(value, (list, tuple)):
self.list.content.Extend([
DataBlob().SetValue(v, raise_on_error=raise_on_error) for v in value
])
elif isinstance(value, set):
self.set.content.Extend([
DataBlob().SetValue(v, raise_on_error=raise_on_error) for v in value
])
elif isinstance(value, dict):
self.dict.FromDict(value, raise_on_error=raise_on_error)
else:
for type_mapping, member in type_mappings:
if isinstance(value, type_mapping):
setattr(self, member, value)
return self
message = "Unsupported type for ProtoDict: %s" % type(value)
if raise_on_error:
raise TypeError(message)
setattr(self, "string", message)
return self |
java | @Override
public synchronized void reset() throws IOException {
try {
this.digest = MessageDigest.getInstance("MD5");
} catch (NoSuchAlgorithmException e) {
/*
* Not much to do here. We know the algorithm existed when we created the initial MessageDigest in the
* constructor, so we can be reasonably sure that it's still going to exist if the stream gets reset.
*/
}
this.in.reset();
} |
python | def _coords_conv(self, pos):
"""For Svg coordinate system, reflect over X axis and
translate from center to top-left
"""
px = (self.original_size[0] / 2 + pos[0]) * self.scale_factor
py = (self.original_size[1] / 2 - pos[1]) * self.scale_factor
return round(px, 2), round(py, 2) |
python | def _ScopesFromMetadataServer(self, scopes):
"""Returns instance scopes based on GCE metadata server."""
if not util.DetectGce():
raise exceptions.ResourceUnavailableError(
'GCE credentials requested outside a GCE instance')
if not self.GetServiceAccount(self.__service_account_name):
raise exceptions.ResourceUnavailableError(
'GCE credentials requested but service account '
'%s does not exist.' % self.__service_account_name)
if scopes:
scope_ls = util.NormalizeScopes(scopes)
instance_scopes = self.GetInstanceScopes()
if scope_ls > instance_scopes:
raise exceptions.CredentialsError(
'Instance did not have access to scopes %s' % (
sorted(list(scope_ls - instance_scopes)),))
else:
scopes = self.GetInstanceScopes()
return scopes |
python | def sfiles_to_event(sfile_list):
"""
Write an event.dat file from a list of Seisan events
:type sfile_list: list
:param sfile_list: List of s-files to sort and put into the database
:returns: List of tuples of event ID (int) and Sfile name
"""
event_list = []
sort_list = [(readheader(sfile).origins[0].time, sfile)
for sfile in sfile_list]
sort_list.sort(key=lambda tup: tup[0])
sfile_list = [sfile[1] for sfile in sort_list]
catalog = Catalog()
for i, sfile in enumerate(sfile_list):
event_list.append((i, sfile))
catalog.append(readheader(sfile))
# Hand off to sister function
write_event(catalog)
return event_list |
java | public RecordType getOrCreateRecordType(Map<String, SoyType> fields) {
return recordTypes.intern(RecordType.of(fields));
} |
python | def emit(self, record):
"""
Override emit() method in handler parent for sending log to RESTful
API
"""
pid = os.getpid()
if pid != self.pid:
self.pid = pid
self.logs = []
self.timer = self._flushAndRepeatTimer()
atexit.register(self._stopFlushTimer)
# avoid infinite recursion
if record.name.startswith('requests'):
return
self.logs.append(self._prepPayload(record)) |
java | @Override
public CPMeasurementUnit findByUuid_C_First(String uuid, long companyId,
OrderByComparator<CPMeasurementUnit> orderByComparator)
throws NoSuchCPMeasurementUnitException {
CPMeasurementUnit cpMeasurementUnit = fetchByUuid_C_First(uuid,
companyId, orderByComparator);
if (cpMeasurementUnit != null) {
return cpMeasurementUnit;
}
StringBundler msg = new StringBundler(6);
msg.append(_NO_SUCH_ENTITY_WITH_KEY);
msg.append("uuid=");
msg.append(uuid);
msg.append(", companyId=");
msg.append(companyId);
msg.append("}");
throw new NoSuchCPMeasurementUnitException(msg.toString());
} |
python | def _count_spaces_startswith(line):
'''
Count the number of spaces before the first character
'''
if line.split('#')[0].strip() == "":
return None
spaces = 0
for i in line:
if i.isspace():
spaces += 1
else:
return spaces |
python | def get_attachments(self, ids, attachment_ids,
include_fields=None, exclude_fields=None):
"""
Wrapper for Bug.attachments. One of ids or attachment_ids is required
:param ids: Get attachments for this bug ID
:param attachment_ids: Specific attachment ID to get
https://bugzilla.readthedocs.io/en/latest/api/core/v1/attachment.html#get-attachment
"""
params = {
"ids": self._listify(ids) or [],
"attachment_ids": self._listify(attachment_ids) or [],
}
if include_fields:
params["include_fields"] = self._listify(include_fields)
if exclude_fields:
params["exclude_fields"] = self._listify(exclude_fields)
return self._proxy.Bug.attachments(params) |
python | def set_boolean(self, option, value):
"""Set a boolean option.
Args:
option (str): name of option.
value (bool): value of the option.
Raises:
TypeError: Value must be a boolean.
"""
if not isinstance(value, bool):
raise TypeError("%s must be a boolean" % option)
self.options[option] = str(value).lower() |
java | static final Properties getProperties(final KunderaMetadata kunderaMetadata, final String persistenceUnit)
{
PersistenceUnitMetadata persistenceUnitMetadatata = kunderaMetadata.getApplicationMetadata()
.getPersistenceUnitMetadata(persistenceUnit);
Properties props = persistenceUnitMetadatata.getProperties();
return props;
} |
java | public static <T extends Throwable> void printHistory(final String message, T th, final Logger logger) {
printHistory(new CouldNotPerformException(message, th), logger, LogLevel.ERROR);
} |
java | public static String getMessageLogLevel(final int level, final String defLevel) {
switch (level) {
case (Constants.ERR_LEVEL):
return Constants.MSG_ERR;
case (Constants.DEBUG_LEVEL):
return Constants.MSG_DEBUG;
case (Constants.INFO_LEVEL):
return Constants.MSG_INFO;
case (Constants.VERBOSE_LEVEL):
return Constants.MSG_VERBOSE;
case (Constants.WARN_LEVEL):
return Constants.MSG_WARN;
default:
return defLevel;
}
} |
python | def parse(self, data):
"""
Converts a dict representing an OLSR 0.6.x topology
to a NetworkX Graph object, which is then returned.
Additionally checks for "config" data in order to determine version and revision.
"""
graph = self._init_graph()
if 'topology' not in data:
raise ParserError('Parse error, "topology" key not found')
elif 'mid' not in data:
raise ParserError('Parse error, "mid" key not found')
# determine version and revision
if 'config' in data:
version_info = data['config']['olsrdVersion'].replace(' ', '').split('-')
self.version = version_info[1]
# try to get only the git hash
if 'hash_' in version_info[-1]:
version_info[-1] = version_info[-1].split('hash_')[-1]
self.revision = version_info[-1]
# process alias list
alias_dict = {}
for node in data['mid']:
local_addresses = [alias['ipAddress'] for alias in node['aliases']]
alias_dict[node['ipAddress']] = local_addresses
# loop over topology section and create networkx graph
for link in data['topology']:
try:
source = link['lastHopIP']
target = link['destinationIP']
cost = link['tcEdgeCost']
properties = {
'link_quality': link['linkQuality'],
'neighbor_link_quality': link['neighborLinkQuality']
}
except KeyError as e:
raise ParserError('Parse error, "%s" key not found' % e)
# add nodes with their local_addresses
for node in [source, target]:
if node not in alias_dict:
continue
graph.add_node(node, local_addresses=alias_dict[node])
# skip links with infinite cost
if cost == float('inf'):
continue
# original olsrd cost (jsoninfo multiplies by 1024)
cost = float(cost) / 1024.0
# add link to Graph
graph.add_edge(source, target, weight=cost, **properties)
return graph |
java | public void updateDrawableState(int state, boolean flag) {
final int oldState = mCombinedState;
// Update the combined state flag
if (flag) mCombinedState |= state;
else mCombinedState &= ~state;
// Set the combined state
if (oldState != mCombinedState) {
setState(VIEW_STATE_SETS[mCombinedState]);
}
} |
java | public static Connection newInstance(Connection conn) {
return (Connection) java.lang.reflect.Proxy.newProxyInstance(Connection.class.getClassLoader(), new Class[]{Connection.class},
new ConnectionProxy(conn));
/*
return (Connection) java.lang.reflect.Proxy.newProxyInstance(conn.getClass().getClassLoader(), conn.getClass().getInterfaces(),
new ConnectionProxy(conn));
*/
} |
python | def one_point_crossover(parents):
"""Perform one point crossover on two parent chromosomes.
Select a random position in the chromosome.
Take genes to the left from one parent and the rest from the other parent.
Ex. p1 = xxxxx, p2 = yyyyy, position = 2 (starting at 0), child = xxyyy
"""
# The point that the chromosomes will be crossed at (see Ex. above)
crossover_point = random.randint(1, len(parents[0]) - 1)
return (_one_parent_crossover(parents[0], parents[1], crossover_point),
_one_parent_crossover(parents[1], parents[0], crossover_point)) |
java | protected <T> CompletionStage<Optional<T>> doWithServiceImpl(String name, Descriptor.Call<?, ?> serviceCall, Function<URI, CompletionStage<T>> block) {
return locate(name, serviceCall).thenCompose(uri -> {
return uri
.map(u -> block.apply(u).thenApply(Optional::of))
.orElseGet(() -> CompletableFuture.completedFuture(Optional.empty()));
});
} |
python | def get_index(self, value):
"""
Return the index (or indices) of the given value (or values) in
`state_values`.
Parameters
----------
value
Value(s) to get the index (indices) for.
Returns
-------
idx : int or ndarray(int)
Index of `value` if `value` is a single state value; array
of indices if `value` is an array_like of state values.
"""
if self.state_values is None:
state_values_ndim = 1
else:
state_values_ndim = self.state_values.ndim
values = np.asarray(value)
if values.ndim <= state_values_ndim - 1:
return self._get_index(value)
elif values.ndim == state_values_ndim: # array of values
k = values.shape[0]
idx = np.empty(k, dtype=int)
for i in range(k):
idx[i] = self._get_index(values[i])
return idx
else:
raise ValueError('invalid value') |
java | private static void addLong(long input, int count, int startPos, byte[] dest) {
if(DEBUG_LEV > 30)
System.err.println("EncodedElement::addLong : Begin");
int currentByte = startPos/8;
int currentOffset = startPos%8;
int bitRoom;//how many bits can be placed in current byte
long upMask;//to clear upper bits (lower bits are auto-cleared by the left shift)
int downShift;//bits to shift down, isolating top bits of input
int upShift;//bits to shift up, packing byte from top.
while(count > 0) {
//find how many bits can be placed in current byte
bitRoom = 8-currentOffset;
//get those bits
//i.e, take upper 'bitsNeeded' of input, put to lower part of byte.
downShift = count-bitRoom;
upMask = 255 >>> currentOffset;
upShift = 0;
if(downShift < 0) {
//upMask = 255 >>> bitRoom-count;
upShift = bitRoom - count;
upMask = 255 >>> (currentOffset+upShift);
downShift = 0;
}
if(DEBUG_LEV > 30) {
System.err.println("count:offset:bitRoom:downShift:upShift:" +
count+":"+currentOffset+":"+bitRoom+":"+downShift+":"+upShift);
}
long currentBits = (input >>> downShift) & (upMask);
//shift bits back up to match offset
currentBits = currentBits << upShift;
upMask = (byte)upMask << upShift;
dest[currentByte] = (byte)(dest[currentByte] & (~upMask));
//merge bytes~
dest[currentByte] = (byte)(dest[currentByte] | currentBits);
//System.out.println("new currentByte: " + dest[currentByte]);
count -= bitRoom;
currentOffset = 0;
currentByte++;
}
if(DEBUG_LEV > 30)
System.err.println("EncodedElement::addLong : End");
} |
python | def plot_kde(self,
ax=None,
amax=None,
amin=None,
label=None,
return_fig=False):
"""
Plot a KDE for the curve. Very nice summary of KDEs:
https://jakevdp.github.io/blog/2013/12/01/kernel-density-estimation/
Args:
ax (axis): Optional matplotlib (MPL) axis to plot into. Returned.
amax (float): Optional max value to permit.
amin (float): Optional min value to permit.
label (string): What to put on the y-axis. Defaults to curve name.
return_fig (bool): If you want to return the MPL figure object.
Returns:
None, axis, figure: depending on what you ask for.
"""
from scipy.stats import gaussian_kde
if ax is None:
fig = plt.figure()
ax = fig.add_subplot(111)
return_ax = False
else:
return_ax = True
a = self[~np.isnan(self)]
# Find values for common axis to exclude outliers.
if amax is None:
amax = np.percentile(a, 99)
if amin is None:
amin = np.percentile(a, 1)
x = a[np.abs(a - 0.5 * (amax + amin)) < 0.5 * (amax - amin)]
x_grid = np.linspace(amin, amax, 100)
kde = gaussian_kde(x)
std_a = kde.evaluate(x_grid)
img = np.array([std_a]) / np.max([std_a])
extent = [amin, amax, 0, 1]
ax.imshow(img, aspect='auto', cmap='viridis', extent=extent)
ax.set_yticklabels([])
ax.set_ylabel(label or self.mnemonic)
if return_ax:
return ax
elif return_fig:
return fig
else:
return None |
java | public void delete_device_attribute_property(Database database, String deviceName, DbAttribute[] attribute)
throws DevFailed {
for (DbAttribute att : attribute)
delete_device_attribute_property(database, deviceName,
att.name, att.get_property_list());
}
//==========================================================================
/* (non-Javadoc)
* @see fr.esrf.TangoApi.IDatabaseDAO#delete_device_attribute_property(java.lang.String, java.lang.String, java.lang.String[])
*/
//==========================================================================
public void delete_device_attribute_property(Database database, String deviceName, String attname, String[] propnames)
throws DevFailed {
if (propnames.length==0)
return;
if (!database.isAccess_checked()) checkAccess(database);
// Build a String array before command
String[] array = new String[2 + propnames.length];
array[0] = deviceName;
array[1] = attname;
System.arraycopy(propnames, 0, array, 2, propnames.length);
DeviceData argIn = new DeviceData();
argIn.insert(array);
command_inout(database, "DbDeleteDeviceAttributeProperty", argIn);
} |
java | @Override
protected int handlePrevious(int position) {
if (pattern_.CELength_ == 0) {
search_.matchedIndex_ =
search_.matchedIndex_ == DONE ? getIndex() : search_.matchedIndex_;
if (search_.matchedIndex_ == search_.beginIndex()) {
setMatchNotFound();
} else {
search_.matchedIndex_--;
textIter_.setOffset(search_.matchedIndex_);
search_.setMatchedLength(0);
}
} else {
textIter_.setOffset(position);
if (search_.isCanonicalMatch_) {
// *could* use exact match here since extra accents *not* allowed!
handlePreviousCanonical();
} else {
handlePreviousExact();
}
}
return search_.matchedIndex_;
} |
python | def _get_traversal_children(self, name):
"""
Retrieve component and subcomponent indexes from the given traversal path
(e.g. PID_1_2 -> component=2, subcomponent=None)
"""
name = name.upper()
parts = name.split('_')
try:
assert 3 <= len(parts) <= 4
prefix = "{0}_{1}".format(parts[0], parts[1])
component = int(parts[2])
subcomponent = int(parts[3]) if len(parts) == 4 else None
except (AssertionError, ValueError):
return None, None
else:
if prefix != self.name:
return None, None
return component, subcomponent |
java | public Upload upload(final PutObjectRequest putObjectRequest)
throws AmazonServiceException, AmazonClientException {
return doUpload(putObjectRequest, null, null, null);
} |
python | def transform_index_to_physical_point(image, index):
"""
Get spatial point from index of an image.
ANTsR function: `antsTransformIndexToPhysicalPoint`
Arguments
---------
img : ANTsImage
image to get values from
index : list or tuple or numpy.ndarray
location in image
Returns
-------
tuple
Example
-------
>>> import ants
>>> import numpy as np
>>> img = ants.make_image((10,10),np.random.randn(100))
>>> pt = ants.transform_index_to_physical_point(img, (2,2))
"""
if not isinstance(image, iio.ANTsImage):
raise ValueError('image must be ANTsImage type')
if isinstance(index, np.ndarray):
index = index.tolist()
if not isinstance(index, (tuple,list)):
raise ValueError('index must be tuple or list')
if len(index) != image.dimension:
raise ValueError('len(index) != image.dimension')
index = [i+1 for i in index]
ndim = image.dimension
ptype = image.pixeltype
libfn = utils.get_lib_fn('TransformIndexToPhysicalPoint%s%i' % (utils.short_ptype(ptype), ndim))
point = libfn(image.pointer, [list(index)])
return np.array(point[0]) |
python | def clear_published_date(self):
"""Removes the puiblished date.
raise: NoAccess - ``Metadata.isRequired()`` is ``true`` or
``Metadata.isReadOnly()`` is ``true``
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.assessment.AssessmentOfferedForm.clear_start_time_template
if (self.get_published_date_metadata().is_read_only() or
self.get_published_date_metadata().is_required()):
raise errors.NoAccess()
self._my_map['publishedDate'] = self._published_date_default |
java | public static String getStatusAsString(int status) {
if (status == STATUS_PENDING) {
return "PENDING";
} else if (status == STATUS_ACTIVE) {
return "ACTIVE";
} else if (status == STATUS_DONE) {
return "DONE";
} else if (status == STATUS_FAILED) {
return "FAILED";
} else if (status == STATUS_SUSPENDED) {
return "SUSPENDED";
} else if (status == STATUS_UNSUBMITTED) {
return "UNSUBMITTED";
} else if (status == STATUS_STAGE_IN) {
return "STAGE_IN";
} else if (status == STATUS_STAGE_OUT) {
return "STAGE_OUT";
}
return "Unknown";
} |
java | @Override
protected <T> T parseEntity(Class<T> entityClass, HttpResponse httpResponse) throws IOException {
return GSON.fromJson(new InputStreamReader(httpResponse.getEntity().getContent()), entityClass);
} |
java | @Override
public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
if (CollectionUtils.isNotEmpty(annotations)) {
Set<? extends Element> elements = roundEnv.getElementsAnnotatedWith(Interceptor.class);
try {
parseInterceptors(elements);
} catch (Exception e) {
logger.error(e);
}
return true;
}
return false;
} |
java | @Override
public CreateEgressOnlyInternetGatewayResult createEgressOnlyInternetGateway(CreateEgressOnlyInternetGatewayRequest request) {
request = beforeClientExecution(request);
return executeCreateEgressOnlyInternetGateway(request);
} |
java | public static synchronized XMLEventReader createStringReader(final String paramString)
throws IOException, XMLStreamException {
final XMLInputFactory factory = XMLInputFactory.newInstance();
factory.setProperty(XMLInputFactory.SUPPORT_DTD, false);
final InputStream in = new ByteArrayInputStream(paramString.getBytes());
return factory.createXMLEventReader(in);
} |
java | public static TokenRegex compile(String pattern) throws ParseException {
ExpressionIterator p = QueryToPredicate.PARSER.parse(pattern);
Expression exp;
TransitionFunction top = null;
while ((exp = p.next()) != null) {
if (top == null) {
top = consumerize(exp);
} else {
top = new TransitionFunction.Sequence(top, consumerize(exp));
}
}
return new TokenRegex(top);
} |
java | public List<String> asMulti() {
if (values.isEmpty()) {
return Collections.emptyList();
}
List<String> multi = new ArrayList<String>(values.size());
for (JsonValue value : values) {
if (value.isNull()) {
multi.add("");
continue;
}
Object obj = value.getValue();
if (obj != null) {
multi.add(obj.toString());
continue;
}
}
return multi;
} |
java | private JPanel getPDBFilePanel(int pos ,JTextField f, JTextField c){
JPanel panel = new JPanel();
panel.setBorder(BorderFactory.createLineBorder(Color.black));
JLabel l01 = new JLabel("PDB code ");
panel.add(l01);
Box hBox11 = Box.createHorizontalBox();
JLabel l11 = new JLabel(pos + ":");
f.setMaximumSize(new Dimension(Short.MAX_VALUE,30));
hBox11.add(l11);
hBox11.add(Box.createVerticalGlue());
hBox11.add(f, BorderLayout.CENTER);
hBox11.add(Box.createVerticalGlue());
panel.add(hBox11);
Box hBox21 = Box.createHorizontalBox();
JLabel l21 = new JLabel("Chain" + pos + ":");
c.setMaximumSize(new Dimension(Short.MAX_VALUE,30));
hBox21.add(l21);
hBox21.add(Box.createGlue());
hBox21.add(c, BorderLayout.CENTER);
hBox21.add(Box.createGlue());
panel.add(hBox21);
return panel;
} |
java | public static synchronized void chdir(String path) {
if (path != null) {
rootDir = new File(getAbsolutePath(path)).getAbsolutePath();
}
} |
python | def on_sigint(self, sig, frame):
"""
We got SIGINT signal.
"""
if self.stop_requested or self.stop_requested_force:
# signal has already been sent or we force a shutdown.
# handles the keystroke 2x CTRL+C to force an exit.
self.stop_requested_force = True
self.logger.warning('Force stopped: ' + str(sig))
# just kill the process, we don't care about the results
self.on_force_exit()
os._exit(1)
# with force_exit we really close the process, killing it in unknown state
# self.fail('Force stopped', force_exit=True)
# return
if self.is_master_process():
self.logger.warning('Received signal '+str(sig)+'. Send again to force stop. Stopping ...')
else:
self.logger.debug("Got child signal " + str(sig))
self.stop_requested = True
# the default SIGINT handle in python is not always installed, so we can't rely on the
# KeyboardInterrupt exception to be thrown.
# thread.interrupt_main would call sigint again.
# the shutdown listener will do the rest like committing rest memory files into Git and closing connections.
sys.exit(0 if self.in_early_stop else 1) |
java | public static void addExtendedProperty(String propName, String dataType, boolean multiValued, Object defaultValue) {
if (dataType == null || "null".equalsIgnoreCase(dataType))
return;
if (extendedPropertiesDataType.containsKey(propName)) {
Tr.warning(tc, WIMMessageKey.DUPLICATE_PROPERTY_EXTENDED, new Object[] { propName, "PersonAccount" });
return;
}
if (getPropertyNames("PersonAccount").contains(propName)) {
Tr.warning(tc, WIMMessageKey.DUPLICATE_PROPERTY_ENTITY, new Object[] { propName, "PersonAccount" });
return;
}
extendedPropertiesDataType.put(propName, dataType);
if (defaultValue != null)
extendedPropertiesDefaultValue.put(propName, defaultValue);
if (multiValued)
extendedMultiValuedProperties.add(propName);
} |
java | public void setUserPassword(String password) {
try {
getSipURI().setUserPassword(password);
} catch (ParseException e) {
logger.error("error setting parameter ", e);
throw new IllegalArgumentException("Bad arg", e );
}
} |
java | public static void addSuccessAndErrorConditionsAndActions(final JpaRolloutGroup group,
final RolloutGroupConditions conditions) {
addSuccessAndErrorConditionsAndActions(group, conditions.getSuccessCondition(),
conditions.getSuccessConditionExp(), conditions.getSuccessAction(), conditions.getSuccessActionExp(),
conditions.getErrorCondition(), conditions.getErrorConditionExp(), conditions.getErrorAction(),
conditions.getErrorActionExp());
} |
java | protected TupleWritable createInternalValue() {
Writable[] vals = new Writable[kids.length];
for (int i = 0; i < vals.length; ++i) {
vals[i] = kids[i].createValue();
}
return new TupleWritable(vals);
} |
python | def is_local(self):
"""Return true is the partition file is local"""
from ambry.orm.exc import NotFoundError
try:
if self.local_datafile.exists:
return True
except NotFoundError:
pass
return False |
java | public void setMaxHeaderTableSize(ByteBuf out, long maxHeaderTableSize) throws Http2Exception {
if (maxHeaderTableSize < MIN_HEADER_TABLE_SIZE || maxHeaderTableSize > MAX_HEADER_TABLE_SIZE) {
throw connectionError(PROTOCOL_ERROR, "Header Table Size must be >= %d and <= %d but was %d",
MIN_HEADER_TABLE_SIZE, MAX_HEADER_TABLE_SIZE, maxHeaderTableSize);
}
if (this.maxHeaderTableSize == maxHeaderTableSize) {
return;
}
this.maxHeaderTableSize = maxHeaderTableSize;
ensureCapacity(0);
// Casting to integer is safe as we verified the maxHeaderTableSize is a valid unsigned int.
encodeInteger(out, 0x20, 5, maxHeaderTableSize);
} |
python | def variance(arg, where=None, how='sample'):
"""
Compute the variance of a numeric array
Parameters
----------
how : {'sample', 'pop'}, default 'sample'
Returns
-------
variance : double scalar
"""
expr = ops.Variance(arg, how, where).to_expr()
expr = expr.name('var')
return expr |
java | public javax.sip.address.SipURI createRecordRouteURI(boolean usePublicAddress) {
try {
String host = getIpAddress(usePublicAddress);
SipURI sipUri = SipFactoryImpl.addressFactory.createSipURI(null, host);
sipUri.setPort(port);
sipUri.setTransportParam(transport);
// Do we want to add an ID here?
return sipUri;
} catch (ParseException ex) {
logger.error ("Unexpected error while creating a record route URI",ex);
throw new IllegalArgumentException("Unexpected exception when creating a record route URI", ex);
}
} |
java | public ResourceImpl getSubResource(String name, Object parameter) {
SubResourceGetterModel getter =
resourceModel.getSubResourceGetter(name);
if (getter == null) {
throw new UnsupportedOperationException(
"No sub-resource named " + name);
}
Map<String, Object> ids = new HashMap<>();
if (getter.getParameterMapping() != null) {
ids.put(getter.getParameterMapping().getTarget(), parameter);
}
if (getter.getIdentifierMappings() != null) {
for (FlatMapping mapping : getter.getIdentifierMappings()) {
Object value = identifiers.get(mapping.getSource());
if (value == null) {
throw new IllegalStateException(
"The " + name + " subresource model has a mapping "
+ "for the " + mapping.getSource() + " identifier, but "
+ "this resource doesn't have an identifier of that "
+ "name!");
}
ids.put(mapping.getTarget(), value);
}
}
ResourceModel refTypeModel = serviceModel.getResource(name);
return new ResourceImpl(serviceModel, refTypeModel, client, ids);
} |
java | public void init(Record record, int iLastModifiedToSet, boolean bStart, boolean bEnd)
{
super.init(record, null, null, null, null, null, null);
this.setInitialKey(bStart);
this.setEndKey(bEnd);
m_iLastModifiedToSet = iLastModifiedToSet;
} |
java | public final Promise clean(String match) {
ScanArgs args = new ScanArgs();
args.limit(100);
boolean singleStar = match.indexOf('*') > -1;
boolean doubleStar = match.contains("**");
if (doubleStar) {
args.match(match.replace("**", "*"));
} else if (singleStar) {
if (match.length() > 1 && match.indexOf('.') == -1) {
match += '*';
}
args.match(match);
} else {
args.match(match);
}
if (!singleStar || doubleStar) {
match = null;
}
if (client != null) {
return new Promise(clean(client.scan(args), args, match));
}
if (clusteredClient != null) {
return new Promise(clean(clusteredClient.scan(args), args, match));
}
return Promise.resolve();
} |
python | def prepare_amazon_algorithm_estimator(estimator, inputs, mini_batch_size=None):
""" Set up amazon algorithm estimator, adding the required `feature_dim` hyperparameter from training data.
Args:
estimator (sagemaker.amazon.amazon_estimator.AmazonAlgorithmEstimatorBase):
An estimator for a built-in Amazon algorithm to get information from and update.
inputs: The training data.
* (sagemaker.amazon.amazon_estimator.RecordSet) - A collection of
Amazon :class:~`Record` objects serialized and stored in S3.
For use with an estimator for an Amazon algorithm.
* (list[sagemaker.amazon.amazon_estimator.RecordSet]) - A list of
:class:~`sagemaker.amazon.amazon_estimator.RecordSet` objects, where each instance is
a different channel of training data.
"""
if isinstance(inputs, list):
for record in inputs:
if isinstance(record, amazon_estimator.RecordSet) and record.channel == 'train':
estimator.feature_dim = record.feature_dim
break
elif isinstance(inputs, amazon_estimator.RecordSet):
estimator.feature_dim = inputs.feature_dim
else:
raise TypeError('Training data must be represented in RecordSet or list of RecordSets')
estimator.mini_batch_size = mini_batch_size |
java | public T acquire() {
if (closed) {
throw new IllegalStateException("pool closed");
}
T reference = pool.poll();
if (reference == null) {
reference = factory.createReference(this);
}
reference.acquire();
return reference;
} |
java | private <T extends Property> Map<String, String> getOverriddenImplementationConfiguration(
Collection<T> overridableProperties) {
Map<String, String> ret = new HashMap<>();
overridableProperties.forEach(p -> {
String val = getProperty(p, null);
if (val != null) {
ret.put(p.getPropertyName(), val);
}
});
implementationConfiguration.forEach(ret::putIfAbsent);
return ret;
} |
java | @Override
public void addComments(ExecutableElement property, Content propertyDocTree) {
TypeElement holder = (TypeElement)property.getEnclosingElement();
if (!utils.getFullBody(property).isEmpty()) {
if (holder.equals(typeElement) ||
(!utils.isPublic(holder) || utils.isLinkable(holder))) {
writer.addInlineComment(property, propertyDocTree);
} else {
Content link =
writer.getDocLink(LinkInfoImpl.Kind.PROPERTY_COPY,
holder, property,
utils.isIncluded(holder)
? holder.getSimpleName() : holder.getQualifiedName(),
false);
Content codeLink = HtmlTree.CODE(link);
Content descfrmLabel = HtmlTree.SPAN(HtmlStyle.descfrmTypeLabel,
utils.isClass(holder)
? contents.descfrmClassLabel
: contents.descfrmInterfaceLabel);
descfrmLabel.addContent(Contents.SPACE);
descfrmLabel.addContent(codeLink);
propertyDocTree.addContent(HtmlTree.DIV(HtmlStyle.block, descfrmLabel));
writer.addInlineComment(property, propertyDocTree);
}
}
} |
java | static final int coupon16(final byte[] identifier) {
final long[] hash = MurmurHash3.hash(identifier, SEED);
final int hllIdx = (int) (((hash[0] >>> 1) % 1024) & TEN_BIT_MASK); //hash[0] for 10-bit address
final int lz = Long.numberOfLeadingZeros(hash[1]);
final int value = (lz > 62 ? 62 : lz) + 1;
return (value << 10) | hllIdx;
} |
java | public void saveTxt(String file) throws Exception {
FileOutputStream fos = new FileOutputStream(file);
BufferedWriter bout = new BufferedWriter(new OutputStreamWriter(
fos, "UTF8"));
bout.write(this.toString());
bout.close();
} |
python | def folderitem(self, obj, item, index):
"""Applies new properties to the item (Batch) that is currently being
rendered as a row in the list
:param obj: batch to be rendered as a row in the list
:param item: dict representation of the batch, suitable for the list
:param index: current position of the item within the list
:type obj: ATContentType/DexterityContentType
:type item: dict
:type index: int
:return: the dict representation of the item
:rtype: dict
"""
obj = api.get_object(obj)
url = "{}/analysisrequests".format(api.get_url(obj))
bid = api.get_id(obj)
cbid = obj.getClientBatchID()
title = api.get_title(obj)
client = obj.getClient()
created = api.get_creation_date(obj)
date = obj.getBatchDate()
item["BatchID"] = bid
item["ClientBatchID"] = cbid
item["replace"]["BatchID"] = get_link(url, bid)
item["Title"] = title
item["replace"]["Title"] = get_link(url, title)
item["created"] = self.ulocalized_time(created, long_format=True)
item["BatchDate"] = self.ulocalized_time(date, long_format=True)
if client:
client_url = api.get_url(client)
client_name = client.getName()
client_id = client.getClientID()
item["Client"] = client_name
item["ClientID"] = client_id
item["replace"]["Client"] = get_link(client_url, client_name)
item["replace"]["ClientID"] = get_link(client_url, client_id)
return item |
java | public void marshall(GetSubscriptionStateRequest getSubscriptionStateRequest, ProtocolMarshaller protocolMarshaller) {
if (getSubscriptionStateRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} |
java | public static List<String> replaceNullValueWithEmptyGroup(@Nonnull final List<String> groups) {
Check.notNull(groups, "groups");
final List<String> result = new ArrayList<String>(groups.size());
for (final String group : groups) {
if (group == null) {
result.add(EMPTY_GROUP);
} else {
result.add(group);
}
}
for (int i = result.size(); i < MIN_GROUP_SIZE; i++) {
result.add(EMPTY_GROUP);
}
return result;
} |
python | def request(self, endpoint, method='GET', params=None, version='1.1', json_encoded=False):
"""Return dict of response received from Twitter's API
:param endpoint: (required) Full url or Twitter API endpoint
(e.g. search/tweets)
:type endpoint: string
:param method: (optional) Method of accessing data, either
GET, POST or DELETE. (default GET)
:type method: string
:param params: (optional) Dict of parameters (if any) accepted
by the Twitter API endpoint you are trying to
access (default None)
:type params: dict or None
:param version: (optional) Twitter API version to access
(default 1.1)
:type version: string
:param json_encoded: (optional) Flag to indicate if this method should send data encoded as json
(default False)
:type json_encoded: bool
:rtype: dict
"""
if endpoint.startswith('http://'):
raise TwythonError('api.twitter.com is restricted to SSL/TLS traffic.')
# In case they want to pass a full Twitter URL
# i.e. https://api.twitter.com/1.1/search/tweets.json
if endpoint.startswith('https://'):
url = endpoint
else:
url = '%s/%s.json' % (self.api_url % version, endpoint)
content = self._request(url, method=method, params=params,
api_call=url, json_encoded=json_encoded)
return content |
python | def is_ssh_available(host, port=22):
""" checks if ssh port is open """
s = socket.socket()
try:
s.connect((host, port))
return True
except:
return False |
java | @Override
public int compareTo(Instant otherInstant) {
int cmp = Long.compare(seconds, otherInstant.seconds);
if (cmp != 0) {
return cmp;
}
return nanos - otherInstant.nanos;
} |
python | def reset_network(message):
"""Resets the users network to make changes take effect"""
for command in settings.RESTART_NETWORK:
try:
subprocess.check_call(command)
except:
pass
print(message) |
python | def which_api_version(self, api_call):
""" Return QualysGuard API version for api_call specified.
"""
# Leverage patterns of calls to API methods.
if api_call.endswith('.php'):
# API v1.
return 1
elif api_call.startswith('api/2.0/'):
# API v2.
return 2
elif '/am/' in api_call:
# Asset Management API.
return 'am'
elif '/was/' in api_call:
# WAS API.
return 'was'
return False |
java | public boolean hasProperty(String category, String key) {
return this.categories.containsKey(category) && this.categories.get(category).containsKey(key);
} |
java | public static JavaRDD<String> listPaths(JavaSparkContext sc, String path, boolean recursive) throws IOException {
//NativeImageLoader.ALLOWED_FORMATS
return listPaths(sc, path, recursive, (Set<String>)null);
} |
java | public Observable<Page<JobVersionInner>> listByJobNextAsync(final String nextPageLink) {
return listByJobNextWithServiceResponseAsync(nextPageLink)
.map(new Func1<ServiceResponse<Page<JobVersionInner>>, Page<JobVersionInner>>() {
@Override
public Page<JobVersionInner> call(ServiceResponse<Page<JobVersionInner>> response) {
return response.body();
}
});
} |
java | public void marshall(Type type, ProtocolMarshaller protocolMarshaller) {
if (type == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(type.getName(), NAME_BINDING);
protocolMarshaller.marshall(type.getDescription(), DESCRIPTION_BINDING);
protocolMarshaller.marshall(type.getArn(), ARN_BINDING);
protocolMarshaller.marshall(type.getDefinition(), DEFINITION_BINDING);
protocolMarshaller.marshall(type.getFormat(), FORMAT_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} |
python | def play(self, source, *, after=None):
"""Plays an :class:`AudioSource`.
The finalizer, ``after`` is called after the source has been exhausted
or an error occurred.
If an error happens while the audio player is running, the exception is
caught and the audio player is then stopped.
Parameters
-----------
source: :class:`AudioSource`
The audio source we're reading from.
after
The finalizer that is called after the stream is exhausted.
All exceptions it throws are silently discarded. This function
must have a single parameter, ``error``, that denotes an
optional exception that was raised during playing.
Raises
-------
ClientException
Already playing audio or not connected.
TypeError
source is not a :class:`AudioSource` or after is not a callable.
"""
if not self.is_connected():
raise ClientException('Not connected to voice.')
if self.is_playing():
raise ClientException('Already playing audio.')
if not isinstance(source, AudioSource):
raise TypeError('source must be an AudioSource not {0.__class__.__name__}'.format(source))
self._player = AudioPlayer(source, self, after=after)
self._player.start() |
java | public static byte[] getQualifier(final int level, final int order) {
final byte[] suffix = (level + ":" + order).getBytes(CHARSET);
final byte[] qualifier = new byte[RULE_PREFIX.length + suffix.length];
System.arraycopy(RULE_PREFIX, 0, qualifier, 0, RULE_PREFIX.length);
System.arraycopy(suffix, 0, qualifier, RULE_PREFIX.length, suffix.length);
return qualifier;
} |
python | def strace_data_access_event(self,
operation,
address,
data,
data_mask=None,
access_width=4,
address_range=0):
"""Sets an event to trigger trace logic when data access is made.
Data access corresponds to either a read or write.
Args:
self (JLink): the ``JLink`` instance.
operation (int): one of the operations in ``JLinkStraceOperation``.
address (int): the address of the load/store data.
data (int): the data to be compared the event data to.
data_mask (int): optional bitmask specifying bits to ignore in
comparison.
access_width (int): optional access width for the data.
address_range (int): optional range of address to trigger event on.
Returns:
An integer specifying the trace event handle. This handle should be
retained in order to clear the event at a later time.
Raises:
JLinkException: on error.
"""
cmd = enums.JLinkStraceCommand.TRACE_EVENT_SET
event_info = structs.JLinkStraceEventInfo()
event_info.Type = enums.JLinkStraceEvent.DATA_ACCESS
event_info.Op = operation
event_info.AccessSize = int(access_width)
event_info.Addr = int(address)
event_info.Data = int(data)
event_info.DataMask = int(data_mask or 0)
event_info.AddrRangeSize = int(address_range)
handle = self._dll.JLINK_STRACE_Control(cmd, ctypes.byref(event_info))
if handle < 0:
raise errors.JLinkException(handle)
return handle |
python | def get_extension_classes(sort, extra_extension_paths=None):
"""
Banana banana
"""
all_classes = {}
deps_map = {}
for entry_point in pkg_resources.iter_entry_points(
group='hotdoc.extensions', name='get_extension_classes'):
if entry_point.module_name == 'hotdoc_c_extension.extensions':
continue
try:
activation_function = entry_point.load()
classes = activation_function()
# pylint: disable=broad-except
except Exception as exc:
info("Failed to load %s" % entry_point.module_name, exc)
debug(traceback.format_exc())
continue
for klass in classes:
all_classes[klass.extension_name] = klass
if extra_extension_paths:
for klass in __get_extra_extension_classes(extra_extension_paths):
all_classes[klass.extension_name] = klass
klass_list = list(all_classes.values())
if not sort:
return klass_list
for i, klass in enumerate(klass_list):
deps = klass.get_dependencies()
topodeps = set()
for dep in deps:
if dep.dependency_name not in all_classes:
if dep.optional:
continue
else:
error("setup-issue",
"Missing dependency %s for %s" %
(dep.dependency_name, klass.extension_name))
if dep.is_upstream:
topodeps.add(
klass_list.index(all_classes[dep.dependency_name]))
deps_map[i] = topodeps
sorted_class_indices = toposort_flatten(deps_map)
sorted_classes = [klass_list[i] for i in sorted_class_indices]
return sorted_classes |
python | def copy_files(src, ext, dst):
""" Copies files with extensions "ext" from "src" to "dst" directory. """
src_path = os.path.join(os.path.dirname(__file__), src)
dst_path = os.path.join(os.path.dirname(__file__), dst)
file_list = os.listdir(src_path)
for f in file_list:
if f == '__init__.py':
continue
f_path = os.path.join(src_path, f)
if os.path.isfile(f_path) and f.endswith(ext):
shutil.copy(f_path, dst_path) |
java | @Override
public final synchronized List<TrialBalanceLine> retrieveTrialBalance(
final Map<String, Object> pAddParam,
final Date pDate) throws Exception {
recalculateAllIfNeed(pAddParam, pDate);
List<TrialBalanceLine> result = new ArrayList<TrialBalanceLine>();
String query = evalQueryBalance(pAddParam, pDate);
IRecordSet<RS> recordSet = null;
try {
recordSet = getSrvDatabase().retrieveRecords(query);
if (recordSet.moveToFirst()) {
do {
String accName = recordSet
.getString("ITSNAME");
String accNumber = recordSet
.getString("ITSNUMBER");
String subaccName = recordSet
.getString("SUBACC");
Double debit = recordSet
.getDouble("DEBIT");
Double credit = recordSet
.getDouble("CREDIT");
if (debit != 0 || credit != 0) {
TrialBalanceLine tbl = new TrialBalanceLine();
tbl.setAccName(accName);
tbl.setAccNumber(accNumber);
tbl.setSubaccName(subaccName);
tbl.setDebit(BigDecimal.valueOf(debit).setScale(
getSrvAccSettings().lazyGetAccSettings(pAddParam)
.getBalancePrecision(), getSrvAccSettings()
.lazyGetAccSettings(pAddParam).getRoundingMode()));
tbl.setCredit(BigDecimal.valueOf(credit).setScale(
getSrvAccSettings().lazyGetAccSettings(pAddParam)
.getBalancePrecision(), getSrvAccSettings()
.lazyGetAccSettings(pAddParam).getRoundingMode()));
if (tbl.getDebit().doubleValue() != 0
|| tbl.getCredit().doubleValue() != 0) {
result.add(tbl);
}
}
} while (recordSet.moveToNext());
}
} finally {
if (recordSet != null) {
recordSet.close();
}
}
//account totals:
BigDecimal debitAcc = BigDecimal.ZERO;
BigDecimal creditAcc = BigDecimal.ZERO;
String accCurr = null;
int lineCurr = 0;
int lineStartAcc = 0;
for (TrialBalanceLine tbl : result) {
if (!tbl.getAccNumber().equals(accCurr)) {
//save to old
if (accCurr != null) {
for (int j = lineStartAcc; j < lineCurr; j++) {
result.get(j).setDebitAcc(debitAcc);
result.get(j).setCreditAcc(creditAcc);
}
}
//init new acc:
lineStartAcc = lineCurr;
accCurr = tbl.getAccNumber();
}
debitAcc = debitAcc.add(tbl.getDebit());
creditAcc = creditAcc.add(tbl.getCredit());
lineCurr++;
}
return result;
} |
java | private static void checkMandatoryProperties( final Analyzer analyzer,
final Jar jar,
final String symbolicName )
{
final String importPackage = analyzer.getProperty( Analyzer.IMPORT_PACKAGE );
if( importPackage == null || importPackage.trim().length() == 0 )
{
analyzer.setProperty( Analyzer.IMPORT_PACKAGE, "*;resolution:=optional" );
}
final String exportPackage = analyzer.getProperty( Analyzer.EXPORT_PACKAGE );
if( exportPackage == null || exportPackage.trim().length() == 0 )
{
analyzer.setProperty( Analyzer.EXPORT_PACKAGE, "*" );
}
final String localSymbolicName = analyzer.getProperty( Analyzer.BUNDLE_SYMBOLICNAME, symbolicName );
analyzer.setProperty( Analyzer.BUNDLE_SYMBOLICNAME, generateSymbolicName( localSymbolicName ) );
} |
java | @CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
public final <R> Maybe<R> flatMapSingleElement(final Function<? super T, ? extends SingleSource<? extends R>> mapper) {
ObjectHelper.requireNonNull(mapper, "mapper is null");
return RxJavaPlugins.onAssembly(new MaybeFlatMapSingleElement<T, R>(this, mapper));
} |
python | def init_config(self, app):
"""Initialize configuration.
:param app: An instance of :class:`~flask.Flask`.
"""
app.config.setdefault('REQUIREJS_BASEURL', app.static_folder)
app.config.setdefault('COLLECT_STATIC_ROOT', app.static_folder)
app.config.setdefault('COLLECT_STORAGE', 'flask_collect.storage.link')
app.config.setdefault(
'COLLECT_FILTER', partial(collect_staticroot_removal, app))
app.config.setdefault(
'WEBPACKEXT_PROJECT', 'invenio_assets.webpack:project') |
python | def _list_linodes(full=False):
'''
Helper function to format and parse linode data
'''
nodes = _query('linode', 'list')['DATA']
ips = get_ips()
ret = {}
for node in nodes:
this_node = {}
linode_id = six.text_type(node['LINODEID'])
this_node['id'] = linode_id
this_node['image'] = node['DISTRIBUTIONVENDOR']
this_node['name'] = node['LABEL']
this_node['size'] = node['TOTALRAM']
state = int(node['STATUS'])
this_node['state'] = _get_status_descr_by_id(state)
for key, val in six.iteritems(ips):
if key == linode_id:
this_node['private_ips'] = val['private_ips']
this_node['public_ips'] = val['public_ips']
if full:
this_node['extra'] = node
ret[node['LABEL']] = this_node
return ret |
python | def get_vehicle_health_report(session, vehicle_index):
"""Get complete vehicle health report."""
profile = get_profile(session)
_validate_vehicle(vehicle_index, profile)
return session.get(VHR_URL, params={
'uuid': profile['vehicles'][vehicle_index]['uuid']
}).json() |