language | func_code_string |
---|---|
python | def calc_temperature_stats(self):
"""
Calculates statistics in order to derive diurnal patterns of temperature
"""
self.temp.max_delta = melodist.get_shift_by_data(self.data.temp, self._lon, self._lat, self._timezone)
self.temp.mean_course = melodist.util.calculate_mean_daily_course_by_month(self.data.temp, normalize=True) |
python | def init_session(self):
"""
Defines a session object for passing requests.
"""
if self.session:
self.session.close()
self.session = make_session(self.username,
self.password,
self.bearer_token,
self.extra_headers_dict) |
python | def _write_source_code(tlobject, kind, builder, type_constructors):
"""
Writes the source code corresponding to the given TLObject
by making use of the ``builder`` `SourceBuilder`.
Additional information such as file path depth and
the ``Type: [Constructors]`` must be given for proper
importing and documentation strings.
"""
_write_class_init(tlobject, kind, type_constructors, builder)
_write_resolve(tlobject, builder)
_write_to_dict(tlobject, builder)
_write_to_bytes(tlobject, builder)
_write_from_reader(tlobject, builder)
_write_read_result(tlobject, builder) |
java | HorizontalLayout addInfoLayout(String key, Object value, boolean editable) {
HorizontalLayout res = new HorizontalLayout();
res.setWidth("100%");
res.setSpacing(true);
TextField keyField = new TextField();
keyField.setValue(key);
keyField.setEnabled(editable);
keyField.setWidth("100%");
TextField valueField = new TextField();
valueField.setValue(value.toString());
valueField.setEnabled(editable);
valueField.setWidth("100%");
res.addComponent(keyField);
res.addComponent(valueField);
res.setExpandRatio(keyField, 1);
res.setExpandRatio(valueField, 1);
return res;
} |
java | public java.util.List<String> getExecutableUsers() {
if (executableUsers == null) {
executableUsers = new com.amazonaws.internal.SdkInternalList<String>();
}
return executableUsers;
} |
java | public UnsafeMappedBuffer map(long size, FileChannel.MapMode mode) {
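// Convenience overload: map 'size' bytes starting at the buffer's current position.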
return map(position(), size, mode);
} |
python | def smoothing(self, f, w, sm, smtol, gstol):
"""
Smooths a surface f by choosing nodal function values and gradients to
minimize the linearized curvature of F subject to a bound on the
deviation from the data values. This is more appropriate than interpolation
when significant errors are present in the data.
Parameters
----------
f : array of floats, shape (n,)
field to apply smoothing on
w : array of floats, shape (n,)
weights associated with data value in f
w[i] = 1/sigma_f^2 is a good rule of thumb.
sm : float
positive parameter specifying an upper bound on Q2(f).
generally n-sqrt(2n) <= sm <= n+sqrt(2n)
smtol : float
specifies relative error in satisfying the constraint
sm(1-smtol) <= Q2 <= sm(1+smtol) between 0 and 1.
gstol : float
tolerance for convergence.
gstol = 0.05*mean(sigma_f)^2 is a good rule of thumb.
Returns
-------
f_smooth : array of floats, shape (n,)
smoothed version of f
(dfdx, dfdy, dfdz) : tuple of floats, tuple of 3 shape (n,) arrays
first derivatives of f_smooth in the x, y, and z directions
"""
if f.size != self.npoints or f.size != w.size:
raise ValueError('f and w should be the same size as mesh')
f, w = self._shuffle_field(f, w)
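# Control flags passed straight to the Fortran routine (assumed SSRFPACK
# conventions): sigma=0 with iflgs=0 requests uniform zero tension, and
# prnt=-1 suppresses diagnostic printing.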
sigma = 0
iflgs = 0
prnt = -1
f_smooth, df, ierr = _ssrfpack.smsurf(self._x, self._y, self._z, f, self.lst, self.lptr, self.lend,\
iflgs, sigma, w, sm, smtol, gstol, prnt)
if ierr < 0:
raise ValueError('ierr={} in smsurf\n{}'.format(ierr, _ier_codes[ierr]))
if ierr == 1:
raise RuntimeWarning("No errors were encountered but the constraint is not active --\n\
F, FX, and FY are the values and partials of a linear function \
which minimizes Q2(F), and Q1 = 0.")
if ierr == 2:
raise RuntimeWarning("The constraint could not be satisfied to within SMTOL\
due to ill-conditioned linear systems.")
return self._deshuffle_field(f_smooth), self._deshuffle_field(df[0], df[1], df[2]) |
java | @SuppressWarnings("unchecked")
private Set<String> getFieldsForIndex0(String index) {
if(index == null) {
return Collections.EMPTY_SET;
}
if(auditLogIndex != null && auditLogIndex.equalsIgnoreCase(index)) {
return Collections.EMPTY_SET;
}
if(auditLogPattern != null) {
if(index.equalsIgnoreCase(getExpandedIndexName(auditLogPattern, null))) {
return Collections.EMPTY_SET;
}
}
final Set<String> tmp = new HashSet<String>(100);
for(String indexPattern: readEnabledFields.keySet()) {
if(indexPattern != null && !indexPattern.isEmpty() && WildcardMatcher.match(indexPattern, index)) {
tmp.addAll(readEnabledFields.get(indexPattern));
}
}
return tmp;
} |
java | public void setValue(Node value) throws ValueFormatException, VersionException, LockException,
ConstraintViolationException, RepositoryException
{
setValue(valueFactory.createValue(value));
} |
java | public static TraceFactory getTraceFactory(NLS nls) {
return (TraceFactory) Utils.getImpl("com.ibm.ws.objectManager.utils.TraceFactoryImpl",
new Class[] { NLS.class },
new Object[] { nls });
} |
java | public void load() {
PluginDefinition definition = getDefinition();
if (!initialized && definition != null) {
BaseComponent top;
try {
initialized = true;
top = container.getFirstChild();
if (top == null) {
top = PageUtil.createPage(definition.getUrl(), container).get(0);
}
} catch (Throwable e) {
container.destroyChildren();
throw createChainedException("Initialize", e, null);
}
if (pluginControllers != null) {
for (Object controller : pluginControllers) {
top.wireController(controller);
}
}
findListeners(container);
executeAction(PluginAction.LOAD, true);
}
} |
java | @Override
public HTableDescriptor[] listTables(String regex) throws IOException {
return listTables(Pattern.compile(regex));
} |
python | def create_from_url(self, url, params=None):
''' /v1/iso/create_from_url
POST - account
Create a new ISO image on the current account.
The ISO image will be downloaded from a given URL.
Download status can be checked with the v1/iso/list call.
Link: https://www.vultr.com/api/#iso_create_from_url
'''
params = update_params(params, {
'url': url,
})
return self.request('/v1/iso/create_from_url', params, 'POST') |
python | def startup(api=None):
"""Runs the provided function on startup, passing in an instance of the api"""
def startup_wrapper(startup_function):
apply_to_api = hug.API(api) if api else hug.api.from_object(startup_function)
apply_to_api.add_startup_handler(startup_function)
return startup_function
return startup_wrapper |
python | def process_m2m(self, obj, pk_set=None, action=None, update_fields=None, cache_key=None, **kwargs):
"""Process signals from dependencies.
Remove signal is processed in two parts. For details see:
:func:`~Dependency.connect`
"""
if action not in (None, 'post_add', 'pre_remove', 'post_remove', 'post_clear'):
return
if action == 'post_remove':
build_kwargs = self.remove_cache.take(cache_key)
else:
build_kwargs = self._get_build_kwargs(obj, pk_set, action, update_fields, **kwargs)
if action == 'pre_remove':
self.remove_cache.set(cache_key, build_kwargs)
return
if build_kwargs:
self.index.build(**build_kwargs) |
java | private Period setTimeUnitInternalValue(TimeUnit unit, int value) {
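// Period is immutable: return a new instance only if the value actually changes,
// otherwise hand back this one unchanged.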
int ord = unit.ordinal;
if (counts[ord] != value) {
int[] newCounts = new int[counts.length];
for (int i = 0; i < counts.length; ++i) {
newCounts[i] = counts[i];
}
newCounts[ord] = value;
return new Period(timeLimit, inFuture, newCounts);
}
return this;
} |
python | def reconstruct_binary(self, attachments):
"""Reconstruct a decoded packet using the given list of binary
attachments.
"""
self.data = self._reconstruct_binary_internal(self.data,
self.attachments) |
java | private ImageView createBlankSpace() {
ImageView view = new ImageView(getContext());
TableRow.LayoutParams params = new TableRow.LayoutParams(mSwatchLength, mSwatchLength);
params.setMargins(mMarginSize, mMarginSize, mMarginSize, mMarginSize);
view.setLayoutParams(params);
return view;
} |
java | public void setUsernames(java.util.Collection<String> usernames) {
if (usernames == null) {
this.usernames = null;
return;
}
this.usernames = new java.util.ArrayList<String>(usernames);
} |
python | def _get_localized_fn(path, root_dir):
"""
Return absolute `path` relative to `root_dir`.
When `path` == ``/home/xex/somefile.txt`` and `root_dir` == ``/home``,
returned path will be ``/xex/somefile.txt``.
Args:
path (str): Absolute path beginning in `root_dir`.
root_dir (str): Absolute path containing `path` argument.
Returns:
str: Local `path` when `root_dir` is considered as root of FS.
"""
local_fn = path
if path.startswith(root_dir):
local_fn = path.replace(root_dir, "", 1)
if not local_fn.startswith("/"):
return "/" + local_fn
return local_fn |
java | public static boolean isCompatibleSARLLibraryVersion(String version) {
if (version != null) {
final Version currentVersion = Version.parseVersion(SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING);
final Version paramVersion = Version.parseVersion(version);
return currentVersion.getMajor() == paramVersion.getMajor()
&& currentVersion.getMinor() == paramVersion.getMinor();
}
return false;
} |
python | def _load_playbook_from_file(self, path, vars={}):
'''
run top level error checking on playbooks and allow them to include other playbooks.
'''
playbook_data = utils.parse_yaml_from_file(path)
accumulated_plays = []
play_basedirs = []
if type(playbook_data) != list:
raise errors.AnsibleError("parse error: playbooks must be formatted as a YAML list")
basedir = os.path.dirname(path) or '.'
utils.plugins.push_basedir(basedir)
for play in playbook_data:
if type(play) != dict:
raise errors.AnsibleError("parse error: each play in a playbook must a YAML dictionary (hash), recieved: %s" % play)
if 'include' in play:
tokens = shlex.split(play['include'])
items = ['']
for k in play.keys():
if not k.startswith("with_"):
# These are the keys allowed to be mixed with playbook includes
if k in ("include", "vars"):
continue
else:
raise errors.AnsibleError("parse error: playbook includes cannot be used with other directives: %s" % play)
plugin_name = k[5:]
if plugin_name not in utils.plugins.lookup_loader:
raise errors.AnsibleError("cannot find lookup plugin named %s for usage in with_%s" % (plugin_name, plugin_name))
terms = utils.template_ds(basedir, play[k], vars)
items = utils.plugins.lookup_loader.get(plugin_name, basedir=basedir, runner=None).run(terms, inject=vars)
for item in items:
incvars = vars.copy()
incvars['item'] = item
if 'vars' in play:
if isinstance(play['vars'], dict):
incvars.update(play['vars'])
elif isinstance(play['vars'], list):
for v in play['vars']:
incvars.update(v)
for t in tokens[1:]:
(k,v) = t.split("=", 1)
incvars[k] = utils.template_ds(basedir, v, incvars)
included_path = utils.path_dwim(basedir, tokens[0])
(plays, basedirs) = self._load_playbook_from_file(included_path, incvars)
for p in plays:
if 'vars' not in p:
p['vars'] = {}
if isinstance(p['vars'], dict):
p['vars'].update(incvars)
elif isinstance(p['vars'], list):
p['vars'].extend([{k: v} for k, v in incvars.iteritems()])
accumulated_plays.extend(plays)
play_basedirs.extend(basedirs)
else:
accumulated_plays.append(play)
play_basedirs.append(basedir)
return (accumulated_plays, play_basedirs) |
java | public String getHeader(String name)
{
if (_header != null)
{
String strVal = _header.getHeader(name);
if (strVal != null)
return strVal;
}
if (this.headerTable != null)
{
int i = 0;
for (Object obj : headerTable[0])
{
String strVal = (String) obj;
if (name.equals(strVal))
{
return (String) headerTable[1].get(i);
}
i++;
}
}
return null;
} |
python | def list_lzh (archive, compression, cmd, verbosity, interactive):
"""List a LZH archive."""
cmdlist = [cmd]
if verbosity > 1:
cmdlist.append('v')
else:
cmdlist.append('l')
cmdlist.append(archive)
return cmdlist |
java | public static void sqlcurdate(StringBuilder buf, List<? extends CharSequence> parsedArgs) throws SQLException {
zeroArgumentFunctionCall(buf, "current_date", "curdate", parsedArgs);
} |
python | def get_first(self, table=None):
"""Just the first entry."""
if table is None: table = self.main_table
query = 'SELECT * FROM "%s" LIMIT 1;' % table
return self.own_cursor.execute(query).fetchone() |
python | def list_instances_json(self, application=None, show_only_destroyed=False):
""" Get list of instances in json format converted to list"""
# todo: application should not be a parameter here. Application should do its own list, just for the sake of code reuse
q_filter = {'sortBy': 'byCreation', 'descending': 'true',
'mode': 'short',
'from': '0', 'to': '10000'}
if not show_only_destroyed:
q_filter['showDestroyed'] = 'false'
else:
q_filter['showDestroyed'] = 'true'
q_filter['showRunning'] = 'false'
q_filter['showError'] = 'false'
q_filter['showLaunching'] = 'false'
if application:
q_filter["applicationFilterId"] = application.applicationId
resp_json = self._router.get_instances(org_id=self.organizationId, params=q_filter).json()
if type(resp_json) == dict:
instances = [instance for g in resp_json['groups'] for instance in g['records']]
else: # TODO: This is compatibility fix for platform < 37.1
instances = resp_json
return instances |
python | def connect(self, host, port):
'''Connect to the provided host, port'''
conn = connection.Connection(host, port,
reconnection_backoff=self._reconnection_backoff,
auth_secret=self._auth_secret,
timeout=self._connect_timeout,
**self._identify_options)
if conn.alive():
conn.setblocking(0)
self.add(conn)
return conn |
java | @Override
public String convertSchema(Schema inputSchema, WorkUnitState workUnit) throws SchemaConversionException {
return EnvelopeSchemaConverter.class.getName();
} |
python | def _find_logs(self, compile_workunit):
"""Finds all logs under the given workunit."""
for idx, workunit in enumerate(compile_workunit.children):
for output_name, outpath in workunit.output_paths().items():
if output_name in ('stdout', 'stderr'):
yield idx, workunit.name, output_name, outpath |
python | def replace(needle, with_=None, in_=None):
"""Replace occurrences of string(s) with other string(s) in (a) string(s).
Unlike the built in :meth:`str.replace` method, this function provides
clean API that clearly distinguishes the "needle" (string to replace),
the replacement string, and the target string to perform replacement in
(the "haystack").
Additionally, a simultaneous replacement of several needles is possible.
Note that this is different from performing multiple separate replacements
one after another.
Examples::
replace('foo', with_='bar', in_=some_text)
replace('foo', with_='bar').in_(other_text)
replace('foo').with_('bar').in_(another_text)
replace(['foo', 'bar']).with_('baz').in_(perhaps_a_long_text)
replace({'foo': 'bar', 'baz': 'qud'}).in_(even_longer_text)
:param needle: String to replace, iterable thereof,
or a mapping from needles to corresponding replacements
:param with_: Replacement string, if ``needle`` was not a mapping
:param in_: Optional string to perform replacement in
:return: If all parameters were provided, result is the final string
after performing a specified replacement.
Otherwise, a :class:`Replacer` object is returned, allowing
e.g. to perform the same replacements in many haystacks.
"""
if needle is None:
raise TypeError("replacement needle cannot be None")
if not needle:
raise ValueError("replacement needle cannot be empty")
if is_string(needle):
replacer = Replacer((needle,))
else:
ensure_iterable(needle)
if not is_mapping(needle):
if all(imap(is_pair, needle)):
needle = dict(needle)
elif not all(imap(is_string, needle)):
raise TypeError("invalid replacement needle")
replacer = Replacer(needle)
if with_ is not None:
ensure_string(with_)
replacer = replacer.with_(with_)
if in_ is not None:
ensure_string(in_)
return replacer.in_(in_)
return replacer |
python | def walkSignalPorts(rootPort: LPort):
"""
recursively walk ports without any children
"""
if rootPort.children:
for ch in rootPort.children:
yield from walkSignalPorts(ch)
else:
yield rootPort |
java | public boolean deleteByIds(Object... idValues) {
Table table = _getTable();
if (idValues == null || idValues.length != table.getPrimaryKey().length)
throw new IllegalArgumentException("Primary key nubmer must equals id value number and can not be null");
return deleteById(table, idValues);
} |
python | def rm(pattern):
"""Recursively remove a file or dir by pattern."""
paths = glob.glob(pattern)
for path in paths:
if path.startswith('.git/'):
continue
if os.path.isdir(path):
def onerror(fun, path, excinfo):
exc = excinfo[1]
if exc.errno != errno.ENOENT:
raise
safe_print("rmdir -f %s" % path)
shutil.rmtree(path, onerror=onerror)
else:
safe_print("rm %s" % path)
os.remove(path) |
python | def assumption_list_string(assumptions, assumption_dict):
'''
Takes in a list of short forms of assumptions and an assumption
dictionary, and returns a "list" form of the long form of the
assumptions.
Raises
------
ValueError
if one of the assumptions is not in assumption_dict.
'''
if isinstance(assumptions, six.string_types):
raise TypeError('assumptions must be an iterable of strings, not a '
'string itself')
for a in assumptions:
if a not in assumption_dict.keys():
raise ValueError('{} not present in assumption_dict'.format(a))
assumption_strings = [assumption_dict[a] for a in assumptions]
return strings_to_list_string(assumption_strings) |
java | double sparseProbabilisticAlgorithmCardinality() {
final int m = this.m/*for performance*/;
// compute the "indicator function" -- sum(2^(-M[j])) where M[j] is the
// 'j'th register value
double sum = 0;
int numberOfZeroes = 0/*"V" in the paper*/;
for(int j=0; j<m; j++) {
final long register = sparseProbabilisticStorage.get(j);
sum += 1.0 / (1L << register);
if(register == 0L) numberOfZeroes++;
}
// apply the estimate and correction to the indicator function
final double estimator = alphaMSquared / sum;
if((numberOfZeroes != 0) && (estimator < smallEstimatorCutoff)) {
return HLLUtil.smallEstimator(m, numberOfZeroes);
} else if(estimator <= largeEstimatorCutoff) {
return estimator;
} else {
return HLLUtil.largeEstimator(log2m, regwidth, estimator);
}
} |
python | def get_state(self, caller):
"""
Get per-program state.
"""
if caller in self.state:
return self.state[caller]
else:
rv = self.state[caller] = DictObject()
return rv |
python | def update_aliases(self):
""" Get aliases information from room state
Returns:
boolean: True if the aliases changed, False if not
"""
changed = False
try:
response = self.client.api.get_room_state(self.room_id)
except MatrixRequestError:
return False
for chunk in response:
content = chunk.get('content')
if content:
if 'aliases' in content:
aliases = content['aliases']
if aliases != self.aliases:
self.aliases = aliases
changed = True
if chunk.get('type') == 'm.room.canonical_alias':
canonical_alias = content['alias']
if self.canonical_alias != canonical_alias:
self.canonical_alias = canonical_alias
changed = True
if changed and self.aliases and not self.canonical_alias:
self.canonical_alias = self.aliases[0]
return changed |
java | public static String missingFieldName(ScmPluginException e) {
if (isFieldMissing(e)) {
return ((ScmUserInfoMissing) e).getFieldName();
}
return null;
} |
java | public com.google.api.ads.admanager.axis.v201811.PlacementTargeting getPlacementSegment() {
return placementSegment;
} |
java | public static MozuUrl getShippingInclusionRulesUrl(String profilecode, String responseFields)
{
UrlFormatter formatter = new UrlFormatter("/api/commerce/shipping/admin/profiles/{profilecode}/rules/shippinginclusions?responseFields={responseFields}");
formatter.formatUrl("profilecode", profilecode);
formatter.formatUrl("responseFields", responseFields);
return new MozuUrl(formatter.getResourceUrl(), MozuUrl.UrlLocation.TENANT_POD) ;
} |
java | @Override
public void onActivityStopped(final Activity activity) {
if (foregroundActivities.decrementAndGet() < 0) {
ApptentiveLog.e("Incorrect number of foreground Activities encountered. Resetting to 0.");
foregroundActivities.set(0);
}
if (checkFgBgRoutine != null) {
delayedChecker.removeCallbacks(checkFgBgRoutine);
}
/* When one activity transits to another one, there is a brief period during which the former
* is paused but the latter has not yet resumed. To prevent false negative, check routine is
* delayed
*/
delayedChecker.postDelayed(checkFgBgRoutine = new Runnable() {
@Override
public void run() {
try {
if (foregroundActivities.get() == 0 && isAppForeground) {
appEnteredBackground();
isAppForeground = false;
}
} catch (Exception e) {
ApptentiveLog.e(e, "Exception in delayed checking");
ErrorMetrics.logException(e);
}
}
}, CHECK_DELAY_SHORT);
dispatchOnConversationQueue(new DispatchTask() {
@Override
protected void execute() {
ApptentiveNotificationCenter.defaultCenter()
.postNotification(NOTIFICATION_ACTIVITY_STOPPED, NOTIFICATION_KEY_ACTIVITY, activity);
}
});
} |
java | public DataSink<T> printOnTaskManager(String prefix) {
return output(new PrintingOutputFormat<T>(prefix, false));
} |
java | @Override
public EClass getIfcSegmentIndexSelect() {
if (ifcSegmentIndexSelectEClass == null) {
ifcSegmentIndexSelectEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(1156);
}
return ifcSegmentIndexSelectEClass;
} |
java | public static String setParameters(String value, final Object[] params) {
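// Note: replaceAll treats params[i].toString() as a regex replacement string,
// so '$' or '\' in a parameter value can corrupt the result
// (Matcher.quoteReplacement would avoid this).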
if (params != null) {
for (int i = 0; i < params.length; i++) {
value = value.replaceAll("\\{" + i + "\\}", params[i].toString());
}
}
return value;
} |
java | private static String decodeFormFields(final String content, final Charset charset) {
if (content == null) {
return null;
}
return urlDecode(content, (charset != null) ? charset : Charsets.UTF_8, true);
} |
python | def _filter_matching_lines(self, city_name, country, matching):
"""
Returns an iterable whose items are the lists of split tokens of every
text line matched against the city ID files according to the provided
combination of city_name, country and matching style
:param city_name: str
:param country: str or `None`
:param matching: str
:return: list of lists
"""
result = list()
# find the right file to scan and extract its lines. Upon "like"
# matchings, just read all files
if matching == 'like':
lines = [l.strip() for l in self._get_all_lines()]
else:
filename = self._assess_subfile_from(city_name)
lines = [l.strip() for l in self._get_lines(filename)]
# look for toponyms matching the specified city_name and according to
# the specified matching style
for line in lines:
tokens = line.split(",")
# sometimes city names have an inner comma...
if len(tokens) == 6:
tokens = [tokens[0]+','+tokens[1], tokens[2], tokens[3],
tokens[4], tokens[5]]
# check country
if country is not None:
if tokens[4] != country:
continue
# check city_name
if self._city_name_matches(city_name, tokens[0], matching):
result.append(tokens)
return result |
python | def mostCommonElement(elements: Iterable[T], to_hashable_f: Callable=None):
"""
Find the most frequent element of a collection.
:param elements: An iterable of elements
:param to_hashable_f: (optional) if defined will be used to get
hashable presentation for non-hashable elements. Otherwise json.dumps
is used with sort_keys=True
:return: element which is the most frequent in the collection and
the number of its occurrences
"""
class _Hashable(collections.abc.Hashable):
def __init__(self, orig):
self.orig = orig
if isinstance(orig, collections.abc.Hashable):
self.hashable = orig
elif to_hashable_f is not None:
self.hashable = to_hashable_f(orig)
else:
self.hashable = json.dumps(orig, sort_keys=True)
def __eq__(self, other):
return self.hashable == other.hashable
def __hash__(self):
return hash(self.hashable)
_elements = (_Hashable(el) for el in elements)
most_common, counter = Counter(_elements).most_common(n=1)[0]
return most_common.orig, counter |
java | protected List<ENTITY> findColumns(List<Match> matches, String[] columns) {
Query query = queryGenerator.getMiniSelectQuery(Arrays.asList(columns), matches);
return findBySQL(query.getSql(), query.getParams());
} |
python | def instance(self):
"""Content instance of the wrapped object
"""
if self._instance is None:
logger.debug("SuperModel::instance: *Wakup object*")
self._instance = api.get_object(self.brain)
return self._instance |
java | public static Set<IBond> findMappedBonds(IReaction reaction) {
Set<IBond> mapped = new HashSet<>();
// first we collect the occurrence of mapped bonds from reactants then products
Set<IntTuple> mappedReactantBonds = new HashSet<>();
Set<IntTuple> mappedProductBonds = new HashSet<>();
for (IAtomContainer reactant : reaction.getReactants().atomContainers()) {
for (IBond bond : reactant.bonds()) {
Integer begidx = bond.getBegin().getProperty(CDKConstants.ATOM_ATOM_MAPPING);
Integer endidx = bond.getEnd().getProperty(CDKConstants.ATOM_ATOM_MAPPING);
if (begidx != null && endidx != null)
mappedReactantBonds.add(new IntTuple(begidx, endidx));
}
}
// fail fast
if (mappedReactantBonds.isEmpty())
return Collections.emptySet();
for (IAtomContainer product : reaction.getProducts().atomContainers()) {
for (IBond bond : product.bonds()) {
Integer begidx = bond.getBegin().getProperty(CDKConstants.ATOM_ATOM_MAPPING);
Integer endidx = bond.getEnd().getProperty(CDKConstants.ATOM_ATOM_MAPPING);
if (begidx != null && endidx != null)
mappedProductBonds.add(new IntTuple(begidx, endidx));
}
}
// fail fast
if (mappedProductBonds.isEmpty())
return Collections.emptySet();
// scan again, collecting bonds whose mapped atom pair occurs on both the reactant and product side
for (IAtomContainer reactant : reaction.getReactants().atomContainers()) {
for (IBond bond : reactant.bonds()) {
Integer begidx = bond.getBegin().getProperty(CDKConstants.ATOM_ATOM_MAPPING);
Integer endidx = bond.getEnd().getProperty(CDKConstants.ATOM_ATOM_MAPPING);
if (begidx != null && endidx != null && mappedProductBonds.contains(new IntTuple(begidx, endidx)))
mapped.add(bond);
}
}
for (IAtomContainer product : reaction.getProducts().atomContainers()) {
for (IBond bond : product.bonds()) {
Integer begidx = bond.getBegin().getProperty(CDKConstants.ATOM_ATOM_MAPPING);
Integer endidx = bond.getEnd().getProperty(CDKConstants.ATOM_ATOM_MAPPING);
if (begidx != null && endidx != null && mappedReactantBonds.contains(new IntTuple(begidx, endidx)))
mapped.add(bond);
}
}
return mapped;
} |
python | def alias(ctx, search, backend):
"""
Searches for the given project and interactively add an alias for it.
"""
projects = ctx.obj['projects_db'].search(search, active_only=True)
projects = sorted(projects, key=lambda project: project.name)
if len(projects) == 0:
ctx.obj['view'].msg(
"No active project matches your search string '%s'." %
''.join(search)
)
return
ctx.obj['view'].projects_list(projects, True)
try:
number = ctx.obj['view'].select_project(projects)
except CancelException:
return
project = projects[number]
ctx.obj['view'].project_with_activities(project, numbered_activities=True)
try:
number = ctx.obj['view'].select_activity(project.activities)
except CancelException:
return
retry = True
while retry:
try:
alias = ctx.obj['view'].select_alias()
except CancelException:
return
if alias in aliases_database:
mapping = aliases_database[alias]
overwrite = ctx.obj['view'].overwrite_alias(alias, mapping)
if not overwrite:
return
elif overwrite:
retry = False
# User chose "retry"
else:
retry = True
else:
retry = False
activity = project.activities[number]
mapping = Mapping(mapping=(project.id, activity.id),
backend=project.backend)
ctx.obj['settings'].add_alias(alias, mapping)
ctx.obj['settings'].write_config()
ctx.obj['view'].alias_added(alias, (project.id, activity.id)) |
python | def close_session_log(self):
"""Close the session_log file (if it is a file that we opened)."""
if self.session_log is not None and self._session_log_close:
self.session_log.close()
self.session_log = None |
python | def num_no_signups(self):
"""How many people have not signed up?"""
students_count = User.objects.get_students().count()
return students_count - self.num_signups()
java | public static int whichMax(double[] x) {
double m = Double.NEGATIVE_INFINITY;
int which = 0;
for (int i = 0; i < x.length; i++) {
if (x[i] > m) {
m = x[i];
which = i;
}
}
return which;
} |
python | def module_refresh(self, force_refresh=False, notify=False):
'''
Refresh the functions and returners.
'''
log.debug('Refreshing modules. Notify=%s', notify)
self.functions, self.returners, _, self.executors = self._load_modules(force_refresh, notify=notify)
self.schedule.functions = self.functions
self.schedule.returners = self.returners |
python | def authenticate(self):
"""Send login request and update User instance, login headers, and token expiration"""
# Temporarily remove auth from Swimlane session for auth request to avoid recursive loop during login request
self._swimlane._session.auth = None
resp = self._swimlane.request(
'post',
'user/login',
json={
'userName': self._username,
'password': self._password
},
)
self._swimlane._session.auth = self
# Get JWT from response content
json_content = resp.json()
token = json_content.pop('token', None)
# Grab token expiration
token_data = jwt.decode(token, verify=False)
token_expiration = pendulum.from_timestamp(token_data['exp'])
headers = {
'Authorization': 'Bearer {}'.format(token)
}
# Create User instance for authenticating user from login response data
user = User(self._swimlane, _user_raw_from_login_content(json_content))
self._login_headers = headers
self.user = user
self._token_expiration = token_expiration |
java | private LinkedHashMap<HypergraphNode<Variable>, Integer> createInitialOrdering(final Formula formula, final Map<Variable, HypergraphNode<Variable>> nodes) {
final LinkedHashMap<HypergraphNode<Variable>, Integer> initialOrdering = new LinkedHashMap<>();
final List<Variable> dfsOrder = this.dfsOrdering.getOrder(formula);
for (int i = 0; i < dfsOrder.size(); i++)
initialOrdering.put(nodes.get(dfsOrder.get(i)), i);
return initialOrdering;
} |
python | def gone_online(stream):
"""
Distributes the users online status to everyone he has dialog with
"""
while True:
packet = yield from stream.get()
session_id = packet.get('session_key')
if session_id:
user_owner = get_user_from_session(session_id)
if user_owner:
logger.debug('User ' + user_owner.username + ' gone online')
# find all connections including user_owner as opponent,
# send them a message that the user has gone online
online_opponents = list(filter(lambda x: x[1] == user_owner.username, ws_connections))
online_opponents_sockets = [ws_connections[i] for i in online_opponents]
yield from fanout_message(online_opponents_sockets,
{'type': 'gone-online', 'usernames': [user_owner.username]})
else:
pass # invalid session id
else:
pass |
java | @Override
protected void submitFaxJobImpl(FaxJob faxJob)
{
//create command
String command=this.createSubmitFaxCommand(faxJob);
//execute process
this.executeProcess(command,FaxActionType.SUBMIT_FAX_JOB);
} |
java | @Override
public boolean isMachineRunning( TargetHandlerParameters parameters, String machineId )
throws TargetException {
boolean result = false;
try {
DockerClient dockerClient = DockerUtils.createDockerClient( parameters.getTargetProperties());
ContainerState state = DockerUtils.getContainerState( machineId, dockerClient );
result = state != null && extractBoolean( state.getRunning());
} catch( Exception e ) {
// nothing, we consider it is not running
Utils.logException( this.logger, e );
}
return result;
} |
java | public static int copyWsByteBuffer(WsByteBuffer src, WsByteBuffer dst, int amount)
{
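// Copy at most 'amount' bytes, clamped to the space remaining in both buffers;
// the source limit is narrowed temporarily so put() transfers exactly that many.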
int amountCopied = amount;
int dstRemaining = dst.remaining();
int srcRemaining = src.remaining();
if (amountCopied > dstRemaining) amountCopied = dstRemaining;
if (amountCopied > srcRemaining) amountCopied = srcRemaining;
if (amountCopied > 0)
{
int srcLimit = src.limit();
src.limit(src.position()+amountCopied);
dst.put(src);
src.limit(srcLimit);
}
return amountCopied;
} |
java | @Contract(pure = true)
public static ByteBuf empty() {
assert EMPTY.head == 0;
assert EMPTY.tail == 0;
return EMPTY;
} |
java | public AttributedCharacterIterator formatToCharacterIterator(Object arguments) {
StringBuffer result = new StringBuffer();
ArrayList iterators = new ArrayList();
if (arguments == null) {
throw new NullPointerException(
"formatToCharacterIterator must be passed non-null object");
}
subformat((Object[]) arguments, result, null, iterators);
if (iterators.size() == 0) {
return createAttributedCharacterIterator("");
}
return createAttributedCharacterIterator(
(AttributedCharacterIterator[])iterators.toArray(
new AttributedCharacterIterator[iterators.size()]));
} |
java | public String toBitString() {
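// Shift the significant bits down so they align with the low end of the byte
// array, render each byte as zero-padded binary, then trim or left-pad the
// result to exactly bitLength characters.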
StringBuffer sb = new StringBuffer();
final int shift = this.bytes.length * 8 - this.bitLength;
final byte[] shiftRight = shiftRight(this.bytes, shift);
for (final byte b : shiftRight) {
String asString = Integer.toBinaryString(b & 0xFF);
while (asString.length() < 8) {
asString = "0" + asString;
}
sb.append(asString);
}
if (sb.length() >= this.bitLength) {
return sb.substring(sb.length() - this.bitLength, sb.length());
}
else {
final String n = sb.toString();
sb = new StringBuffer();
while (sb.length() + n.length() < this.bitLength) {
sb.append("0");
}
return sb + n;
}
} |
python | def clean_title(title):
"""
Clean title -> remove dates, remove duplicated spaces and strip title.
Args:
title (str): Title.
Returns:
str: Clean title without dates, duplicated, trailing and leading spaces.
"""
date_pattern = re.compile(r'\W*'
r'\d{1,2}'
r'[/\-.]'
r'\d{1,2}'
r'[/\-.]'
r'(?=\d*)(?:.{4}|.{2})'
r'\W*')
title = date_pattern.sub(' ', title)
title = re.sub(r'\s{2,}', ' ', title)
title = title.strip()
return title |
python | def currentMode( self ):
"""
Returns the current mode for this widget.
:return <XOrbBrowserWidget.Mode>
"""
if ( self.uiCardACT.isChecked() ):
return XOrbBrowserWidget.Mode.Card
elif ( self.uiDetailsACT.isChecked() ):
return XOrbBrowserWidget.Mode.Detail
else:
return XOrbBrowserWidget.Mode.Thumbnail |
java | @SafeVarargs
public static <T> Iterable<List<T>> cartesianProduct(final Iterable<T>... iterables) {
if (iterables.length == 0) {
return Collections.singletonList(Collections.emptyList());
}
return () -> new AllCombinationsIterator<>(iterables);
} |
python | def collect(config, pconn):
"""
All the heavy lifting done here
"""
# initialize collection target
# tar files
if config.analyze_file:
logger.debug("Client analyzing a compress filesystem.")
target = {'type': 'compressed_file',
'name': os.path.splitext(
os.path.basename(config.analyze_file))[0],
'location': config.analyze_file}
# mountpoints
elif config.analyze_mountpoint:
logger.debug("Client analyzing a filesystem already mounted.")
target = {'type': 'mountpoint',
'name': os.path.splitext(
os.path.basename(config.analyze_mountpoint))[0],
'location': config.analyze_mountpoint}
# image
elif config.analyze_image_id:
logger.debug("Client running in image mode.")
logger.debug("Scanning for matching image.")
from .containers import get_targets
targets = get_targets(config)
if len(targets) == 0:
sys.exit(constants.sig_kill_bad)
target = targets[0]
# host, or inside container
else:
if config.analyze_container:
logger.debug('Client running in container mode.')
else:
logger.debug("Host selected as scanning target.")
target = constants.default_target
branch_info = get_branch_info(config, pconn)
pc = InsightsUploadConf(config)
tar_file = None
collection_rules = pc.get_conf_file()
rm_conf = pc.get_rm_conf()
if rm_conf:
logger.warn("WARNING: Excluding data from files")
# defaults
archive = None
container_connection = None
mp = None
compressed_filesystem = None
try:
# analyze docker images
if target['type'] == 'docker_image':
from .containers import open_image
container_connection = open_image(target['name'])
logging_name = 'Docker image ' + target['name']
if container_connection:
mp = container_connection.get_fs()
else:
logger.error('Could not open %s for analysis', logging_name)
return False
# analyze compressed files
elif target['type'] == 'compressed_file':
logging_name = 'Compressed file ' + target['name'] + ' at location ' + target['location']
from .compressed_file import InsightsCompressedFile
compressed_filesystem = InsightsCompressedFile(target['location'])
if compressed_filesystem.is_tarfile is False:
logger.debug("Could not access compressed tar filesystem.")
return False
mp = compressed_filesystem.get_filesystem_path()
# analyze mountpoints
elif target['type'] == 'mountpoint':
logging_name = 'Filesystem ' + target['name'] + ' at location ' + target['location']
mp = config.analyze_mountpoint
# analyze the host
elif target['type'] == 'host':
logging_name = determine_hostname()
# nothing found to analyze
else:
logger.error('Unexpected analysis target: %s', target['type'])
return False
archive = InsightsArchive(compressor=config.compressor,
target_name=target['name'])
atexit.register(_delete_archive_internal, config, archive)
# determine the target type and begin collection
# we infer "docker_image" SPEC analysis for certain types
if target['type'] in ["mountpoint", "compressed_file"]:
target_type = "docker_image"
else:
target_type = target['type']
logger.debug("Inferring target_type '%s' for SPEC collection", target_type)
logger.debug("Inferred from '%s'", target['type'])
dc = DataCollector(config, archive, mountpoint=mp)
logger.info('Starting to collect Insights data for %s', logging_name)
dc.run_collection(collection_rules, rm_conf, branch_info)
tar_file = dc.done(collection_rules, rm_conf)
finally:
# called on loop iter end or unexpected exit
if container_connection:
container_connection.close()
# cleanup the temporary stuff for analyzing tar files
if config.analyze_file is not None and compressed_filesystem is not None:
compressed_filesystem.cleanup_temp_filesystem()
return tar_file |
java | @Override
public void remove(Class<? extends IBasicEntity> type, String key) throws CachingException {
EntityCachingServiceLocator.getEntityCachingService().remove(type, key);
} |
java | public Set<String> getSkipBuildPhrases() {
return new HashSet<String>(Arrays.asList(getTrigger().getSkipBuildPhrase().split("[\\r\\n]+")));
} |
python | def main(argv=None):
"""
Run wake on lan as a CLI application.
"""
parser = argparse.ArgumentParser(
description='Wake one or more computers using the wake on lan'
' protocol.')
parser.add_argument(
'macs',
metavar='mac address',
nargs='+',
help='The mac addresses of the computers you are trying to wake.')
parser.add_argument(
'-i',
metavar='ip',
default=BROADCAST_IP,
help='The ip address of the host to send the magic packet to.'
' (default {})'.format(BROADCAST_IP))
parser.add_argument(
'-p',
metavar='port',
type=int,
default=DEFAULT_PORT,
help='The port of the host to send the magic packet to (default 9)')
args = parser.parse_args(argv)
send_magic_packet(*args.macs, ip_address=args.i, port=args.p) |
java | void consume(List<PassFactory> factories) {
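// Batch consecutive looped passes into a single Loop; a one-time pass flushes
// the current batch before being added on its own.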
Loop currentLoop = new Loop();
for (PassFactory factory : factories) {
if (factory.isOneTimePass()) {
if (currentLoop.isPopulated()) {
passes.add(currentLoop);
currentLoop = new Loop();
}
addOneTimePass(factory);
} else {
currentLoop.addLoopedPass(factory);
}
}
if (currentLoop.isPopulated()) {
passes.add(currentLoop);
}
} |
java | public MethodDoc wrap(MethodDoc source) {
if (source == null || source instanceof Proxy<?> || !(source instanceof MethodDocImpl)) {
return source;
}
return new MethodDocWrapper((MethodDocImpl) source);
} |
java | public static final <T,R> Function<T,R> ifNotNullThenElse(
final Type<T> targetType,
final IFunction<? super T,R> thenFunction, final IFunction<? super T,R> elseFunction) {
return ifTrueThenElse(targetType, FnObject.isNotNull(), thenFunction, elseFunction);
} |
python | def managed(name,
config,
api_url=None,
page_id=None,
api_key=None,
api_version=None,
pace=_PACE,
allow_empty=False):
'''
Manage the StatusPage configuration.
config
Dictionary with the expected configuration of the StatusPage.
The main level keys of this dictionary represent the endpoint name.
If a certain endpoint does not exist in this structure, it will be ignored / not configured.
page_id
Page ID. Can also be specified in the config file.
api_key
API key. Can also be specified in the config file.
api_version: 1
API version. Can also be specified in the config file.
api_url
Custom API URL in case the user has a StatusPage service running in a custom environment.
pace: 1
Max requests per second allowed by the API.
allow_empty: False
Allow empty config.
SLS example:
.. code-block:: yaml
my-statuspage-config:
statuspage.managed:
- config:
components:
- name: component1
group_id: uy4g37rf
- name: component2
group_id: 3n4uyu4gf
incidents:
- name: incident1
status: resolved
impact: major
backfilled: false
- name: incident2
status: investigating
impact: minor
'''
complete_diff = {}
ret = _default_ret(name)
if not config and not allow_empty:
ret.update({
'result': False,
'comment': 'Cannot remove everything. To allow this, please set the option `allow_empty` as True.'
})
return ret
is_empty = True
for endpoint_name, endpoint_expected_config in six.iteritems(config):
if endpoint_expected_config:
is_empty = False
endpoint_existing_config_ret = __salt__['statuspage.retrieve'](endpoint=endpoint_name,
api_url=api_url,
page_id=page_id,
api_key=api_key,
api_version=api_version)
if not endpoint_existing_config_ret.get('result'):
ret.update({
'comment': endpoint_existing_config_ret.get('comment')
})
return ret # stop at first error
endpoint_existing_config = endpoint_existing_config_ret.get('out')
complete_diff[endpoint_name] = _compute_diff(endpoint_expected_config, endpoint_existing_config)
if is_empty and not allow_empty:
ret.update({
'result': False,
'comment': 'Cannot remove everything. To allow this, please set the option `allow_empty` as True.'
})
return ret
any_changes = False
for endpoint_name, endpoint_diff in six.iteritems(complete_diff):
if endpoint_diff.get('add') or endpoint_diff.get('update') or endpoint_diff.get('remove'):
any_changes = True
if not any_changes:
ret.update({
'result': True,
'comment': 'No changes required.',
'changes': {}
})
return ret
ret.update({
'changes': complete_diff
})
if __opts__.get('test'):
ret.update({
'comment': 'Testing mode. Would apply the following changes:',
'result': None
})
return ret
for endpoint_name, endpoint_diff in six.iteritems(complete_diff):
endpoint_sg = endpoint_name[:-1] # singular
for new_endpoint in endpoint_diff.get('add'):
log.debug('Defining new %s %s',
endpoint_sg,
new_endpoint
)
adding = __salt__['statuspage.create'](endpoint=endpoint_name,
api_url=api_url,
page_id=page_id,
api_key=api_key,
api_version=api_version,
**new_endpoint)
if not adding.get('result'):
ret.update({
'comment': adding.get('comment')
})
return ret
if pace:
time.sleep(1/pace)
for update_endpoint in endpoint_diff.get('update'):
if 'id' not in update_endpoint:
continue
endpoint_id = update_endpoint.pop('id')
log.debug('Updating %s #%s: %s',
endpoint_sg,
endpoint_id,
update_endpoint
)
updating = __salt__['statuspage.update'](endpoint=endpoint_name,
id=endpoint_id,
api_url=api_url,
page_id=page_id,
api_key=api_key,
api_version=api_version,
**update_endpoint)
if not updating.get('result'):
ret.update({
'comment': updating.get('comment')
})
return ret
if pace:
time.sleep(1/pace)
for remove_endpoint in endpoint_diff.get('remove'):
if 'id' not in remove_endpoint:
continue
endpoint_id = remove_endpoint.pop('id')
log.debug('Removing %s #%s',
endpoint_sg,
endpoint_id
)
removing = __salt__['statuspage.delete'](endpoint=endpoint_name,
id=endpoint_id,
api_url=api_url,
page_id=page_id,
api_key=api_key,
api_version=api_version)
if not removing.get('result'):
ret.update({
'comment': removing.get('comment')
})
return ret
if pace:
time.sleep(1/pace)
ret.update({
'result': True,
'comment': 'StatusPage updated.'
})
return ret |
java | public void addRouter(final Class<?> routeType, Object controller) {
Method[] methods = routeType.getDeclaredMethods();
if (BladeKit.isEmpty(methods)) {
return;
}
String nameSpace = null, suffix = null;
if (null != routeType.getAnnotation(Path.class)) {
nameSpace = routeType.getAnnotation(Path.class).value();
suffix = routeType.getAnnotation(Path.class).suffix();
}
if (null == nameSpace) {
log.warn("Route [{}] not path annotation", routeType.getName());
return;
}
for (Method method : methods) {
com.blade.mvc.annotation.Route mapping = method.getAnnotation(com.blade.mvc.annotation.Route.class);
GetRoute getRoute = method.getAnnotation(GetRoute.class);
PostRoute postRoute = method.getAnnotation(PostRoute.class);
PutRoute putRoute = method.getAnnotation(PutRoute.class);
DeleteRoute deleteRoute = method.getAnnotation(DeleteRoute.class);
this.parseRoute(RouteStruct.builder().mapping(mapping)
.getRoute(getRoute).postRoute(postRoute)
.putRoute(putRoute).deleteRoute(deleteRoute)
.nameSpace(nameSpace)
.suffix(suffix).routeType(routeType)
.controller(controller).method(method)
.build());
}
} |
java | public static void v(String tag, String msg) {
if (sLevel > LEVEL_VERBOSE) {
return;
}
Log.v(tag, msg);
} |
java | @Override
public OutlierResult run(Database database, Relation<O> relation) {
StepProgress stepprog = LOG.isVerbose() ? new StepProgress("OnlineLOF", 3) : null;
Pair<Pair<KNNQuery<O>, KNNQuery<O>>, Pair<RKNNQuery<O>, RKNNQuery<O>>> queries = getKNNAndRkNNQueries(database, relation, stepprog);
KNNQuery<O> kNNRefer = queries.getFirst().getFirst();
KNNQuery<O> kNNReach = queries.getFirst().getSecond();
RKNNQuery<O> rkNNRefer = queries.getSecond().getFirst();
RKNNQuery<O> rkNNReach = queries.getSecond().getSecond();
LOFResult<O> lofResult = super.doRunInTime(relation.getDBIDs(), kNNRefer, kNNReach, stepprog);
lofResult.setRkNNRefer(rkNNRefer);
lofResult.setRkNNReach(rkNNReach);
// add listener
KNNListener l = new LOFKNNListener(lofResult);
((MaterializeKNNPreprocessor<O>) ((PreprocessorKNNQuery<O>) lofResult.getKNNRefer()).getPreprocessor()).addKNNListener(l);
((MaterializeKNNPreprocessor<O>) ((PreprocessorKNNQuery<O>) lofResult.getKNNReach()).getPreprocessor()).addKNNListener(l);
return lofResult.getResult();
} |
python | def read_local_manifest(self):
""" Read the file manifest, or create a new one if there isn't one already """
manifest = file_or_default(self.get_full_file_path(self.manifest_file), {
'format_version' : 2,
'root' : '/',
'have_revision' : 'root',
'files' : {}}, json.loads)
if 'format_version' not in manifest or manifest['format_version'] < 2:
raise SystemExit('Please update the client manifest format')
return manifest |
python | def get_api_key_with_prefix(self, identifier):
"""
Gets API key (with prefix if set).
:param identifier: The identifier of apiKey.
:return: The token for api key authentication.
"""
if self.api_key.get(identifier) and self.api_key_prefix.get(identifier):
return self.api_key_prefix[identifier] + ' ' + self.api_key[identifier]
elif self.api_key.get(identifier):
return self.api_key[identifier] |
java | public void replaceChild(int index, N newChild) {
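// Detach newChild from any previous parent, swap it in at 'index', and fix up
// the parent links of both the old and new child.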
checkNotNull(newChild);
tryRemoveFromOldParent(newChild);
N oldChild = children.set(index, newChild);
oldChild.setParent(null);
newChild.setParent(master);
} |
java | public void addResourceClassPath(ClassLoader loader)
{
String classpath = null;
if (loader instanceof DynamicClassLoader)
classpath = ((DynamicClassLoader) loader).getResourcePathSpecificFirst();
else
classpath = CauchoUtil.getClassPath();
addClassPath(classpath);
} |
python | def json_encode(obj, serialize):
""" Handle encoding complex types. """
if hasattr(obj, 'to_dict'):
return obj.to_dict(serialize=serialize)
elif isinstance(obj, datetime):
return obj.date().isoformat()
elif isinstance(obj, date):
return obj.isoformat()
elif isinstance(obj, ProxyDict):
return dict(obj)
elif isinstance(obj, ProxyList):
return list(obj)
elif is_iterable_but_not_string(obj):
return list(obj) |
python | def unite_dict(a, b):
"""
>>> a = {'name': 'Sylvanas'}
>>> b = {'gender': 'Man'}
>>> unite_dict(a, b)
{'name': 'Sylvanas', 'gender': 'Man'}
"""
c = {}
c.update(a)
c.update(b)
return c |
python | def reset_parcov(self,arg=None):
"""reset the parcov attribute to None
Parameters
----------
arg : str or pyemu.Matrix
the value to assign to the parcov attribute. If None,
the private __parcov attribute is cleared but not reset
"""
self.logger.statement("resetting parcov")
self.__parcov = None
if arg is not None:
self.parcov_arg = arg |
python | def backends(self, back=None):
'''
Return the backend list
'''
if not back:
back = self.opts['fileserver_backend']
else:
if not isinstance(back, list):
try:
back = back.split(',')
except AttributeError:
back = six.text_type(back).split(',')
if isinstance(back, Sequence):
# The test suite uses an ImmutableList type (based on
# collections.Sequence) for lists, which breaks this function in
# the test suite. This normalizes the value from the opts into a
# list if it is based on collections.Sequence.
back = list(back)
ret = []
if not isinstance(back, list):
return ret
# Avoid error logging when performing lookups in the LazyDict by
# instead doing the membership check on the result of a call to its
# .keys() attribute rather than on the LazyDict itself.
server_funcs = self.servers.keys()
try:
subtract_only = all((x.startswith('-') for x in back))
except AttributeError:
pass
else:
if subtract_only:
# Only subtracting backends from enabled ones
ret = self.opts['fileserver_backend']
for sub in back:
if '{0}.envs'.format(sub[1:]) in server_funcs:
ret.remove(sub[1:])
elif '{0}.envs'.format(sub[1:-2]) in server_funcs:
ret.remove(sub[1:-2])
return ret
for sub in back:
if '{0}.envs'.format(sub) in server_funcs:
ret.append(sub)
elif '{0}.envs'.format(sub[:-2]) in server_funcs:
ret.append(sub[:-2])
return ret |
java | public boolean startsWith(PushbackReader in, int size) throws IOException
{
InputReader reader = Input.getInstance(in, size);
boolean b = startsWith(reader);
reader.release();
return b;
} |
java | @SuppressWarnings("unchecked")
public static <T> Class<T> generate(final Class<T> type,
final Class<? extends java.lang.annotation.Annotation> scope,
final Class<?> anchor) {
Preconditions.checkNotNull(type, "Original type required");
Preconditions.checkArgument(type.isInterface() || Modifier.isAbstract(type.getModifiers()),
"Type must be interface or abstract class, but provided type is not: %s", type.getName());
final String targetClassName = type.getName() + DYNAMIC_CLASS_POSTFIX;
final ClassLoader classLoader = type.getClassLoader();
/*
* Synchronization is required to avoid double generation and consequent problems.
* Very unlikely that this method would be called too often and synchronization become bottleneck.
* Using original class as monitor to allow concurrent generation for different classes.
*/
synchronized (type) {
Class<?> targetClass;
try {
// will work if class was already generated
targetClass = classLoader.loadClass(targetClassName);
} catch (ClassNotFoundException ex) {
targetClass = generateClass(type, targetClassName, classLoader, scope, anchor);
}
return (Class<T>) targetClass;
}
} |
java | private BigInteger evalUnaryExpression(AstNode exprAst) {
// only 'unary-operator cast-expression' production is allowed in #if-context
AstNode operator = exprAst.getFirstChild();
AstNode operand = operator.getNextSibling();
AstNodeType operatorType = operator.getFirstChild().getType();
if (operatorType.equals(CppPunctuator.PLUS)) {
return evalToInt(operand);
} else if (operatorType.equals(CppPunctuator.MINUS)) {
return evalToInt(operand).negate();
} else if (operatorType.equals(CppPunctuator.NOT)) {
boolean result = !evalToBoolean(operand);
return result ? BigInteger.ONE : BigInteger.ZERO;
} else if (operatorType.equals(CppPunctuator.BW_NOT)) {
//todo: need more information (signed/unsigned, data type length) to invert bits correctly in all cases
return evalToInt(operand).not().and(UINT64_MAX);
} else {
throw new EvaluationException("Unknown unary operator '" + operatorType + "'");
}
} |
java | @XmlElementDecl(namespace = "http://schema.intuit.com/finance/v3", name = "MasterAccount", substitutionHeadNamespace = "http://schema.intuit.com/finance/v3", substitutionHeadName = "IntuitObject")
public JAXBElement<MasterAccount> createMasterAccount(MasterAccount value) {
return new JAXBElement<MasterAccount>(_MasterAccount_QNAME, MasterAccount.class, null, value);
} |
java | private void updateCustomComponentBounds()
{
if (customComponents == null)
{
return;
}
if (table == null)
{
return;
}
for (int i = 0; i < customComponents.size(); i++)
{
JComponent component = customComponents.get(i);
Rectangle rect = getHeaderRect(i);
rect.height = customComponentHeight;
component.setBounds(rect);
}
revalidate();
} |
java | public List<FaceletTaglibFunctionType<WebFacelettaglibraryDescriptor>> getAllFunction()
{
List<FaceletTaglibFunctionType<WebFacelettaglibraryDescriptor>> list = new ArrayList<FaceletTaglibFunctionType<WebFacelettaglibraryDescriptor>>();
List<Node> nodeList = model.get("function");
for(Node node: nodeList)
{
FaceletTaglibFunctionType<WebFacelettaglibraryDescriptor> type = new FaceletTaglibFunctionTypeImpl<WebFacelettaglibraryDescriptor>(this, "function", model, node);
list.add(type);
}
return list;
} |
java | private void push(char c) throws JSONException {
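// Record the enclosing scope character, guarding against stacks deeper than MAXDEPTH.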
if (m_top >= MAXDEPTH) {
throw new JSONException("Nesting too deep.");
}
m_stack[m_top] = c;
m_mode = c;
m_top += 1;
} |
python | def pause_trial(self, trial):
"""Pauses the trial.
We want to release resources (specifically GPUs) when pausing an
experiment. This results in PAUSED state that similar to TERMINATED.
"""
assert trial.status == Trial.RUNNING, trial.status
try:
self.save(trial, Checkpoint.MEMORY)
self.stop_trial(trial, stop_logger=False)
self.set_status(trial, Trial.PAUSED)
except Exception:
logger.exception("Error pausing runner.")
self.set_status(trial, Trial.ERROR) |
java | @Nullable
@ObjectiveCName("loadDraftWithPeer:")
public String loadDraft(Peer peer) {
return modules.getMessagesModule().loadDraft(peer);
} |
python | def convert_trunc(trunc):
"""Convert BEL1 trunc() to BEL2 var()"""
parent_fn_name = trunc.parent_function.name_short
prefix_list = {"p": "p.", "r": "r.", "g": "c."}
prefix = prefix_list[parent_fn_name]
new_var_arg = f'"truncated at {trunc.args[0].value}"'
new_var = bel.lang.ast.Function("var", bo.spec)
new_var.add_argument(StrArg(new_var_arg, new_var))
return new_var |
java | private static InputStream getFileAsInputStream(String file) throws InitializationException {
if (file == null) {
throw new NullPointerException("File is null");
}
try {
return new FileInputStream(new File(file));
} catch (FileNotFoundException e) {
throw new InitializationException("File not found: " + file);
}
} |
java | public void prepareCommit() {
if (m_baseCollection instanceof List) {
List<?> list = (List<?>)m_baseCollection;
list.clear();
} else if (m_baseCollection instanceof SortedMap) {
SortedMap<?, ?> map = (SortedMap<?, ?>)m_baseCollection;
map.clear();
}
} |
java | protected AWSCredentials sanitizeCredentials(AWSCredentials credentials) {
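// Snapshot the credential fields under the credentials object's own lock,
// then trim whitespace before rebuilding an immutable copy.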
String accessKeyId = null;
String secretKey = null;
String token = null;
synchronized (credentials) {
accessKeyId = credentials.getAWSAccessKeyId();
secretKey = credentials.getAWSSecretKey();
if ( credentials instanceof AWSSessionCredentials ) {
token = ((AWSSessionCredentials) credentials).getSessionToken();
}
}
if (secretKey != null) secretKey = secretKey.trim();
if (accessKeyId != null) accessKeyId = accessKeyId.trim();
if (token != null) token = token.trim();
if (credentials instanceof AWSSessionCredentials) {
return new BasicSessionCredentials(accessKeyId, secretKey, token);
}
return new BasicAWSCredentials(accessKeyId, secretKey);
} |