language | func_code_string
---|---
java | AtomSymbol generatePeriodicSymbol(final int number, final int hydrogens, final int mass, final int charge,
                                  final int unpaired, HydrogenPosition position) {
    TextOutline element = number == 0 ? new TextOutline("*", font)
                                      : new TextOutline(Elements.ofNumber(number).symbol(), font);
    TextOutline hydrogenAdjunct = defaultHydrogenLabel;
    // the hydrogen count, charge, and mass adjuncts are script size
    TextOutline hydrogenCount = new TextOutline(Integer.toString(hydrogens), font).resize(scriptSize, scriptSize);
    TextOutline chargeAdjunct = new TextOutline(chargeAdjunctText(charge, unpaired), font).resize(scriptSize,
                                                                                                  scriptSize);
    TextOutline massAdjunct = new TextOutline(Integer.toString(mass), font).resize(scriptSize, scriptSize);
    // position each adjunct relative to the element label and each other
    hydrogenAdjunct = positionHydrogenLabel(position, element, hydrogenAdjunct);
    hydrogenCount = positionSubscript(hydrogenAdjunct, hydrogenCount);
    chargeAdjunct = positionChargeLabel(hydrogens, position, chargeAdjunct, element, hydrogenAdjunct);
    massAdjunct = positionMassLabel(massAdjunct, element);
    // when the hydrogen label is positioned to the left we may need to nudge it
    // over to account for the hydrogen count and/or the mass adjunct colliding
    // with the element label
    if (position == Left) {
        final double nudgeX = hydrogenXDodge(hydrogens, mass, element, hydrogenAdjunct, hydrogenCount, massAdjunct);
        hydrogenAdjunct = hydrogenAdjunct.translate(nudgeX, 0);
        hydrogenCount = hydrogenCount.translate(nudgeX, 0);
    }
    final List<TextOutline> adjuncts = new ArrayList<TextOutline>(4);
    if (hydrogens > 0) adjuncts.add(hydrogenAdjunct);
    if (hydrogens > 1) adjuncts.add(hydrogenCount);
    if (charge != 0 || unpaired > 0) adjuncts.add(chargeAdjunct);
    if (mass > 0) adjuncts.add(massAdjunct);
    return new AtomSymbol(element, adjuncts);
} |
java | protected List<TokenList.Token> parseParameterCommaBlock( TokenList tokens, Sequence sequence ) {
    // find all the comma tokens
    List<TokenList.Token> commas = new ArrayList<TokenList.Token>();
    TokenList.Token token = tokens.first;
    int numBracket = 0;
    while( token != null ) {
        if( token.getType() == Type.SYMBOL ) {
            switch( token.getSymbol() ) {
                case COMMA:
                    if( numBracket == 0)
                        commas.add(token);
                    break;
                case BRACKET_LEFT: numBracket++; break;
                case BRACKET_RIGHT: numBracket--; break;
            }
        }
        token = token.next;
    }
    List<TokenList.Token> output = new ArrayList<TokenList.Token>();
    if( commas.isEmpty() ) {
        output.add(parseBlockNoParentheses(tokens, sequence, false));
    } else {
        TokenList.Token before = tokens.first;
        for (int i = 0; i < commas.size(); i++) {
            TokenList.Token after = commas.get(i);
            if( before == after )
                throw new ParseError("No empty function inputs allowed!");
            TokenList.Token tmp = after.next;
            TokenList sublist = tokens.extractSubList(before, after);
            sublist.remove(after); // remove the comma
            output.add(parseBlockNoParentheses(sublist, sequence, false));
            before = tmp;
        }
        // if the last character is a comma then after.next above will be null and thus before is null
        if( before == null )
            throw new ParseError("No empty function inputs allowed!");
        TokenList.Token after = tokens.last;
        TokenList sublist = tokens.extractSubList(before, after);
        output.add(parseBlockNoParentheses(sublist, sequence, false));
    }
    return output;
} |
java | public NamedQuery<Entity<T>> getOrCreateNamedQuery()
{
    List<Node> nodeList = childNode.get("named-query");
    if (nodeList != null && nodeList.size() > 0)
    {
        return new NamedQueryImpl<Entity<T>>(this, "named-query", childNode, nodeList.get(0));
    }
    return createNamedQuery();
} |
python | def get_vr(self, epoch=None):
    """get VR string from .spec Version, Release and Epoch
    epoch is None: prefix epoch if present (default)
    epoch is True: prefix epoch even if not present (0:)
    epoch is False: omit epoch even if present
    """
    version = self.get_tag('Version', expand_macros=True)
    e = None
    if epoch is None or epoch:
        try:
            e = self.get_tag('Epoch')
        except exception.SpecFileParseError:
            pass
        if epoch is None and e:
            epoch = True
    if epoch:
        if not e:
            e = '0'
        version = '%s:%s' % (e, version)
    release = self.get_tag('Release')
    release = re.sub(r'%\{?\??dist\}?$', '', release)
    release = self.expand_macro(release)
    if release:
        return '%s-%s' % (version, release)
    return version |
python | def _load_models(self) -> None:
    """Maybe load all the models to be assembled together and save them to the ``self._models`` attribute."""
    if self._models is None:
        logging.info('Loading %d models', len(self._model_paths))

        def load_model(model_path: str):
            logging.debug('\tloading %s', model_path)
            if path.isdir(model_path):
                model_path = path.join(model_path, CXF_CONFIG_FILE)
            config = load_config(model_path)
            config['model']['inputs'] = self._inputs
            config['model']['outputs'] = self._outputs
            return create_model(config, output_dir=None, dataset=self._dataset,
                                restore_from=path.dirname(model_path))

        self._models = list(map(load_model, self._model_paths)) |
python | def callback(self, provider):
    """
    Handles 3rd party callback and processes its data
    """
    provider = self.get_provider(provider)
    try:
        return provider.authorized_handler(self.login)(provider=provider)
    except OAuthException as ex:
        logging.error("Data: %s", ex.data)
        raise |
java | public synchronized boolean isLoaded(LCMSDataSubset subset) {
    for (Map.Entry<Object, Set<LCMSDataSubset>> entry : cache.asMap().entrySet()) {
        Object user = entry.getKey();
        if (user == null) {
            // it has already been reclaimed, move on
            // this condition should not be triggered though, because we
            // have eviction listener, which should run cleanup whenever
            // any key in the cache becomes null
            continue;
        }
        Set<LCMSDataSubset> subsets = entry.getValue();
        for (LCMSDataSubset subsetInUse : subsets) {
            if (subsetInUse.contains(subset)) {
                return true;
            }
        }
    }
    return false;
} |
java | public static boolean isEmbedded(String driverClassName) {
    return databases
            .stream()
            .filter(JdbcDatabase::isEmbedded)
            .anyMatch(db -> db.driverClassName.equals(driverClassName));
} |
java | public ListDomainAppResponse listDomainApp(ListDomainAppRequest request) {
    checkNotNull(request, "The parameter request should NOT be null.");
    checkStringNotEmpty(request.getPlayDomain(), "playDomain should NOT be empty.");
    InternalRequest internalRequest = createRequest(HttpMethodName.GET,
            request, LIVE_DOMAIN, request.getPlayDomain(), LIVE_APP);
    return invokeHttpClient(internalRequest, ListDomainAppResponse.class);
} |
java | public final Tuple7<T9, T10, T11, T12, T13, T14, T15> skip8() {
    return new Tuple7<>(v9, v10, v11, v12, v13, v14, v15);
} |
java | @SuppressWarnings("unchecked")
public static <T> T[] newArrayInstance(Class<T> baseClass, int length) {
    return (T[]) Array.newInstance(baseClass, length);
} |
java | public static RgbaColor fromHsl(String hsl) {
    String[] parts = getHslParts(hsl).split(",");
    if (parts.length == 3) {
        float[] HSL = new float[] { parseInt(parts[0]), parseInt(parts[1]), parseInt(parts[2]) };
        return fromHsl(HSL);
    }
    else {
        return getDefaultColor();
    }
} |
java | public static Long getCurrentNid() {
    Node node = ArbitrateConfigRegistry.getConfig().currentNode();
    if (node != null) {
        return node.getId();
    } else {
        return null;
    }
} |
java | private void maybeUpdateScrollbars() {
    if (!isAttached()) {
        return;
    }
    /*
     * Measure the height and width of the content directly. Note that measuring
     * the height and width of the container element (which should be the same)
     * doesn't work correctly in IE.
     */
    Widget w = getWidget();
    int contentHeight = (w == null) ? 0 : w.getOffsetHeight();
    // Determine which scrollbars to show.
    int realScrollbarHeight = 0;
    int realScrollbarWidth = 0;
    if ((m_scrollbar != null) && (getElement().getClientHeight() < contentHeight)) {
        // Vertical scrollbar is defined and required.
        realScrollbarWidth = m_verticalScrollbarWidth;
    }
    if (realScrollbarWidth > 0) {
        m_scrollLayer.getStyle().clearDisplay();
        m_scrollbar.setScrollHeight(Math.max(0, contentHeight - realScrollbarHeight));
    } else if (m_scrollLayer != null) {
        m_scrollLayer.getStyle().setDisplay(Display.NONE);
    }
    if (m_scrollbar instanceof I_CmsDescendantResizeHandler) {
        ((I_CmsDescendantResizeHandler)m_scrollbar).onResizeDescendant();
    }
    maybeUpdateScrollbarPositions();
} |
python | def put_multipart(self, local_path, destination_s3_path, part_size=DEFAULT_PART_SIZE, **kwargs):
    """
    Put an object stored locally to an S3 path
    using S3 multi-part upload (for files > 8Mb).
    :param local_path: Path to source local file
    :param destination_s3_path: URL for target S3 location
    :param part_size: Part size in bytes. Default: 8388608 (8MB)
    :param kwargs: Keyword arguments are passed to the boto function `upload_fileobj` as ExtraArgs
    """
    self._check_deprecated_argument(**kwargs)
    from boto3.s3.transfer import TransferConfig
    # default part size for boto3 is 8Mb, changing it to fit part_size
    # provided as a parameter
    transfer_config = TransferConfig(multipart_chunksize=part_size)
    (bucket, key) = self._path_to_bucket_and_key(destination_s3_path)
    # use a context manager so the source file handle is closed after the upload
    with open(local_path, 'rb') as src:
        self.s3.meta.client.upload_fileobj(
            Fileobj=src, Bucket=bucket, Key=key, Config=transfer_config, ExtraArgs=kwargs) |
python | def rdtxt_gos(go_file, prt):
    """Read GO IDs from a file."""
    goids_all = set()
    if not os.path.exists(go_file):
        raise RuntimeError("CAN NOT READ GO FILE: {FILE}\n".format(FILE=go_file))
    re_go = re.compile(r'(GO:\d{7})+?')
    re_com = re.compile(r'^\s*#')  # Lines starting with a '#' are comment lines and ignored
    with open(go_file) as ifstrm:
        for line in ifstrm:
            # Skip lines that are comments
            if re_com.search(line):
                continue
            # Search for GO IDs on the line
            goids_found = re_go.findall(line)
            if goids_found:
                goids_all.update(goids_found)
        if prt:
            prt.write("  {N} GO IDs READ: {TXT}\n".format(N=len(goids_all), TXT=go_file))
    return goids_all |
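The GO-ID pattern above is self-contained; a minimal sketch of what it extracts from a line of text:

```python
import re

re_go = re.compile(r'(GO:\d{7})+?')  # same pattern as the snippet
print(re_go.findall('genes: GO:0008150, GO:0003674  # trailing comment'))
# ['GO:0008150', 'GO:0003674']
```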
java | private RdfStream getResourceTriples(final int limit, final FedoraResource resource) {
    final PreferTag returnPreference;
    if (prefer != null && prefer.hasReturn()) {
        returnPreference = prefer.getReturn();
    } else if (prefer != null && prefer.hasHandling()) {
        returnPreference = prefer.getHandling();
    } else {
        returnPreference = PreferTag.emptyTag();
    }
    final LdpPreferTag ldpPreferences = new LdpPreferTag(returnPreference);
    final Predicate<Triple> tripleFilter = ldpPreferences.prefersServerManaged() ? x -> true :
            IS_MANAGED_TRIPLE.negate();
    final List<Stream<Triple>> streams = new ArrayList<>();
    if (returnPreference.getValue().equals("minimal")) {
        streams.add(getTriples(resource, of(PROPERTIES, MINIMAL)).filter(tripleFilter));
        // Mementos already have the server managed properties in the PROPERTIES category
        // since mementos are immutable and these triples are no longer managed
        if (ldpPreferences.prefersServerManaged() && !resource.isMemento()) {
            streams.add(getTriples(resource, of(SERVER_MANAGED, MINIMAL)));
        }
    } else {
        streams.add(getTriples(resource, PROPERTIES).filter(tripleFilter));
        // Additional server-managed triples about this resource
        // Mementos already have the server managed properties in the PROPERTIES category
        // since mementos are immutable and these triples are no longer managed
        if (ldpPreferences.prefersServerManaged() && !resource.isMemento()) {
            streams.add(getTriples(resource, SERVER_MANAGED));
        }
        // containment triples about this resource
        if (ldpPreferences.prefersContainment()) {
            if (limit == -1) {
                streams.add(getTriples(resource, LDP_CONTAINMENT));
            } else {
                streams.add(getTriples(resource, LDP_CONTAINMENT).limit(limit));
            }
        }
        // LDP container membership triples for this resource
        if (ldpPreferences.prefersMembership()) {
            streams.add(getTriples(resource, LDP_MEMBERSHIP));
        }
        // Include inbound references to this object
        if (ldpPreferences.prefersReferences()) {
            streams.add(getTriples(resource, INBOUND_REFERENCES));
        }
        // Embed the children of this object
        if (ldpPreferences.prefersEmbed()) {
            streams.add(getTriples(resource, EMBED_RESOURCES));
        }
    }
    final RdfStream rdfStream = new DefaultRdfStream(
            asNode(resource), streams.stream().reduce(empty(), Stream::concat));
    if (httpTripleUtil != null && ldpPreferences.prefersServerManaged()) {
        return httpTripleUtil.addHttpComponentModelsForResourceToStream(rdfStream, resource, uriInfo,
                translator());
    }
    return rdfStream;
} |
java | @Override
public ByteBuffer getBuffer()
{
    // If we are expected to read large message, we'll opt for zero-
    // copy, i.e. we'll ask caller to fill the data directly to the
    // message. Note that subsequent read(s) are non-blocking, thus
    // each single read reads at most SO_RCVBUF bytes at once not
    // depending on how large is the chunk returned from here.
    // As a consequence, large messages being received won't block
    // other engines running in the same I/O thread for excessive
    // amounts of time.
    if (toRead >= bufsize) {
        zeroCopy = true;
        return readPos.duplicate();
    }
    else {
        zeroCopy = false;
        buf.clear();
        return buf;
    }
} |
java | public BigDecimal toBigDecimal() {
    StringBuilder sb = new StringBuilder();
    this.createNumber(sb);
    return new BigDecimal(sb.toString());
} |
python | def preserve_sig(wrapper, orig_func, force=False):
    """
    Decorates a wrapper function.
    It seems impossible to preserve signatures in python 2 without eval
    (Maybe another option is to write to a temporary module?)
    Args:
        wrapper: the function wrapping orig_func to change the signature of
        orig_func: the original function to take the signature from
    References:
        http://emptysqua.re/blog/copying-a-python-functions-signature/
        https://code.google.com/p/micheles/source/browse/decorator/src/decorator.py
    TODO:
        checkout funcsigs
        https://funcsigs.readthedocs.org/en/latest/
    CommandLine:
        python -m utool.util_decor --test-preserve_sig
    Example:
        >>> # ENABLE_DOCTEST
        >>> import utool as ut
        >>> #ut.rrrr(False)
        >>> def myfunction(self, listinput_, arg1, *args, **kwargs):
        >>>     " just a test function "
        >>>     return [x + 1 for x in listinput_]
        >>> #orig_func = ut.take
        >>> orig_func = myfunction
        >>> wrapper = ut.accepts_scalar_input2([0])(orig_func)
        >>> _wrp_preserve1 = ut.preserve_sig(wrapper, orig_func, True)
        >>> _wrp_preserve2 = ut.preserve_sig(wrapper, orig_func, False)
        >>> print('_wrp_preserve1 = %r' % (_wrp_preserve1,))
        >>> print('_wrp_preserve2 = %r' % (_wrp_preserve2,))
        >>> #print('source _wrp_preserve1 = %s' % (ut.get_func_sourcecode(_wrp_preserve1),))
        >>> #print('source _wrp_preserve2 = %s' % (ut.get_func_sourcecode(_wrp_preserve2)),)
        >>> result = str(_wrp_preserve1)
        >>> print(result)
    """
    #if True:
    #    import functools
    #    return functools.wraps(orig_func)(wrapper)
    from utool._internal import meta_util_six
    from utool import util_str
    from utool import util_inspect
    if wrapper is orig_func:
        # nothing to do
        return orig_func
    orig_docstr = meta_util_six.get_funcdoc(orig_func)
    orig_docstr = '' if orig_docstr is None else orig_docstr
    orig_argspec = util_inspect.get_func_argspec(orig_func)
    wrap_name = meta_util_six.get_funccode(wrapper).co_name
    orig_name = meta_util_six.get_funcname(orig_func)
    # At the very least preserve info in a dictionary
    _utinfo = {}
    _utinfo['orig_func'] = orig_func
    _utinfo['wrap_name'] = wrap_name
    _utinfo['orig_name'] = orig_name
    _utinfo['orig_argspec'] = orig_argspec
    if hasattr(wrapper, '_utinfo'):
        parent_wrapper_utinfo = wrapper._utinfo
        _utinfo['parent_wrapper_utinfo'] = parent_wrapper_utinfo
    if hasattr(orig_func, '_utinfo'):
        parent_orig_utinfo = orig_func._utinfo
        _utinfo['parent_orig_utinfo'] = parent_orig_utinfo
    # environment variable is set if you are building documentation
    # preserve sig if building docs
    building_docs = os.environ.get('UTOOL_AUTOGEN_SPHINX_RUNNING', 'OFF') == 'ON'
    if force or SIG_PRESERVE or building_docs:
        # PRESERVES ALL SIGNATURES WITH EXECS
        src_fmt = r'''
        def _wrp_preserve{defsig}:
            """ {orig_docstr} """
            try:
                return wrapper{callsig}
            except Exception as ex:
                import utool as ut
                msg = ('Failure in signature preserving wrapper:\n')
                ut.printex(ex, msg)
                raise
        '''
        # Put wrapped function into a scope
        globals_ = {'wrapper': wrapper}
        locals_ = {}
        # argspec is :ArgSpec(args=['bar', 'baz'], varargs=None, keywords=None,
        #                     defaults=(True,))
        # get orig functions argspec
        # get functions signature
        # Get function call signature (no defaults)
        # Define an exec function
        argspec = inspect.getargspec(orig_func)
        (args, varargs, varkw, defaults) = argspec
        defsig = inspect.formatargspec(*argspec)
        callsig = inspect.formatargspec(*argspec[0:3])
        # TODO:
        #     ut.func_defsig
        #     ut.func_callsig
        src_fmtdict = dict(defsig=defsig, callsig=callsig, orig_docstr=orig_docstr)
        src = textwrap.dedent(src_fmt).format(**src_fmtdict)
        # Define the new function on the fly
        # (I wish there was a non exec / eval way to do this)
        #print(src)
        code = compile(src, '<string>', 'exec')
        six.exec_(code, globals_, locals_)
        #six.exec_(src, globals_, locals_)
        # Use functools.update_wrapper to complete preservation
        _wrp_preserve = functools.update_wrapper(locals_['_wrp_preserve'], orig_func)
        # Keep debug info
        _utinfo['src'] = src
        # Set an internal sig variable that we may use
        #_wrp_preserve.__sig__ = defsig
    else:
        # PRESERVES SOME SIGNATURES NO EXEC
        # signature preservation is turned off. just preserve the name.
        # Does not use any exec or eval statements.
        _wrp_preserve = functools.update_wrapper(wrapper, orig_func)
        # Just do something to preserve signature
        DEBUG_WRAPPED_DOCSTRING = False
        if DEBUG_WRAPPED_DOCSTRING:
            new_docstr_fmtstr = util_str.codeblock(
                '''
                Wrapped function {wrap_name}({orig_name})
                orig_argspec = {orig_argspec}
                orig_docstr = {orig_docstr}
                '''
            )
        else:
            new_docstr_fmtstr = util_str.codeblock(
                '''
                {orig_docstr}
                '''
            )
        new_docstr = new_docstr_fmtstr.format(
            wrap_name=wrap_name, orig_name=orig_name, orig_docstr=orig_docstr,
            orig_argspec=orig_argspec)
        meta_util_six.set_funcdoc(_wrp_preserve, new_docstr)
    _wrp_preserve._utinfo = _utinfo
    return _wrp_preserve |
python | def trial(request):
    """View for a single trial."""
    job_id = request.GET.get("job_id")
    trial_id = request.GET.get("trial_id")
    recent_trials = TrialRecord.objects \
        .filter(job_id=job_id) \
        .order_by("-start_time")
    recent_results = ResultRecord.objects \
        .filter(trial_id=trial_id) \
        .order_by("-date")[0:2000]
    current_trial = TrialRecord.objects \
        .filter(trial_id=trial_id) \
        .order_by("-start_time")[0]
    context = {
        "job_id": job_id,
        "trial_id": trial_id,
        "current_trial": current_trial,
        "recent_results": recent_results,
        "recent_trials": recent_trials
    }
    return render(request, "trial.html", context) |
python | def flatten_zip_dataset(*args):
    """A list of examples to a dataset containing mixed examples.
    Given a list of `n` dataset examples, flatten them by converting
    each element into a dataset and concatenating them to convert into a
    single dataset.
    Args:
        *args: A list containing one example each from `n` different datasets.
    Returns:
        flattened: A new dataset containing the examples from the list as part
            of a single dataset.
    """
    flattened = tf.data.Dataset.from_tensors(args[0])
    for ex in args[1:]:
        flattened = flattened.concatenate(tf.data.Dataset.from_tensors(ex))
    return flattened |
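A minimal usage sketch (assumes TensorFlow's `tf.data` API; the dataset contents are made up): zipped pairs are flattened back into single examples.

```python
import tensorflow as tf

a = tf.data.Dataset.from_tensor_slices([1, 2])
b = tf.data.Dataset.from_tensor_slices([10, 20])
zipped = tf.data.Dataset.zip((a, b))
# flat_map unpacks each (x, y) pair into flatten_zip_dataset's *args
flat = zipped.flat_map(flatten_zip_dataset)
# iterates as 1, 10, 2, 20 -- four single-example elements
```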
java | public Object readValue(InputStream is, Class<Object> clazz) {
    try {
        return this.mapper.readValue(is, clazz);
    }
    catch (Exception e) {
        LogFactory.getLog(JsonHandler.class).info("deserialize json to object", e);
        return null;
    }
} |
python | def next_row(self):
    """Move to next row from currently selected row."""
    row = self.currentIndex().row()
    rows = self.source_model.rowCount()
    if row + 1 == rows:
        row = -1
    self.selectRow(row + 1) |
java | public Object accept(QueryNodeVisitor visitor, Object data) throws RepositoryException {
    return visitor.visit(this, data);
} |
python | def kpl_status(self, address, group):
    """Get the status of a KPL button."""
    addr = Address(address)
    device = self.plm.devices[addr.id]
    device.states[group].async_refresh_state() |
java | public static <K, V> Map<K, V> cast(Map<?, ?> map) {
    return map == null ? null : new CastingMap<K, V>(map);
} |
python | def generate_ngram_data_set(self, token_list, n=2):
    '''
    Generate the N-gram's pair.
    Args:
        token_list:    The list of tokens.
        n:             N-gram length.
    Returns:
        zip of Tuple(Training N-gram data, Target N-gram data)
    '''
    n_gram_tuple_zip = self.generate_tuple_zip(token_list, n)
    n_gram_tuple_list = [n_gram_tuple for n_gram_tuple in n_gram_tuple_zip]
    n_gram_data_set = self.generate_tuple_zip(n_gram_tuple_list, 2)
    return n_gram_data_set |
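A minimal sketch of the pairing this produces, assuming `generate_tuple_zip(seq, n)` yields consecutive n-length windows (that helper is not shown in the snippet):

```python
def generate_tuple_zip(seq, n):
    # consecutive n-length windows: the assumed behaviour of the helper
    return zip(*[seq[i:] for i in range(n)])

tokens = ['the', 'cat', 'sat', 'down']
bigrams = list(generate_tuple_zip(tokens, 2))
# [('the', 'cat'), ('cat', 'sat'), ('sat', 'down')]
pairs = list(generate_tuple_zip(bigrams, 2))
print(pairs)
# [(('the', 'cat'), ('cat', 'sat')), (('cat', 'sat'), ('sat', 'down'))]
# i.e. (training n-gram, target n-gram) pairs
```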
java | public static Map<String, ValueCommand> createParameterMap(Object... params) {
    assert params.length % 2 == 0;
    Map<String, ValueCommand> ret = new HashMap<String, ValueCommand>();
    for (int i = 0; i < params.length; i = i + 2) {
        String param = params[i].toString();
        Value value = convertFromJava(params[i + 1]);
        ret.put(param, new LiteralCommand(value));
    }
    return ret;
} |
java | final public SwitchExpressionBuilder<T> switchOn() {
    return new SwitchExpressionBuilder<T>(new ExpressionHandler<SwitchBuilder<T>>() {
        public SwitchBuilder<T> handleExpression(final Expression e) {
            return new SwitchBuilder<T>(e, new SwitchStatementsHandler<T>() {
                public T handleStatement(SwitchStatement switchStatement) {
                    return statementHandler().handleStatement(switchStatement);
                }
            }, builder);
        }
    });
} |
java | private Glyph createGlyphBasics(Entity e, boolean idIsFinal)
{
    Glyph g = factory.createGlyph();
    g.setId(convertID(e.getUri()));
    String s = typeMatchMap.get(e.getModelInterface());
    if (( //use 'or' sbgn class for special generic physical entities
            e instanceof Complex && !((Complex)e).getMemberPhysicalEntity().isEmpty()
                    && ((Complex) e).getComponent().isEmpty())
            ||
            (e instanceof SimplePhysicalEntity && ((SimplePhysicalEntity) e).getEntityReference()==null
                    && !((SimplePhysicalEntity) e).getMemberPhysicalEntity().isEmpty()))
    {
        s = GlyphClazz.OR.getClazz();
    }
    g.setClazz(s);
    // Set the label
    Label label = factory.createLabel();
    label.setText(findLabelFor(e));
    g.setLabel(label);
    // Detect if ubique
    if (ubiqueDet != null && ubiqueDet.isUbique(e))
    {
        g.setClone(factory.createGlyphClone());
    }
    // Put on state variables
    if (!g.getClazz().equals(GlyphClazz.OR.getClazz())) {
        g.getGlyph().addAll(getInformation(e));
    }
    // Record the mapping
    if (idIsFinal) {
        Set<String> uris = new HashSet<String>();
        uris.add(e.getUri());
        sbgn2BPMap.put(g.getId(), uris);
    }
    return g;
} |
java | protected JButton createLeftOneTouchButton() {
    SeaGlassArrowButton b = new SeaGlassArrowButton(SwingConstants.NORTH);
    int oneTouchSize = lookupOneTouchSize();
    b.setName("SplitPaneDivider.leftOneTouchButton");
    b.setMinimumSize(new Dimension(oneTouchSize, oneTouchSize));
    b.setCursor(Cursor.getPredefinedCursor(
        splitPane.getOrientation() == JSplitPane.HORIZONTAL_SPLIT
            ? Cursor.W_RESIZE_CURSOR : Cursor.N_RESIZE_CURSOR));
    b.setFocusPainted(false);
    b.setBorderPainted(false);
    b.setRequestFocusEnabled(false);
    b.setDirection(mapDirection(true));
    return b;
} |
java | @Override
protected List<Object> populateEntities(EntityMetadata m, Client client)
{
    ApplicationMetadata appMetadata = kunderaMetadata.getApplicationMetadata();
    try
    {
        String query = appMetadata.getQuery(getJPAQuery());
        boolean isNative = kunderaQuery.isNative();
        if (isNative)
        { // Native Query Support is enabled
            return ((MongoDBClient) client).executeQuery(query == null ? getJPAQuery() : query, m);
        }
        if (MetadataUtils.useSecondryIndex(((ClientBase) client).getClientMetadata()))
        {
            if (kunderaQuery.isAggregated())
            {
                return ((MongoDBClient) client).aggregate(m,
                        createMongoQuery(m, getKunderaQuery().getFilterClauseQueue()), createAggregationLookup(m),
                        createAggregation(m), getAggregationOrderByClause(m), isSingleResult ? 1 : maxResult);
            }
            else
            {
                BasicDBObject orderByClause = getOrderByClause(m);
                return ((MongoDBClient) client).loadData(m,
                        createMongoQuery(m, getKunderaQuery().getFilterClauseQueue()), null, orderByClause,
                        isSingleResult ? 1 : maxResult, firstResult, isCountQuery(),
                        getKeys(m, getKunderaQuery().getResult()), getKunderaQuery().getResult());
            }
        }
        else
        {
            return populateUsingLucene(m, client, null, getKunderaQuery().getResult());
        }
    }
    catch (Exception e)
    {
        log.error("Error during executing query, Caused by:", e);
        throw new QueryHandlerException(e);
    }
} |
python | def timeout_after(seconds, coro=None, *args):
    '''Execute the specified coroutine and return its result. However,
    issue a cancellation request to the calling task after seconds
    have elapsed.  When this happens, a TaskTimeout exception is
    raised.  If coro is None, the result of this function serves
    as an asynchronous context manager that applies a timeout to a
    block of statements.
    timeout_after() may be composed with other timeout_after()
    operations (i.e., nested timeouts).  If an outer timeout expires
    first, then TimeoutCancellationError is raised instead of
    TaskTimeout.  If an inner timeout expires and fails to properly
    handle the TaskTimeout, an UncaughtTimeoutError is raised in the
    outer timeout.
    '''
    if coro:
        return _timeout_after_func(seconds, False, coro, args)
    return TimeoutAfter(seconds) |
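A usage sketch following the docstring (assumes the curio library, where this function and its helpers live):

```python
from curio import run, sleep, timeout_after, TaskTimeout

async def main():
    try:
        await timeout_after(1, sleep, 10)   # function form: cancelled after 1s
    except TaskTimeout:
        print('timed out')
    try:
        async with timeout_after(1):        # context-manager form
            await sleep(10)
    except TaskTimeout:
        print('timed out again')

run(main())
```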
java | private String parseErrorCodeFromHeader(Map<String, String> httpHeaders) {
    String headerValue = httpHeaders.get(X_AMZN_ERROR_TYPE);
    if (headerValue != null) {
        int separator = headerValue.indexOf(':');
        if (separator != -1) {
            headerValue = headerValue.substring(0, separator);
        }
    }
    return headerValue;
} |
java | @Override
public DeleteNamedQueryResult deleteNamedQuery(DeleteNamedQueryRequest request) {
    request = beforeClientExecution(request);
    return executeDeleteNamedQuery(request);
} |
python | def remove(self, item):
    """Remove either an unparsed argument string or an argument object.
    :param Union[str,Arg] item: Item to remove
    >>> arguments = TexArgs([RArg('arg0'), '[arg2]', '{arg3}'])
    >>> arguments.remove('{arg0}')
    >>> len(arguments)
    2
    >>> arguments[0]
    OArg('arg2')
    """
    item = self.__coerce(item)
    self.all.remove(item)
    super().remove(item) |
python | def phone_numbers(self):
    """
    :rtype: twilio.rest.lookups.v1.phone_number.PhoneNumberList
    """
    if self._phone_numbers is None:
        self._phone_numbers = PhoneNumberList(self)
    return self._phone_numbers |
java | public static List<CommercePriceEntry> findByCompanyId(long companyId,
        int start, int end) {
    return getPersistence().findByCompanyId(companyId, start, end);
} |
python | def __load_functions(self):
    '''
    Find out what functions are available on the minion
    '''
    return set(self.local.cmd(self.minion,
                              'sys.list_functions').get(self.minion, [])) |
java | @Override
public Builder claimFrom(String jsonOrJwt, String claim) throws InvalidClaimException, InvalidTokenException {
    if (JwtUtils.isNullEmpty(claim)) {
        String err = Tr.formatMessage(tc, "JWT_INVALID_CLAIM_ERR", new Object[] { claim });
        throw new InvalidClaimException(err);
    }
    if (isValidToken(jsonOrJwt)) {
        String decoded = jsonOrJwt;
        if (JwtUtils.isBase64Encoded(jsonOrJwt)) {
            decoded = JwtUtils.decodeFromBase64String(jsonOrJwt);
        }
        boolean isJson = JwtUtils.isJson(decoded);
        if (!isJson) {
            String jwtPayload = JwtUtils.getPayload(jsonOrJwt);
            decoded = JwtUtils.decodeFromBase64String(jwtPayload);
        }
        // } else {
        //     // either decoded payload from jwt or encoded/decoded json string
        //     if (JwtUtils.isBase64Encoded(jsonOrJwt)) {
        //         decoded = JwtUtils.decodeFromBase64String(jsonOrJwt);
        //     }
        // }
        if (decoded != null) {
            Object claimValue = null;
            try {
                if ((claimValue = JwtUtils.claimFromJsonObject(decoded, claim)) != null) {
                    claims.put(claim, claimValue);
                }
            } catch (JoseException e) {
                String err = Tr.formatMessage(tc, "JWT_INVALID_TOKEN_ERR");
                throw new InvalidTokenException(err);
            }
        }
    }
    return this;
} |
python | def mxarray_to_ndarray(libmx, pm):
    """Convert MATLAB object `pm` to numpy equivalent."""
    ndims = libmx.mxGetNumberOfDimensions(pm)
    dims = libmx.mxGetDimensions(pm)
    numelems = libmx.mxGetNumberOfElements(pm)
    elem_size = libmx.mxGetElementSize(pm)
    class_name = libmx.mxGetClassName(pm)
    is_numeric = libmx.mxIsNumeric(pm)
    is_complex = libmx.mxIsComplex(pm)
    data = libmx.mxGetData(pm)
    imag_data = libmx.mxGetImagData(pm)
    if is_numeric:
        datasize = numelems*elem_size
        real_buffer = ctypes.create_string_buffer(datasize)
        ctypes.memmove(real_buffer, data, datasize)
        pyarray = np.ndarray(
            buffer=real_buffer,
            shape=dims[:ndims],
            dtype=class_name,
            order='F'
        )
        if is_complex:
            imag_buffer = ctypes.create_string_buffer(datasize)
            ctypes.memmove(imag_buffer, imag_data, datasize)
            pyarray_imag = np.ndarray(
                buffer=imag_buffer,
                shape=dims[:ndims],
                dtype=class_name,
                order='F'
            )
            pyarray = pyarray + pyarray_imag * 1j
        out = pyarray.squeeze()
        if out.ndim == 0:
            out, = np.atleast_1d(out)
    elif class_name == 'char':
        datasize = numelems + 1
        pystring = ctypes.create_string_buffer(datasize+1)
        libmx.mxGetString(pm, pystring, datasize)
        out = pystring.value
    elif class_name == 'logical':
        datasize = numelems*elem_size
        buf = ctypes.create_string_buffer(datasize)
        ctypes.memmove(buf, data, datasize)
        pyarray = np.ndarray(
            buffer=buf,
            shape=dims[:ndims],
            dtype='bool',
            order='F'
        )
        out = pyarray.squeeze()
        if out.ndim == 0:
            out, = np.atleast_1d(out)
    elif class_name == 'cell':
        out = np.empty(numelems, dtype='O')
        for i in range(numelems):
            cell = libmx.mxGetCell(pm, i)
            if bool(cell):
                out[i] = mxarray_to_ndarray(libmx, cell)
            else:
                ### uninitialized cell
                out[i] = None
        out = out.reshape(dims[:ndims], order='F')
        out = out.squeeze()
    elif class_name == 'struct':
        field_num = libmx.mxGetNumberOfFields(pm)
        ### Get all field names
        field_names = []
        for i in range(field_num):
            field_name = libmx.mxGetFieldNameByNumber(pm, i)
            field_names.append(field_name)
        ### Get all fields
        records = []  # [(x0, y0, z0), (x1, y1, z1), ... (xN, yN, zN)]
        for i in range(numelems):
            record = []
            for field_name in field_names:
                field = libmx.mxGetField(pm, i, field_name)
                if bool(field):
                    el = mxarray_to_ndarray(libmx, field)
                else:
                    ### uninitialized field
                    el = None
                record.append(el)
            records.append(record)
        ### Set the dtypes right (if there is any ndarray, we want dtype=object)
        arrays = zip(*records)  # [(x0, x1, ... xN), (y0, y1, ... yN), (z0, z1, ... zN)]
        new_arrays = []
        ## This loop is necessary, because np.rec.fromarrays() cannot
        ## handle a list of arrays of the same size well
        for arr in arrays:
            contains_ndarray = np.any([isinstance(el, np.ndarray) for el in arr])
            if contains_ndarray:
                newarr = np.empty(len(arr), dtype='O')
                for i, a in enumerate(arr):
                    newarr[i] = a
            else:
                newarr = np.array(arr)
            new_arrays.append(newarr)
        if new_arrays:
            out = np.rec.fromarrays(new_arrays, names=field_names)
            out = out.reshape(dims[:ndims], order='F')
            out = out.squeeze()
        else:
            out = np.array([])
    else:
        raise NotImplementedError('{}-arrays are not supported'.format(class_name))
    return out |
java | public static MozuUrl updateUrl(String cardId, String responseFields)
{
    UrlFormatter formatter = new UrlFormatter("/payments/commerce/payments/cards/{cardId}?responseFields={responseFields}");
    formatter.formatUrl("cardId", cardId);
    formatter.formatUrl("responseFields", responseFields);
    return new MozuUrl(formatter.getResourceUrl(), MozuUrl.UrlLocation.PCI_POD);
} |
java | public String readToken(String remainder) {
    final String token;
    try {
        Stack<Character> parens = new Stack<Character>();
        int nextExpression;
        for (nextExpression = 0; nextExpression < remainder.length(); nextExpression++) {
            char c = remainder.charAt(nextExpression);
            // check for quotation
            String match = null;
            for (Pattern pattern : literalPatterns) {
                Matcher matcher = pattern.matcher(remainder).region(
                        nextExpression, remainder.length());
                if (matcher.lookingAt()) {
                    match = matcher.group(0);
                    break;
                }
            }
            if (match != null) {
                // we found and can consume a quotation
                nextExpression += match.length() - 1;
            } else if (c == '(') {
                parens.push(c);
            } else if (c == ')') {
                if (parens.isEmpty()) {
                    break;
                } else {
                    parens.pop();
                }
            } else if (c == '&' || c == '|') {
                break;
            }
        }
        token = remainder.substring(0, nextExpression).trim();
    } catch (Exception e) {
        throw new TokenizeLogicException("Error parsing token: "
                + remainder, e);
    }
    if (token.isEmpty()) {
        throw new TokenizeLogicException("zero-length token found.");
    }
    return token;
} |
python | def flatten(args):
    """
    %prog flatten filename > ids
    Convert a list of IDs (say, multiple IDs per line) and move them into one
    per line.
    For example, convert this, to this:
            A,B,C       |    A
            1           |    B
            a,4         |    C
                        |    1
                        |    a
                        |    4
    If multi-column file with multiple elements per column, zip then flatten like so:
            A,B,C    2,10,gg    |    A,2
            1,3      4          |    B,10
                                |    C,gg
                                |    1,4
                                |    3,na
    """
    from six.moves import zip_longest
    p = OptionParser(flatten.__doc__)
    p.set_sep(sep=",")
    p.add_option("--zipflatten", default=None, dest="zipsep",
                 help="Specify if columns of the file should be zipped before" +
                      " flattening. If so, specify delimiter separating column elements" +
                      " [default: %default]")
    opts, args = p.parse_args(args)
    if len(args) != 1:
        sys.exit(not p.print_help())
    tabfile, = args
    zipsep = opts.zipsep
    fp = must_open(tabfile)
    for row in fp:
        if zipsep:
            row = row.rstrip()
            atoms = row.split(opts.sep)
            frows = []
            for atom in atoms:
                frows.append(atom.split(zipsep))
            print("\n".join([zipsep.join(x) for x in list(zip_longest(*frows, fillvalue="na"))]))
        else:
            print(row.strip().replace(opts.sep, "\n")) |
java | private void handleContainerCompletion(ContainerStatus containerStatus) {
    Map.Entry<Container, String> completedContainerEntry = this.containerMap.remove(containerStatus.getContainerId());
    String completedInstanceName = completedContainerEntry.getValue();
    LOGGER.info(String.format("Container %s running Helix instance %s has completed with exit status %d",
            containerStatus.getContainerId(), completedInstanceName, containerStatus.getExitStatus()));
    if (!Strings.isNullOrEmpty(containerStatus.getDiagnostics())) {
        LOGGER.info(String.format("Received the following diagnostics information for container %s: %s",
                containerStatus.getContainerId(), containerStatus.getDiagnostics()));
    }
    if (this.shutdownInProgress) {
        return;
    }
    this.helixInstanceRetryCount.putIfAbsent(completedInstanceName, new AtomicInteger(0));
    int retryCount =
            this.helixInstanceRetryCount.get(completedInstanceName).incrementAndGet();
    // Populate event metadata
    Optional<ImmutableMap.Builder<String, String>> eventMetadataBuilder = Optional.absent();
    if (this.eventSubmitter.isPresent()) {
        eventMetadataBuilder = Optional.of(buildContainerStatusEventMetadata(containerStatus));
        eventMetadataBuilder.get().put(GobblinYarnEventConstants.EventMetadata.HELIX_INSTANCE_ID, completedInstanceName);
        eventMetadataBuilder.get().put(GobblinYarnEventConstants.EventMetadata.CONTAINER_STATUS_RETRY_ATTEMPT, retryCount + "");
    }
    if (this.helixInstanceMaxRetries > 0 && retryCount > this.helixInstanceMaxRetries) {
        if (this.eventSubmitter.isPresent()) {
            this.eventSubmitter.get().submit(GobblinYarnEventConstants.EventNames.HELIX_INSTANCE_COMPLETION,
                    eventMetadataBuilder.get().build());
        }
        LOGGER.warn("Maximum number of retries has been achieved for Helix instance " + completedInstanceName);
        return;
    }
    // Add the Helix instance name of the completed container to the queue of unused
    // instance names so they can be reused by a replacement container.
    this.unusedHelixInstanceNames.offer(completedInstanceName);
    if (this.eventSubmitter.isPresent()) {
        this.eventSubmitter.get().submit(GobblinYarnEventConstants.EventNames.HELIX_INSTANCE_COMPLETION,
                eventMetadataBuilder.get().build());
    }
    LOGGER.info(String.format("Requesting a new container to replace %s to run Helix instance %s",
            containerStatus.getContainerId(), completedInstanceName));
    this.eventBus.post(new NewContainerRequest(
            shouldStickToTheSameNode(containerStatus.getExitStatus()) ?
                    Optional.of(completedContainerEntry.getKey()) : Optional.<Container>absent()));
} |
java | public static BitcoinTransaction convertToBitcoinTransaction(HiveBitcoinTransaction transaction) {
    List<BitcoinTransactionOutput> newTransactionsOutputList = new ArrayList<>();
    for (int j = 0; j < transaction.getListOfOutputs().size(); j++) {
        HiveBitcoinTransactionOutput currentOutput = transaction.getListOfOutputs().get(j);
        newTransactionsOutputList.add(new BitcoinTransactionOutput(currentOutput.getValue().bigDecimalValue().toBigIntegerExact(),
                currentOutput.getTxOutScriptLength(), currentOutput.getTxOutScript()));
    }
    BitcoinTransaction result = new BitcoinTransaction(transaction.getMarker(), transaction.getFlag(),
            transaction.getVersion(), transaction.getInCounter(), transaction.getListOfInputs(),
            transaction.getOutCounter(), newTransactionsOutputList, transaction.getBitcoinScriptWitness(),
            transaction.getLockTime());
    return result;
} |
java | public static <T> Predicate<T> in(Collection<? extends T> target) {
    return t -> {
        try {
            return target.contains(t);
        } catch (ClassCastException | NullPointerException e) {
            return false;
        }
    };
} |
python | def erase_down(self):  # <ESC>[0J -or- <ESC>[J
    '''Erases the screen from the current line down to the bottom of the
    screen.'''
    self.erase_end_of_line()
    self.fill_region(self.cur_r + 1, 1, self.rows, self.cols) |
python | def _update_xyz(self, change):
    """ Keep x,y,z in sync with position """
    self.x, self.y, self.z = self.position.X(), self.position.Y(), self.position.Z() |
java | @Override
public IAuthorizationPrincipal newPrincipal(String key, Class type) {
    final Tuple<String, Class> principalKey = new Tuple<>(key, type);
    final Element element = this.principalCache.get(principalKey);
    // principalCache is self-populating, it can never return a null entry
    return (IAuthorizationPrincipal) element.getObjectValue();
} |
java | @InService(SegmentServiceImpl.class)
private Type writeCheckpointFull(TableKelp table,
                                 OutputStream os,
                                 int saveTail)
    throws IOException
{
    os.write(getMinKey());
    os.write(getMaxKey());
    /* db/2310
    if (Arrays.equals(getMinKey(), getMaxKey())) {
        throw new IllegalStateException("bad keys");
    }
    */
    BlockLeaf []blocks = _blocks;
    int index = blocks.length - (saveTail / BLOCK_SIZE);
    int rowFirst = saveTail % BLOCK_SIZE;
    BitsUtil.writeInt16(os, blocks.length - index);
    if (blocks.length <= index) {
        return Type.LEAF;
    }
    blocks[index].writeCheckpointFull(os, rowFirst);
    for (int i = index + 1; i < blocks.length; i++) {
        blocks[i].writeCheckpointFull(os, 0);
    }
    return Type.LEAF;
} |
java | public java.util.List<Type> getType() {
    if (myType == null) {
        myType = new java.util.ArrayList<Type>();
    }
    return myType;
} |
python | def get(self, key: str, *,
        prompt: Optional[Message_T] = None,
        arg_filters: Optional[List[Filter_T]] = None,
        **kwargs) -> Any:
    """
    Get an argument with a given key.
    If the argument does not exist in the current session,
    a pause exception will be raised, and the caller of
    the command will know it should keep the session for
    further interaction with the user.
    :param key: argument key
    :param prompt: prompt to ask the user
    :param arg_filters: argument filters for the next user input
    :return: the argument value
    """
    if key in self.state:
        return self.state[key]
    self.current_key = key
    self.current_arg_filters = arg_filters
    self._current_send_kwargs = kwargs
    self.pause(prompt, **kwargs) |
java | protected void assertEndBraceExists(Method executeMethod, String urlPattern, int index) {
    if (index >= 0) {
        throwUrlPatternEndBraceNotFoundException(executeMethod, urlPattern, index);
    }
} |
java | public static <E extends Enum<E>> EnumSet<E> processBits(final Class<E> enumClass, final long value) {
    return EnumUtils.processBitVector(enumClass, value);
} |
java | public void setTargetCertConstraints(CertSelector selector) {
    if (selector != null)
        certSelector = (CertSelector) selector.clone();
    else
        certSelector = null;
} |
java | @Nonnull
public static String readFile(@Nonnull File file, @Nonnull String charset) throws IOException {
    return readFile(file, Charset.forName(charset));
} |
python | def color_text(text, color):
    r"""
    SeeAlso:
        highlight_text
    lexer_shortnames = sorted(ut.flatten(ut.take_column(pygments.lexers.LEXERS.values(), 2)))
    """
    import utool as ut
    if color is None or not ENABLE_COLORS:
        return text
    elif color == 'python':
        return highlight_text(text, color)
    elif color == 'sql':
        return highlight_text(text, 'sql')
    try:
        import pygments
        import pygments.console
        # if color == 'guess':
        #     import linguist  # NOQA
        #     pygments.lexers.guess_lexer(text)
        #     return highlight_text(text, color)
        ansi_text = pygments.console.colorize(color, text)
        if ut.WIN32:
            import colorama
            ansi_reset = (colorama.Style.RESET_ALL)
        else:
            ansi_reset = pygments.console.colorize('reset', '')
        ansi_text = ansi_text + ansi_reset
        return ansi_text
    except ImportError:
        return text |
python | def method_already_there(object_type, method_name, this_class_only=False):
    """
    Returns True if method `method_name` is already implemented by object_type,
    that is, its implementation differs from the one in `object`.
    :param object_type:
    :param method_name:
    :param this_class_only:
    :return:
    """
    if this_class_only:
        return method_name in vars(object_type)  # or object_type.__dict__
    else:
        try:
            method = getattr(object_type, method_name)
        except AttributeError:
            return False
        else:
            return method is not None and method is not getattr(object, method_name, None) |
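A behaviour sketch: a method counts as "already there" only when the class's implementation differs from `object`'s.

```python
class Plain:
    pass

class WithRepr:
    def __repr__(self):
        return 'WithRepr()'

print(method_already_there(Plain, '__repr__'))     # False: inherited from object
print(method_already_there(WithRepr, '__repr__'))  # True: overridden here
print(method_already_there(WithRepr, '__repr__', this_class_only=True))  # True
```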
java | public static String joinOptions(String[] optionArray) {
    String optionString = "";
    for (int i = 0; i < optionArray.length; i++) {
        if (optionArray[i].equals("")) {
            continue;
        }
        boolean escape = false;
        for (int n = 0; n < optionArray[i].length(); n++) {
            if (Character.isWhitespace(optionArray[i].charAt(n))) {
                escape = true;
                break;
            }
        }
        if (escape) {
            optionString += '"' + backQuoteChars(optionArray[i]) + '"';
        } else {
            optionString += optionArray[i];
        }
        optionString += " ";
    }
    return optionString.trim();
} |
java | public void writeToExcel(List<? extends Object> datas, boolean hasTitle, String path,
        boolean transverse) throws IOException {
    writeToExcel(datas, hasTitle, path, IN_MEMORY, transverse);
} |
java | public java.util.List<String> getRejectedPatches() {
    if (rejectedPatches == null) {
        rejectedPatches = new com.amazonaws.internal.SdkInternalList<String>();
    }
    return rejectedPatches;
} |
python | def logs_for_job(self, job_name, wait=False, poll=10):  # noqa: C901 - suppress complexity warning for this method
    """Display the logs for a given training job, optionally tailing them until the
    job is complete. If the output is a tty or a Jupyter cell, it will be color-coded
    based on which instance the log entry is from.
    Args:
        job_name (str): Name of the training job to display the logs for.
        wait (bool): Whether to keep looking for new log entries until the job completes (default: False).
        poll (int): The interval in seconds between polling for new log entries and job completion (default: 10).
    Raises:
        ValueError: If waiting and the training job fails.
    """
    description = self.sagemaker_client.describe_training_job(TrainingJobName=job_name)
    print(secondary_training_status_message(description, None), end='')
    instance_count = description['ResourceConfig']['InstanceCount']
    status = description['TrainingJobStatus']
    stream_names = []  # The list of log streams
    positions = {}     # The current position in each stream, map of stream name -> position
    # Increase retries allowed (from default of 4), as we don't want waiting for a training job
    # to be interrupted by a transient exception.
    config = botocore.config.Config(retries={'max_attempts': 15})
    client = self.boto_session.client('logs', config=config)
    log_group = '/aws/sagemaker/TrainingJobs'
    job_already_completed = True if status == 'Completed' or status == 'Failed' or status == 'Stopped' else False
    state = LogState.TAILING if wait and not job_already_completed else LogState.COMPLETE
    dot = False
    color_wrap = sagemaker.logs.ColorWrap()
    # The loop below implements a state machine that alternates between checking the job status and
    # reading whatever is available in the logs at this point. Note, that if we were called with
    # wait == False, we never check the job status.
    #
    # If wait == TRUE and job is not completed, the initial state is TAILING
    # If wait == FALSE, the initial state is COMPLETE (doesn't matter if the job really is complete).
    #
    # The state table:
    #
    # STATE               ACTIONS                        CONDITION           NEW STATE
    # ----------------    ----------------               -----------------   ----------------
    # TAILING             Read logs, Pause, Get status   Job complete        JOB_COMPLETE
    #                                                    Else                TAILING
    # JOB_COMPLETE        Read logs, Pause               Any                 COMPLETE
    # COMPLETE            Read logs, Exit                N/A
    #
    # Notes:
    # - The JOB_COMPLETE state forces us to do an extra pause and read any items that got to Cloudwatch after
    #   the job was marked complete.
    last_describe_job_call = time.time()
    last_description = description
    while True:
        if len(stream_names) < instance_count:
            # Log streams are created whenever a container starts writing to stdout/err, so this list
            # may be dynamic until we have a stream for every instance.
            try:
                streams = client.describe_log_streams(logGroupName=log_group, logStreamNamePrefix=job_name + '/',
                                                      orderBy='LogStreamName', limit=instance_count)
                stream_names = [s['logStreamName'] for s in streams['logStreams']]
                positions.update([(s, sagemaker.logs.Position(timestamp=0, skip=0))
                                  for s in stream_names if s not in positions])
            except ClientError as e:
                # On the very first training job run on an account, there's no log group until
                # the container starts logging, so ignore any errors thrown about that
                err = e.response.get('Error', {})
                if err.get('Code', None) != 'ResourceNotFoundException':
                    raise
        if len(stream_names) > 0:
            if dot:
                print('')
                dot = False
            for idx, event in sagemaker.logs.multi_stream_iter(client, log_group, stream_names, positions):
                color_wrap(idx, event['message'])
                ts, count = positions[stream_names[idx]]
                if event['timestamp'] == ts:
                    positions[stream_names[idx]] = sagemaker.logs.Position(timestamp=ts, skip=count + 1)
                else:
                    positions[stream_names[idx]] = sagemaker.logs.Position(timestamp=event['timestamp'], skip=1)
        else:
            dot = True
            print('.', end='')
            sys.stdout.flush()
        if state == LogState.COMPLETE:
            break
        time.sleep(poll)
        if state == LogState.JOB_COMPLETE:
            state = LogState.COMPLETE
        elif time.time() - last_describe_job_call >= 30:
            description = self.sagemaker_client.describe_training_job(TrainingJobName=job_name)
            last_describe_job_call = time.time()
            if secondary_training_status_changed(description, last_description):
                print()
                print(secondary_training_status_message(description, last_description), end='')
                last_description = description
            status = description['TrainingJobStatus']
            if status == 'Completed' or status == 'Failed' or status == 'Stopped':
                print()
                state = LogState.JOB_COMPLETE
    if wait:
        self._check_job_status(job_name, description, 'TrainingJobStatus')
        if dot:
            print()
        # Customers are not billed for hardware provisioning, so billable time is less than total time
        billable_time = (description['TrainingEndTime'] - description['TrainingStartTime']) * instance_count
        print('Billable seconds:', int(billable_time.total_seconds()) + 1) |
python | def import_obj(cls, i_datasource, import_time=None):
    """Imports the datasource from the object to the database.
    Metrics, columns and the datasource will be overridden if they exist.
    This function can be used to import/export dashboards between multiple
    superset instances. Audit metadata isn't copied over.
    """
    def lookup_sqlatable(table):
        return db.session.query(SqlaTable).join(Database).filter(
            SqlaTable.table_name == table.table_name,
            SqlaTable.schema == table.schema,
            Database.id == table.database_id,
        ).first()

    def lookup_database(table):
        return db.session.query(Database).filter_by(
            database_name=table.params_dict['database_name']).one()
    return import_datasource.import_datasource(
        db.session, i_datasource, lookup_database, lookup_sqlatable,
        import_time) |
java | @Override
public V put(K key, V value)
{
    BinarySet<Entry<K,V>> es = (BinarySet<Entry<K,V>>) entrySet;
    Entry<K,V> oldEntry = es.addEntry(entry(key, value));
    if (oldEntry != null)
    {
        return oldEntry.getValue();
    }
    return null;
} |
python | def getall(self):
    """Returns all ACLs in a dict object.
    Returns:
        A Python dictionary object containing all ACL
        configuration indexed by ACL name::
            {
                "<ACL1 name>": {...},
                "<ACL2 name>": {...}
            }
    """
    acl_re = re.compile(r'^ip access-list (?:(standard) )?(.+)$', re.M)
    response = {'standard': {}, 'extended': {}}
    for acl_type, name in acl_re.findall(self.config):
        acl = self.get(name)
        if acl_type and acl_type == 'standard':
            response['standard'][name] = acl
        else:
            response['extended'][name] = acl
    return response |
java | public static Object invokeReadMethodOptional(
    Object bean, String propertyName)
{
    Class<?> c = bean.getClass();
    Method method = getReadMethodOptional(c, propertyName);
    if (method == null)
    {
        return null;
    }
    return Methods.invokeOptional(method, bean);
} |
python | def search_for_parent_dir(start_at: str = None, with_files: set = None,
                          with_dirs: set = None) -> str:
    """Return absolute path of first parent directory of `start_at` that
    contains all files `with_files` and all dirs `with_dirs`
    (including `start_at`).
    If `start_at` not specified, start at current working directory.
    :param start_at: Initial path for searching for the project build file.
    Returns `None` upon reaching FS root without finding a project buildfile.
    """
    if not start_at:
        start_at = os.path.abspath(os.curdir)
    if not with_files:
        with_files = set()
    if not with_dirs:
        with_dirs = set()
    exp_hits = len(with_files) + len(with_dirs)
    while start_at:
        num_hits = 0
        for entry in scandir(start_at):
            if ((entry.is_file() and entry.name in with_files) or
                    (entry.is_dir() and entry.name in with_dirs)):
                num_hits += 1
        if num_hits == exp_hits:
            return start_at
        cur_level = start_at
        start_at = os.path.split(cur_level)[0]
        if os.path.realpath(cur_level) == os.path.realpath(start_at):
            # looped on root once
            break |
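A usage sketch (the marker files are hypothetical): find the nearest ancestor that looks like a project root.

```python
root = search_for_parent_dir(with_files={'setup.py'}, with_dirs={'.git'})
print(root)  # e.g. '/home/me/myproject', or None if no ancestor matches
```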
java | private static MethodRef getBuilderMethod(Descriptor descriptor) {
    TypeInfo message = messageRuntimeType(descriptor);
    TypeInfo builder = builderRuntimeType(descriptor);
    return MethodRef.createStaticMethod(
            message, new Method("newBuilder", builder.type(), NO_METHOD_ARGS))
        .asNonNullable();
} |
java | public void clear(String layerName) {
    synchronized (this) {
        log.info("clearing cache for layer " + layerName);
        for (String key : cache.keySet()) {
            if (key.contains(layerName))
                remove(key);
        }
    }
} |
java | public static PolicyLimit findPolicyLimitByUserAndCounter(EntityManager em, PrincipalUser user, PolicyCounter counter) {
    TypedQuery<PolicyLimit> query = em.createNamedQuery("PolicyLimit.findPolicyLimitByUserAndCounter", PolicyLimit.class);
    try {
        query.setParameter("user", user);
        query.setParameter("counter", counter);
        return query.getSingleResult();
    } catch (NoResultException ex) {
        return null;
    }
} |
java | @Override
public long dynamicQueryCount(DynamicQuery dynamicQuery,
        Projection projection) {
    return commerceDiscountRelPersistence.countWithDynamicQuery(dynamicQuery,
        projection);
} |
python | def domains(self, history=None):
    """
    Get the set of I{all} domain names.
    @param history: A history of nodes checked to prevent
        circular hunting.
    @type history: [L{Properties},..]
    @return: A set of domain names.
    @rtype: list
    """
    if history is None:
        history = []
    history.append(self)
    domains = set()
    domains.add(self.domain)
    for x in self.links:
        if x in history:
            continue
        domains.update(x.domains(history))
    history.remove(self)
    return domains |
java | @Override
public void clearCache(CPDefinitionGroupedEntry cpDefinitionGroupedEntry) {
    entityCache.removeResult(CPDefinitionGroupedEntryModelImpl.ENTITY_CACHE_ENABLED,
        CPDefinitionGroupedEntryImpl.class,
        cpDefinitionGroupedEntry.getPrimaryKey());
    finderCache.clearCache(FINDER_CLASS_NAME_LIST_WITH_PAGINATION);
    finderCache.clearCache(FINDER_CLASS_NAME_LIST_WITHOUT_PAGINATION);
    clearUniqueFindersCache((CPDefinitionGroupedEntryModelImpl)cpDefinitionGroupedEntry,
        true);
} |
java | public static List<UIComponent> resolveComponents(FacesContext context, UIComponent source, String expressions) {
    return resolveComponents(context, source, expressions, SearchExpressionHint.NONE);
} |
python | def logout(request):
    """View to forget the user"""
    request.response.headers.extend(forget(request))
    return {'redirect': request.POST.get('came_from', '/')} |
java | public static Slice wrappedBooleanArray(boolean[] array, int offset, int length)
{
    if (length == 0) {
        return EMPTY_SLICE;
    }
    return new Slice(array, offset, length);
} |
python | def _set_tracker_uri(self, uri):
    """
    Called when we start a new resumable upload or get a new tracker
    URI for the upload. Saves URI and resets upload state.
    Raises InvalidUriError if URI is syntactically invalid.
    """
    parse_result = urlparse.urlparse(uri)
    if (parse_result.scheme.lower() not in ['http', 'https'] or
            not parse_result.netloc or not parse_result.query):
        raise InvalidUriError('Invalid tracker URI (%s)' % uri)
    qdict = cgi.parse_qs(parse_result.query)
    if not qdict or not 'upload_id' in qdict:
        raise InvalidUriError('Invalid tracker URI (%s)' % uri)
    self.tracker_uri = uri
    self.tracker_uri_host = parse_result.netloc
    self.tracker_uri_path = '%s/?%s' % (parse_result.netloc,
                                        parse_result.query)
    self.server_has_bytes = 0 |
python | def dns(self):
    """DNS details."""
    dns = {
        'elb': self.dns_elb(),
        'elb_region': self.dns_elb_region(),
        'global': self.dns_global(),
        'region': self.dns_region(),
        'instance': self.dns_instance(),
    }
    return dns |
java | public static String doGet(final String url, final int retryTimes) {
    try {
        return doGetByLoop(url, retryTimes);
    } catch (HttpException e) {
        throw new HttpException(format("Failed to download content for url: '%s'. Tried '%s' times",
                url, Math.max(retryTimes + 1, 1)));
    }
} |
python | def BinToTri(self, a, b):
    '''
    Turn an a-b coord to an x-y-z triangular coord.
    If b is negative, calc with its abs then return (a, -b).
    :param a,b: the numbers of the a-b coord
    :type a,b: float or double are both OK, just numbers
    :return: the corresponding x-y-z triangular coord
    :rtype: a tuple consist of x,y,z
    '''
    if (b >= 0):
        y = a - b / np.sqrt(3)
        z = b * 2 / np.sqrt(3)
        x = 100 - (a + b / np.sqrt(3))
        return (x, y, z)
    else:
        y = a + b / np.sqrt(3)
        z = b * 2 / np.sqrt(3)
        x = 100 - (a - b / np.sqrt(3))
        return (x, y, z) |
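A quick check of the geometry: for b >= 0 the returned (x, y, z) always sums to 100, as expected for a ternary-plot coordinate.

```python
import numpy as np

a, b = 40.0, 30.0
y = a - b / np.sqrt(3)
z = b * 2 / np.sqrt(3)
x = 100 - (a + b / np.sqrt(3))
print(x + y + z)  # 100.0 (up to floating-point rounding)
```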
python | def files(self):
    """
    Yield relative file paths specified in :attr:`metainfo`
    Each path starts with :attr:`name`.
    Note that the paths may not exist. See :attr:`filepaths` for existing
    files.
    """
    info = self.metainfo['info']
    if 'length' in info:    # Singlefile
        yield info['name']
    elif 'files' in info:   # Multifile torrent
        rootdir = self.name
        for fileinfo in info['files']:
            yield os.path.join(rootdir, os.path.join(*fileinfo['path'])) |
java | public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain chain) throws ServletException, IOException
{
    servletRequest.setCharacterEncoding("UTF-8");
    httpRequest.set(servletRequest);
    httpResponse.set(servletResponse);
    Request appRequest = null;
    try
    {
        // String sessionToken = ServletSupport.getSessionTokenFromCookie(servletRequest);
        String sessionToken = ServletSupport.getCookieValue(servletRequest, SESSION_TOKEN_KEY);
        String userId = ServletSupport.getCookieValue(servletRequest, USER_ID_KEY);
        if ("".equals(sessionToken))
        {
            sessionToken = null;
        }
        if ("".equals(userId))
        {
            userId = null;
        }
        if (accessManager != null) {
            appRequest = accessManager.bindRequest(this);
            // System.out.println("request bound!! " + appRequest);
            Session session = appRequest.resolveSession(sessionToken, userId);
        }
        if (this.syncUserPrefs && appRequest.getTimesEntered() == 0)
        {
            //pass user preferences here
            ServletSupport.importCookieValues(servletRequest, appRequest.getUserSettings());
            ServletSupport.exportCookieValues(servletResponse, appRequest.getUserSettings(), "/", userPrefsMaxAge, Arrays.asList(new String[]{SESSION_TOKEN_KEY}));
        }
        //if a user logged in, the user id must be stored
        if (userId == null)
        {
            User user = appRequest.getUser();
            if (user != null)
            {
                storeSessionDataInCookie(USER_ID_KEY, user.getId(), servletResponse);
            }
        }
        //role based access control
        // checkAccess(servletRequest, appRequest);
        //delegate request
        chain.doFilter(servletRequest, servletResponse);
    }
    catch (Throwable t) //make sure user gets a controlled response
    {
        //is this the actual entry point or is this entry point wrapped?
        if (appRequest != null && appRequest.getTimesEntered() > 1)
        {
            //it's wrapped, so the exception must be thrown at the top entry point
            ServletSupport.rethrow(t);
        }
        handleException(servletRequest, servletResponse, t);
    }
    finally
    {
        /* if(loggingEnabled)
        {
            application.log(new PageVisitLogEntry((HttpServletRequest) servletRequest));
        }
        application.releaseRequest(); */
        if (accessManager != null) {
            accessManager.releaseRequest();
        }
    }
} |
python | def list_statistics(self, begin_date, end_date, shop_id=-1):
    """
    Wi-Fi data statistics.
    For details, see
    http://mp.weixin.qq.com/wiki/8/dfa2b756b66fca5d9b1211bc18812698.html
    :param begin_date: start date; the span between begin and end may be at most 30 days
    :param end_date: end date; the span between begin and end may be at most 30 days
    :param shop_id: optional shop ID to filter by; -1 means aggregate statistics over all shops
    :return: the returned JSON data
    """
    if isinstance(begin_date, (datetime, date)):
        begin_date = begin_date.strftime('%Y-%m-%d')
    if isinstance(end_date, (datetime, date)):
        end_date = end_date.strftime('%Y-%m-%d')
    res = self._post(
        'statistics/list',
        data={
            'begin_date': begin_date,
            'end_date': end_date,
            'shop_id': shop_id
        },
        result_processor=lambda x: x['data']
    )
    return res |
java | public int getNumPoints() {
    if (isEmpty()) {
        return 0;
    }
    int total = 0;
    for (Polygon polygon : polygons) {
        total += polygon.getNumPoints();
    }
    return total;
} |
python | def serialize(self, value):
    """Convert the external Python value to a type that is suitable for
    storing in a Mutagen file object.
    """
    if isinstance(value, float) and self.as_type is six.text_type:
        value = u'{0:.{1}f}'.format(value, self.float_places)
        value = self.as_type(value)
    elif self.as_type is six.text_type:
        if isinstance(value, bool):
            # Store bools as 1/0 instead of True/False.
            value = six.text_type(int(bool(value)))
        elif isinstance(value, bytes):
            value = value.decode('utf-8', 'ignore')
        else:
            value = six.text_type(value)
    else:
        value = self.as_type(value)
    if self.suffix:
        value += self.suffix
    return value |
python | def set_cookie(self, cookie=None):
    """Set the Cookie header."""
    if cookie:
        self._cookie = cookie.encode()
    else:
        self._cookie = None |
python | def _get_dynamic_attr(self, attname, obj, default=None):
    """
    Copied from django.contrib.syndication.views.Feed (v1.7.1)
    """
    try:
        attr = getattr(self, attname)
    except AttributeError:
        return default
    if callable(attr):
        # Check co_argcount rather than try/excepting the function and
        # catching the TypeError, because something inside the function
        # may raise the TypeError. This technique is more accurate.
        try:
            code = six.get_function_code(attr)
        except AttributeError:
            code = six.get_function_code(attr.__call__)
        if code.co_argcount == 2:  # one argument is 'self'
            return attr(obj)
        else:
            return attr()
    return attr |
java | public static String combine(String str1, String str2, String separator) {
    if (separator == null || separator.isEmpty()) {
        return str1 == null ? str2 : str1.concat(str2);
    }
    if (str1 == null)
        str1 = "";
    if (str2 == null)
        str2 = "";
    StringBuilder builder = new StringBuilder();
    if (str1.endsWith(separator)) {
        builder.append(str1.substring(0, str1.length() - separator.length()));
    } else {
        builder.append(str1);
    }
    builder.append(separator);
    if (str2.startsWith(separator)) {
        builder.append(str2.substring(separator.length()));
    } else {
        builder.append(str2);
    }
    return builder.toString();
} |
java | @Override
public int compareTo(Enhancement o) {
    if (this.equals(o))
        return 0;
    if (this.confidence > o.getConfidence())
        return -1;
    else if (this.confidence < o.getConfidence())
        return 1;
    else
        return 0;
} |
java | public ServiceFuture<List<PhraseListFeatureInfo>> listPhraseListsAsync(UUID appId, String versionId, ListPhraseListsOptionalParameter listPhraseListsOptionalParameter, final ServiceCallback<List<PhraseListFeatureInfo>> serviceCallback) {
    return ServiceFuture.fromResponse(listPhraseListsWithServiceResponseAsync(appId, versionId, listPhraseListsOptionalParameter), serviceCallback);
} |
python | def _parse(self, date_str, format='%Y-%m-%d'):
    """
    helper function for parsing FRED date string into datetime
    """
    rv = pd.to_datetime(date_str, format=format)
    if hasattr(rv, 'to_pydatetime'):
        rv = rv.to_pydatetime()
    return rv |
python | def get_feature(self, croplayer_id, cropfeature_id):
    """
    Gets a crop feature
    :param int croplayer_id: ID of a cropping layer
    :param int cropfeature_id: ID of a cropping feature
    :rtype: CropFeature
    """
    target_url = self.client.get_url('CROPFEATURE', 'GET', 'single',
                                     {'croplayer_id': croplayer_id, 'cropfeature_id': cropfeature_id})
    return self.client.get_manager(CropFeature)._get(target_url) |
java | public Node removeNamedItemNS(String namespaceURI, String localName)
        throws DOMException {
    throw new DOMException(DOMException.NO_MODIFICATION_ALLOWED_ERR, null);
} |
java | public static List<AvailableNumber> searchLocal(final BandwidthClient client, final Map<String, Object> params)
        throws Exception {
    final String tollFreeUri = BandwidthConstants.AVAILABLE_NUMBERS_LOCAL_URI_PATH;
    final JSONArray array = toJSONArray(client.get(tollFreeUri, params));
    final List<AvailableNumber> numbers = new ArrayList<AvailableNumber>();
    for (final Object obj : array) {
        numbers.add(new AvailableNumber(client, (JSONObject) obj));
    }
    return numbers;
} |
python | def collision_rate(Temperature, element, isotope):
    r"""This function receives the temperature of an atomic vapour (in Kelvin),
    the element, and the isotope of the atoms, and returns the angular
    frequency rate of collisions (in rad/s) in a vapour assuming a
    Maxwell-Boltzmann velocity distribution, and taking the cross section
    of the collision to be
        sigma = pi*(2*r)**2
    where r is the atomic radius. The collision rate returned is
        gamma_col = 2*pi * (sigma * v * n)
    where v is the average velocity of the distribution, and n is the
    number density of the vapour.
    A few examples (in Hz):
    >>> print collision_rate(25 + 273.15, "Cs", 133)/2/pi
    9.0607260277
    For cesium collisions become important for temperatures above 120 Celsius.
    >>> print collision_rate(120 + 273.15, "Cs", 133)/2/pi
    10519.235289
    """
    atom = Atom(element, isotope)
    sigma = pi*(2*atom.radius)**2
    v = speed_average(Temperature, element, isotope)
    n = vapour_number_density(Temperature, element)
    return 2*pi*sigma*v*n |
java | @NotNull
@ObjectiveCName("validatePasswordCommand:")
public Command<AuthState> validatePassword(String password) {
    return modules.getAuthModule().requestValidatePassword(password);
} |
java | public void log(int level, String message, Throwable exception) {
    doLog(message, level, null, exception);
} |
python | def _do_tcp_check(self, ip, results):
    """
    Attempt to establish a TCP connection.
    If not successful, record the IP in the results list.
    Always closes the connection at the end.
    """
    # create the socket outside the try block so it is always bound in finally
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.settimeout(1)
        sock.connect((ip, self.conf['tcp_check_port']))
    except:
        # Any problem during the connection attempt? We won't diagnose it,
        # we just indicate failure by adding the IP to the list
        results.append(ip)
    finally:
        sock.close() |