language (string, 2 classes) | func_code_string (string, 63 to 466k characters)
---|---
python | def _interpret_oserror(exc, cwd, cmd):
"""Interpret an OSError exc and raise the appropriate dbt exception.
"""
if len(cmd) == 0:
raise dbt.exceptions.CommandError(cwd, cmd)
# all of these functions raise unconditionally
if os.name == 'nt':
_handle_windows_error(exc, cwd, cmd)
else:
_handle_posix_error(exc, cwd, cmd)
# this should not be reachable, raise _something_ at least!
raise dbt.exceptions.InternalException(
'Unhandled exception in _interpret_oserror: {}'.format(exc)
) |
python | def is_cell_big(self, cell_detection):
"""
Check if the cell is considered big.
@param CellFeature cell_detection:
@return:
"""
return cell_detection.area > self.parameters_tracking["big_size"] * self.scale * self.scale |
python | def send(self, message):
"""Make a Twilio SendGrid v3 API request with the request body generated by
the Mail object
:param message: The Twilio SendGrid v3 API request body generated by the Mail
object
:type message: Mail
"""
if isinstance(message, dict):
response = self.client.mail.send.post(request_body=message)
else:
response = self.client.mail.send.post(request_body=message.get())
return response |
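A minimal usage sketch for the send() method above, assuming the sendgrid-python v6 client and its Mail helper; the addresses and API key are placeholders, not values from the source:

from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import Mail

# hypothetical message; all field values are placeholders
message = Mail(
    from_email='sender@example.com',
    to_emails='recipient@example.com',
    subject='Hello',
    html_content='<p>Hello from Twilio SendGrid</p>')
client = SendGridAPIClient('SG.placeholder-api-key')
response = client.send(message)  # dispatches via the send() method shown above
print(response.status_code)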
java | @Override
public void clear() {
super.clear();
if (this.nicosep != null) {
final N child = this.nicosep;
setIcosepChild(null);
child.clear();
}
} |
python | def _queue_job(redis, queue_name, delegate, *args):
"""
creates a new job on the queue
:param redis: redis
:param delegate: method to be executed by the queue. Use fully qualified method name as String.
:param args: arguments of the method
:return: job_id
"""
log.info('Queuing job...')
with Connection(redis):
q = Queue(queue_name)
job = q.enqueue(delegate, *args)
return job.id |
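A hedged usage sketch for the helper above, assuming an RQ-backed setup; the Redis connection and the fully qualified task path 'myapp.tasks.process_order' are illustrative only:

from redis import Redis

redis_conn = Redis()  # local Redis on the default port
# 'myapp.tasks.process_order' is a hypothetical dotted path to the task function
job_id = _queue_job(redis_conn, 'default', 'myapp.tasks.process_order', 42)
print('queued job', job_id)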
java | public static ScientificNumberFormatter getMarkupInstance(
DecimalFormat df,
String beginMarkup,
String endMarkup) {
return getInstance(
df, new MarkupStyle(beginMarkup, endMarkup));
} |
java | @Override
public void relocate()
{
int w = 360, h = 245;
int x = (this.getWidth() - w) / 2;
int y = (this.getHeight() - h) / 2;
outputLabel.setLocation(x, y);
outputPathField.setLocation(x + 160, y);
enableZipEncodingCompression.setLocation(x + 110, y + 40);
outputCompression.setLocation(x + 110, y + 75);
disableOutputCompression.setLocation(x + 110, y + 100);
enableBZip2OutputCompression.setLocation(x + 110, y + 120);
enable7ZipOutputCompression.setLocation(x + 110, y + 140);
activateDataFileOutput.setLocation(x + 110, y + 160);
enableMultipleOutputFiles.setLocation(x, y + 190);
outputSizeLimitLabel.setLocation(x, y + 220);
outputSizeLimitField.setLocation(x + 160, y + 220);
} |
python | def file_or_stderr(filename, *, mode="a", encoding="utf-8"):
"""Returns a context object wrapping either the given file or
stderr (if filename is None). This makes dealing with log files
more convenient.
"""
if filename is not None:
return open(filename, mode, encoding=encoding)
@contextmanager
def stderr_wrapper():
yield sys.stderr
return stderr_wrapper() |
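A brief usage sketch; it assumes the module already imports sys and contextlib.contextmanager, as the helper itself requires:

# write to a named log file when a path is given...
with file_or_stderr("run.log") as f:
    print("logged to file", file=f)

# ...and fall back to stderr when filename is None
with file_or_stderr(None) as f:
    print("logged to stderr", file=f)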
java | private Multimap<String, TokenRange> describeCassandraTopology(final Keyspace keyspace) {
try {
@SuppressWarnings ("unchecked")
ConnectionPool<Cassandra.Client> connectionPool = (ConnectionPool<Cassandra.Client>) keyspace.getConnectionPool();
return connectionPool.executeWithFailover(
new AbstractKeyspaceOperationImpl<Multimap<String, TokenRange>>(EmptyKeyspaceTracerFactory.getInstance().newTracer(CassandraOperationType.DESCRIBE_RING), keyspace.getKeyspaceName()) {
@Override
protected Multimap<String, TokenRange> internalExecute(Cassandra.Client client, ConnectionContext state)
throws Exception {
Multimap<String, TokenRange> racks = ArrayListMultimap.create();
for (org.apache.cassandra.thrift.TokenRange tokenRange : client.describe_local_ring(getKeyspace())) {
// The final local endpoint "owns" the token range, the rest are for replication
EndpointDetails endpointDetails = Iterables.getLast(tokenRange.getEndpoint_details());
racks.put(endpointDetails.getRack(),
new TokenRangeImpl(tokenRange.getStart_token(), tokenRange.getEnd_token(), tokenRange.getEndpoints()));
}
return Multimaps.unmodifiableMultimap(racks);
}
},
keyspace.getConfig().getRetryPolicy().duplicate()).getResult();
} catch (ConnectionException e) {
throw Throwables.propagate(e);
}
} |
java | public <T> T resolveFuture(final Future<T> future, final boolean autocancel) throws RuntimeException
{
while (isValid() && !future.isCancelled())
{
try
{
return future.get(getTimeLeft(), TimeUnit.MILLISECONDS);
}
catch (InterruptedException e)
{
// ignore and start waiting again
}
catch (ExecutionException e)
{
try
{
if (autocancel)
future.cancel(true);
}
catch (Throwable t)
{
log.info("{resolveFuture} Error auto-cancelling after ExecutionException: " + t.getMessage(), t);
}
throw new RuntimeException(e);
}
catch (TimeoutException e)
{
break;
}
}
// We timed out
future.cancel(true);
return null;
} |
java | public void clear() {
try {
events.cleanup();
events = new XFastEventList(this.attributeMapSerializer);
} catch (IOException e) {
e.printStackTrace();
}
} |
python | def get_firewall_rules(self, server):
"""
Return all FirewallRule objects based on a server instance or uuid.
"""
server_uuid, server_instance = uuid_and_instance(server)
url = '/server/{0}/firewall_rule'.format(server_uuid)
res = self.get_request(url)
return [
FirewallRule(server=server_instance, **firewall_rule)
for firewall_rule in res['firewall_rules']['firewall_rule']
] |
java | public static <T> T construct(String classname, String readable) throws ConfigurationException
{
Class<T> cls = FBUtilities.classForName(classname, readable);
try
{
return cls.newInstance();
}
catch (IllegalAccessException e)
{
throw new ConfigurationException(String.format("Default constructor for %s class '%s' is inaccessible.", readable, classname));
}
catch (InstantiationException e)
{
throw new ConfigurationException(String.format("Cannot use abstract class '%s' as %s.", classname, readable));
}
catch (Exception e)
{
// Catch-all because Class.newInstance() "propagates any exception thrown by the nullary constructor, including a checked exception".
if (e.getCause() instanceof ConfigurationException)
throw (ConfigurationException)e.getCause();
throw new ConfigurationException(String.format("Error instantiating %s class '%s'.", readable, classname), e);
}
} |
java | public List<GeneratorOutput> getCheckOutput(Filer filer) throws IOException
{
HashMap<String,Object> map = new HashMap<String,Object>();
map.put("impl", this); // control implementation
map.put("init", _init); // control impl initializer
Writer writer = new IndentingWriter(filer.createSourceFile(_init.getClassName()));
GeneratorOutput genOut =
new GeneratorOutput(writer,"org/apache/beehive/controls/runtime/generator/ImplInitializer.vm",
map);
ArrayList<GeneratorOutput> genList = new ArrayList<GeneratorOutput>(1);
genList.add(genOut);
return genList;
} |
python | def space_clone(args):
""" Replicate a workspace """
# FIXME: add --deep copy option (shallow by default)
# add aliasing capability, then make space_copy alias
if not args.to_workspace:
args.to_workspace = args.workspace
if not args.to_project:
args.to_project = args.project
if (args.project == args.to_project
and args.workspace == args.to_workspace):
eprint("Error: destination project and namespace must differ from"
" cloned workspace")
return 1
r = fapi.clone_workspace(args.project, args.workspace, args.to_project,
args.to_workspace)
fapi._check_response_code(r, 201)
if fcconfig.verbosity:
msg = "{}/{} successfully cloned to {}/{}".format(
args.project, args.workspace,
args.to_project, args.to_workspace)
print(msg)
return 0 |
java | @RequestMapping(value = "/qrpay")
public void qrPay(
@RequestParam("orderNumber") String orderNumber,
HttpServletResponse response){
try {
String qrUrl = wepaySupport.qrPay(orderNumber);
response.sendRedirect(qrUrl);
} catch (IOException e) {
log.error("failed to qr pay(orderNumber={}), cause: {}",
orderNumber, e.getMessage());
}
} |
python | def set_shape(self, shape):
"""Update the shape."""
channels = shape[-1]
acceptable_channels = ACCEPTABLE_CHANNELS[self._encoding_format]
if channels not in acceptable_channels:
raise ValueError('Acceptable `channels` for %s: %s (was %s)' % (
self._encoding_format, acceptable_channels, channels))
self._shape = tuple(shape) |
java | public static void notNullOrEmpty(String parameter, String name) throws IllegalArgumentException {
if (parameter == null || parameter.isEmpty()) {
throw new IllegalArgumentException(name + " is null or empty.");
}
} |
java | @SuppressWarnings("unchecked")
@Override
public void eSet(int featureID, Object newValue) {
switch (featureID) {
case AfplibPackage.PGD__XPG_BASE:
setXpgBase((Integer)newValue);
return;
case AfplibPackage.PGD__YPG_BASE:
setYpgBase((Integer)newValue);
return;
case AfplibPackage.PGD__XPG_UNITS:
setXpgUnits((Integer)newValue);
return;
case AfplibPackage.PGD__YPG_UNITS:
setYpgUnits((Integer)newValue);
return;
case AfplibPackage.PGD__XPG_SIZE:
setXpgSize((Integer)newValue);
return;
case AfplibPackage.PGD__YPG_SIZE:
setYpgSize((Integer)newValue);
return;
case AfplibPackage.PGD__RESERVED:
setReserved((Integer)newValue);
return;
case AfplibPackage.PGD__TRIPLETS:
getTriplets().clear();
getTriplets().addAll((Collection<? extends Triplet>)newValue);
return;
}
super.eSet(featureID, newValue);
} |
java | public SessionInfo getLockSessionInfo(Task objSession, String strUserName)
{
if (objSession == null)
{
Utility.getLogger().warning("null session");
return new SessionInfo(0, strUserName); //
}
SessionInfo intSession = (SessionInfo)m_hmLockSessions.get(objSession);
if (intSession == null)
m_hmLockSessions.put(objSession, intSession = new SessionInfo(m_iNextLockSession++, strUserName));
return intSession;
} |
java | protected BufferedImage create_FOREGROUND_Image(final int WIDTH, final boolean WITH_CENTER_KNOB, final ForegroundType TYPE, final BufferedImage IMAGE) {
switch (getFrameType()) {
case ROUND:
return FOREGROUND_FACTORY.createRadialForeground(WIDTH, WITH_CENTER_KNOB, TYPE, IMAGE);
case SQUARE:
return FOREGROUND_FACTORY.createLinearForeground(WIDTH, WIDTH, WITH_CENTER_KNOB, IMAGE);
default:
return FOREGROUND_FACTORY.createRadialForeground(WIDTH, WITH_CENTER_KNOB, ForegroundType.FG_TYPE1, IMAGE);
}
} |
java | public Observable<ImagePrediction> predictImageUrlWithNoStoreAsync(UUID projectId, PredictImageUrlWithNoStoreOptionalParameter predictImageUrlWithNoStoreOptionalParameter) {
return predictImageUrlWithNoStoreWithServiceResponseAsync(projectId, predictImageUrlWithNoStoreOptionalParameter).map(new Func1<ServiceResponse<ImagePrediction>, ImagePrediction>() {
@Override
public ImagePrediction call(ServiceResponse<ImagePrediction> response) {
return response.body();
}
});
} |
python | def status(output=True, tgt='*', tgt_type='glob', timeout=None, gather_job_timeout=None):
'''
.. versionchanged:: 2017.7.0
The ``expr_form`` argument has been renamed to ``tgt_type``, earlier
releases must use ``expr_form``.
Print the status of all known salt minions
CLI Example:
.. code-block:: bash
salt-run manage.status
salt-run manage.status tgt="webservers" tgt_type="nodegroup"
salt-run manage.status timeout=5 gather_job_timeout=10
'''
ret = {}
if not timeout:
timeout = __opts__['timeout']
if not gather_job_timeout:
gather_job_timeout = __opts__['gather_job_timeout']
res = _ping(tgt, tgt_type, timeout, gather_job_timeout)
ret['up'], ret['down'] = ([], []) if not res else res
return ret |
python | def set_log_level(self, level):
"""Set the logging level.
Parameters
----------
level : logging level constant
The value to set the logging level to.
"""
self._log_level = level
if self._python_logger:
try:
level = self.PYTHON_LEVEL[level]
except KeyError:
raise FailReply("Unknown logging level '%s'" % (level))
self._python_logger.setLevel(level) |
java | private void initialize() {
// calculate the distinct groups of this role
Set<String> distinctGroups = new HashSet<String>(getAllGroupNames());
// by using a set first we eliminate duplicate names
m_distictGroupNames = Collections.unmodifiableList(new ArrayList<String>(distinctGroups));
} |
java | public static Action getAndCheckAction(EntityDataModel entityDataModel, String actionName) {
int namespaceLastIndex = actionName.lastIndexOf('.');
String namespace = actionName.substring(0, namespaceLastIndex);
String simpleActionName = actionName.substring(namespaceLastIndex + 1);
Schema schema = entityDataModel.getSchema(namespace);
if (schema == null) {
throw new ODataSystemException("Could not find schema in entity data model with namespace: " +
namespace);
}
Action action = schema.getAction(simpleActionName);
if (action == null) {
throw new ODataSystemException("Action not found in entity data model: " + actionName);
}
return action;
} |
java | @NonNull
@Deprecated
public static LocationEngine getBestLocationEngine(@NonNull Context context, boolean background) {
return getBestLocationEngine(context);
} |
python | def main():
""" Get arguments and call the execution function"""
if len(sys.argv) < 6:
print("Usage: %s server_url username password namespace' \
' classname" % sys.argv[0])
print('Using internal defaults')
server_url = SERVER_URL
namespace = TEST_NAMESPACE
username = USERNAME
password = PASSWORD
classname = TEST_CLASS
else:
print('Get from input')
server_url = sys.argv[1]
namespace = sys.argv[2]
username = sys.argv[3]
password = sys.argv[4]
classname = sys.argv[5]
# create the credentials tuple for WBEMConnection
creds = (username, password)
# call the method to execute the request and display results
execute_request(server_url, creds, namespace, classname)
return 0 |
python | def update_translations(self, project_id, file_path=None,
language_code=None, overwrite=False, fuzzy_trigger=None):
"""
Updates translations
overwrite: set it to True if you want to overwrite definitions
fuzzy_trigger: set it to True to mark corresponding translations from the
other languages as fuzzy for the updated values
"""
return self._upload(
project_id=project_id,
updating=self.UPDATING_TRANSLATIONS,
file_path=file_path,
language_code=language_code,
overwrite=overwrite,
fuzzy_trigger=fuzzy_trigger
) |
java | public void marshall(ConfigStreamDeliveryInfo configStreamDeliveryInfo, ProtocolMarshaller protocolMarshaller) {
if (configStreamDeliveryInfo == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(configStreamDeliveryInfo.getLastStatus(), LASTSTATUS_BINDING);
protocolMarshaller.marshall(configStreamDeliveryInfo.getLastErrorCode(), LASTERRORCODE_BINDING);
protocolMarshaller.marshall(configStreamDeliveryInfo.getLastErrorMessage(), LASTERRORMESSAGE_BINDING);
protocolMarshaller.marshall(configStreamDeliveryInfo.getLastStatusChangeTime(), LASTSTATUSCHANGETIME_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} |
python | def remove(path, **kwargs):
r'''
Remove the directory from the SYSTEM path
Returns:
boolean True if successful, False if unsuccessful
rehash : True
If the registry was updated, and this value is set to ``True``, sends a
WM_SETTINGCHANGE broadcast to refresh the environment variables. Set
this to ``False`` to skip this broadcast.
CLI Example:
.. code-block:: bash
# Will remove C:\Python27 from the path
salt '*' win_path.remove 'c:\\python27'
'''
kwargs = salt.utils.args.clean_kwargs(**kwargs)
rehash_ = kwargs.pop('rehash', True)
if kwargs:
salt.utils.args.invalid_kwargs(kwargs)
path = _normalize_dir(path)
path_str = salt.utils.stringutils.to_str(path)
system_path = get_path()
# The current path should not have any unicode in it, but don't take any
# chances.
local_path = [
salt.utils.stringutils.to_str(x)
for x in os.environ['PATH'].split(PATHSEP)
]
def _check_path(dirs, path):
'''
Check the dir list for the specified path, and make changes to the list
if needed. Return True if changes were made to the list, otherwise
return False.
'''
dirs_lc = [x.lower() for x in dirs]
path_lc = path.lower()
new_dirs = []
for index, dirname in enumerate(dirs_lc):
if path_lc != dirname:
new_dirs.append(dirs[index])
if len(new_dirs) != len(dirs):
dirs[:] = new_dirs[:]
return True
else:
return False
if _check_path(local_path, path_str):
_update_local_path(local_path)
if not _check_path(system_path, path):
# No changes necessary
return True
result = __utils__['reg.set_value'](
HIVE,
KEY,
VNAME,
';'.join(salt.utils.data.decode(system_path)),
VTYPE
)
if result and rehash_:
# Broadcast WM_SETTINGCHANGE to Windows if registry was updated
return rehash()
else:
return result |
java | public Canvas getViewPanel() {
VLayout layout = new VLayout(5);
layout.setPadding(5);
MapWidget mapWidget = new MapWidget("mapGuwOsm", "appGuw");
final RibbonBarLayout ribbonBar = new RibbonBarLayout(mapWidget, "appGuw", "guwRibbonBar1");
ribbonBar.setSize("100%", "94px");
ToolStrip toolStrip = new ToolStrip();
toolStrip.setWidth100();
ToolStripButton btn1 = new ToolStripButton("Toggle group title");
btn1.addClickHandler(new ClickHandler() {
public void onClick(ClickEvent event) {
ribbonBar.setShowGroupTitles(!ribbonBar.isShowGroupTitles());
}
});
toolStrip.addButton(btn1);
layout.addMember(toolStrip);
layout.addMember(ribbonBar);
layout.addMember(mapWidget);
return layout;
} |
java | @Override
public void updateBinaryStream( String columnLabel,
InputStream x,
int length ) throws SQLException {
notClosed();
noUpdates();
} |
java | public EnvelopesInformation listStatus(String accountId, EnvelopeIdsRequest envelopeIdsRequest) throws ApiException {
return listStatus(accountId, envelopeIdsRequest, null);
} |
python | def map_sections(fun, neurites, neurite_type=NeuriteType.all, iterator_type=Tree.ipreorder):
'''Map `fun` to all the sections in a collection of neurites'''
return map(fun, iter_sections(neurites,
iterator_type=iterator_type,
neurite_filter=is_type(neurite_type))) |
python | def normalize(X):
"""
MinMax normalization to fit a matrix in the space [0,1] by column.
"""
a = X.min(axis=0)
b = X.max(axis=0)
return (X - a[np.newaxis, :]) / ((b - a)[np.newaxis, :]) |
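A small worked example, assuming numpy is imported as np in the surrounding module:

import numpy as np

X = np.array([[1.0, 10.0],
              [2.0, 20.0],
              [3.0, 30.0]])
print(normalize(X))
# each column is rescaled to [0, 1]:
# [[0.   0. ]
#  [0.5  0.5]
#  [1.   1. ]]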
python | def from_resource_deeper(
self, resource_id=None, limit_depth=1000000, db_session=None, *args, **kwargs
):
"""
This returns you subtree of ordered objects relative
to the start resource_id (currently only implemented in postgresql)
:param resource_id:
:param limit_depth:
:param db_session:
:return:
"""
return self.service.from_resource_deeper(
resource_id=resource_id,
limit_depth=limit_depth,
db_session=db_session,
*args,
**kwargs
) |
python | def var_str(name, shape):
"""Return a sequence of strings naming the element of the tallyable object.
:Example:
>>> var_str('theta', (4,))
['theta[1]', 'theta[2]', 'theta[3]', 'theta[4]']
"""
size = prod(shape)
ind = (indices(shape) + 1).reshape(-1, size)
names = ['[' + ','.join(list(map(str, i))) + ']' for i in zip(*ind)]
names[0] = '%s %s' % (name, names[0])
return names |
java | @NonNull
private static ObservableTransformer<byte[], PresenterEvent> transformToPresenterEvent(Type type) {
return observable -> observable.map(writtenBytes -> ((PresenterEvent) new ResultEvent(writtenBytes, type)))
.onErrorReturn(throwable -> new ErrorEvent(throwable, type));
} |
java | @Override
public View getDropDownView(int position, View convertView, ViewGroup parent) {
// depending on the position, use super method or create our own
// we don't need to inflate a footer view if it uses the default resource, the superclass will do it:
if(footer == null || footerResource == 0 || position != getCount()-1) {
// we have a normal item or a footer with same resource
return setTextsAndCheck(inflater.inflate(dropDownResource, parent, false), getItem(position), true);
} else {
// if we want the footer, create it:
return setTextsAndCheck(inflater.inflate(footerResource, parent, false), footer, true);
}
} |
python | def seqToKV(seq, strict=False):
"""Represent a sequence of pairs of strings as newline-terminated
key:value pairs. The pairs are generated in the order given.
@param seq: The pairs
@type seq: [(str, (unicode|str))]
@return: A string representation of the sequence
@rtype: bytes
"""
def err(msg):
formatted = 'seqToKV warning: %s: %r' % (msg, seq)
if strict:
raise KVFormError(formatted)
else:
logging.warning(formatted)
lines = []
for k, v in seq:
if isinstance(k, bytes):
k = k.decode('utf-8')
elif not isinstance(k, str):
err('Converting key to string: %r' % k)
k = str(k)
if '\n' in k:
raise KVFormError(
'Invalid input for seqToKV: key contains newline: %r' % (k, ))
if ':' in k:
raise KVFormError(
'Invalid input for seqToKV: key contains colon: %r' % (k, ))
if k.strip() != k:
err('Key has whitespace at beginning or end: %r' % (k, ))
if isinstance(v, bytes):
v = v.decode('utf-8')
elif not isinstance(v, str):
err('Converting value to string: %r' % (v, ))
v = str(v)
if '\n' in v:
raise KVFormError(
'Invalid input for seqToKV: value contains newline: %r' %
(v, ))
if v.strip() != v:
err('Value has whitespace at beginning or end: %r' % (v, ))
lines.append(k + ':' + v + '\n')
return ''.join(lines).encode('utf-8') |
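A short usage sketch showing the newline-terminated key:value output produced by the function above:

pairs = [('mode', 'id_res'), ('identity', 'http://example.com/')]
print(seqToKV(pairs))
# b'mode:id_res\nidentity:http://example.com/\n'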
python | def render_property(property):
"""Render a property for bosh manifest, according to its type."""
# This ain't the prettiest thing, but it should get the job done.
# I don't think we have anything more elegant available at bosh-manifest-generation time.
# See https://docs.pivotal.io/partners/product-template-reference.html for list.
if 'type' in property and property['type'] in PROPERTY_FIELDS:
fields = {}
for field in PROPERTY_FIELDS[property['type']]:
if type(field) is tuple:
fields[field[0]] = '(( .properties.{}.{} ))'.format(property['name'], field[1])
else:
fields[field] = '(( .properties.{}.{} ))'.format(property['name'], field)
out = { property['name']: fields }
else:
if property.get('is_reference', False):
out = { property['name']: property['default'] }
else:
out = { property['name']: '(( .properties.{}.value ))'.format(property['name']) }
return out |
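A hedged worked example for the renderer above; 'port' is a hypothetical property name assumed not to be one of the special PROPERTY_FIELDS types and not marked as a reference:

prop = {'name': 'port', 'default': 8080}
print(render_property(prop))
# {'port': '(( .properties.port.value ))'}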
java | public static void deleteRecursively(ZooKeeperIface zk, String path)
throws IOException {
try {
List<String> children = zk.getChildren(path, false);
for (String child : children) {
deleteRecursively(zk, joinPath(path, child));
}
zk.delete(path, -1);
} catch (KeeperException e) {
keeperException("Unrecoverable ZooKeeper error deleting " + path, e);
} catch (InterruptedException e) {
interruptedException("Interrupted deleting " + path, e);
}
} |
java | private Condition parseFileCondition(Element element) {
FileCondition condition = new FileCondition();
condition.setFilePath(element.getAttribute("path"));
return condition;
} |
java | public Observable<ServiceResponse<KeyPhraseBatchResult>> keyPhrasesWithServiceResponseAsync(KeyPhrasesOptionalParameter keyPhrasesOptionalParameter) {
if (this.client.azureRegion() == null) {
throw new IllegalArgumentException("Parameter this.client.azureRegion() is required and cannot be null.");
}
final List<MultiLanguageInput> documents = keyPhrasesOptionalParameter != null ? keyPhrasesOptionalParameter.documents() : null;
return keyPhrasesWithServiceResponseAsync(documents);
} |
java | @Generated(value = "com.ibm.jtc.jax.tools.xjc.Driver", date = "2014-06-11T05:49:00-04:00", comments = "JAXB RI v2.2.3-11/28/2011 06:21 AM(foreman)-")
public List<JSLProperties> getProperties() {
if (properties == null) {
properties = new ArrayList<JSLProperties>();
}
return this.properties;
} |
java | public String getDocumentTypeDeclarationPublicIdentifier()
{
Document doc;
if (m_root.getNodeType() == Node.DOCUMENT_NODE)
doc = (Document) m_root;
else
doc = m_root.getOwnerDocument();
if (null != doc)
{
DocumentType dtd = doc.getDoctype();
if (null != dtd)
{
return dtd.getPublicId();
}
}
return null;
} |
python | def to_json(self):
"""
Returns the options as JSON.
:return: the object as string
:rtype: str
"""
return json.dumps(self.to_dict(), sort_keys=True, indent=2, separators=(',', ': ')) |
java | public static String escapeJS(String str, char quotesUsed) {
return escapeJS(str, quotesUsed, (CharsetEncoder) null);
} |
python | def create(parser: Parser, obj: PersistedObject = None):
"""
Helper method provided because we actually can't put that in the constructor, it creates a bug in Nose tests
https://github.com/nose-devs/nose/issues/725
:param parser:
:param obj:
:return:
"""
if obj is not None:
return _InvalidParserException('Error ' + str(obj) + ' cannot be parsed using ' + str(parser) + ' since '
+ ' this parser does not support ' + obj.get_pretty_file_mode())
else:
return _InvalidParserException('Error this parser is neither SingleFile nor MultiFile !') |
java | public final void bsr(Register dst, Mem src)
{
assert(!dst.isRegType(REG_GPB));
emitX86(INST_BSR, dst, src);
} |
python | def solveConsKinkyPref(solution_next,IncomeDstn,PrefShkDstn,
LivPrb,DiscFac,CRRA,Rboro,Rsave,PermGroFac,BoroCnstArt,
aXtraGrid,vFuncBool,CubicBool):
'''
Solves a single period of a consumption-saving model with preference shocks
to marginal utility and a different interest rate on saving vs borrowing.
Problem is solved using the method of endogenous gridpoints.
Parameters
----------
solution_next : ConsumerSolution
The solution to the succeeding one period problem.
IncomeDstn : [np.array]
A list containing three arrays of floats, representing a discrete
approximation to the income process between the period being solved
and the one immediately following (in solution_next). Order: event
probabilities, permanent shocks, transitory shocks.
PrefShkDstn : [np.array]
Discrete distribution of the multiplicative utility shifter. Order:
probabilities, preference shocks.
LivPrb : float
Survival probability; likelihood of being alive at the beginning of
the succeeding period.
DiscFac : float
Intertemporal discount factor for future utility.
CRRA : float
Coefficient of relative risk aversion.
Rboro: float
Interest factor on assets between this period and the succeeding
period when assets are negative.
Rsave: float
Interest factor on assets between this period and the succeeding
period when assets are positive.
PermGroFac : float
Expected permanent income growth factor at the end of this period.
BoroCnstArt: float or None
Borrowing constraint for the minimum allowable assets to end the
period with. If it is less than the natural borrowing constraint,
then it is irrelevant; BoroCnstArt=None indicates no artificial bor-
rowing constraint.
aXtraGrid: np.array
Array of "extra" end-of-period asset values-- assets above the
absolute minimum acceptable level.
vFuncBool: boolean
An indicator for whether the value function should be computed and
included in the reported solution.
CubicBool: boolean
An indicator for whether the solver should use cubic or linear inter-
polation.
Returns
-------
solution: ConsumerSolution
The solution to the single period consumption-saving problem. Includes
a consumption function cFunc (using linear splines), a marginal value
function vPfunc, a minimum acceptable level of normalized market re-
sources mNrmMin, normalized human wealth hNrm, and bounding MPCs MPCmin
and MPCmax. It might also have a value function vFunc. The consumption
function is defined over normalized market resources and the preference
shock, c = cFunc(m,PrefShk), but the (marginal) value function is defined
unconditionally on the shock, just before it is revealed.
'''
solver = ConsKinkyPrefSolver(solution_next,IncomeDstn,PrefShkDstn,LivPrb,
DiscFac,CRRA,Rboro,Rsave,PermGroFac,BoroCnstArt,
aXtraGrid,vFuncBool,CubicBool)
solver.prepareToSolve()
solution = solver.solve()
return solution |
python | def check_option(self, key, subkey, value):
"""Evaluate if a given value fits the option.
If an option has a limited set of available values, check if the
provided value is amongst them.
:param str key: First identifier of the option.
:param str subkey: Second identifier of the option.
:param value: Value to test (type varies).
:return: :class:`bool` - does ``value`` belong to the options?
:raise:
:NotRegisteredError: If ``key`` or ``subkey`` do not define any
option.
:ValueError: If the provided value is not the expected
type for the option.
"""
key, subkey = _lower_keys(key, subkey)
_entry_must_exist(self.gc, key, subkey)
df = self.gc[(self.gc["k1"] == key) & (self.gc["k2"] == subkey)]
ev.value_eval(value, df["type"].values[0])
if df["values"].values[0] is not None:
return value in df["values"].values[0]
return True |
python | def _set_containable_view(self, session):
"""Sets the underlying containable views to match current view"""
for obj_name in self._containable_views:
if self._containable_views[obj_name] == SEQUESTERED:
try:
getattr(session, 'use_sequestered_' + obj_name + '_view')()
except AttributeError:
pass
else:
try:
getattr(session, 'use_unsequestered_' + obj_name + '_view')()
except AttributeError:
pass |
python | def createPaypalPayment(request):
'''
This view handles the creation of Paypal Express Checkout Payment objects.
All Express Checkout payments must either be associated with a pre-existing Invoice
or a registration, or they must have an amount and type passed in the post data
(such as gift certificate payment requests).
'''
logger.info('Received request for Paypal Express Checkout payment.')
invoice_id = request.POST.get('invoice_id')
tr_id = request.POST.get('reg_id')
amount = request.POST.get('amount')
submissionUserId = request.POST.get('user_id')
transactionType = request.POST.get('transaction_type')
taxable = request.POST.get('taxable', False)
# If a specific amount to pay has been passed, then allow payment
# of that amount.
if amount:
try:
amount = float(amount)
except ValueError:
logger.error('Invalid amount passed')
return HttpResponseBadRequest()
# Parse if a specific submission user is indicated
submissionUser = None
if submissionUserId:
try:
submissionUser = User.objects.get(id=int(submissionUserId))
except (ValueError, ObjectDoesNotExist):
logger.warning('Invalid user passed, submissionUser will not be recorded.')
try:
# Invoice transactions are usually payment on an existing invoice.
if invoice_id:
this_invoice = Invoice.objects.get(id=invoice_id)
this_description = _('Invoice Payment: %s' % this_invoice.id)
if not amount:
amount = this_invoice.outstandingBalance
# This is typical of payment at the time of registration
elif tr_id:
tr = TemporaryRegistration.objects.get(id=int(tr_id))
tr.expirationDate = timezone.now() + timedelta(minutes=getConstant('registration__sessionExpiryMinutes'))
tr.save()
this_invoice = Invoice.get_or_create_from_registration(tr, submissionUser=submissionUser)
this_description = _('Registration Payment: #%s' % tr_id)
if not amount:
amount = this_invoice.outstandingBalance
# All other transactions require both a transaction type and an amount to be specified
elif not transactionType or not amount:
logger.error('Insufficient information passed to createPaypalPayment view.')
raise ValueError
else:
# Gift certificates automatically get a nicer invoice description
if transactionType == 'Gift Certificate':
this_description = _('Gift Certificate Purchase')
else:
this_description = transactionType
this_invoice = Invoice.create_from_item(
float(amount),
this_description,
submissionUser=submissionUser,
calculate_taxes=(taxable is not False),
transactionType=transactionType,
)
except (ValueError, ObjectDoesNotExist) as e:
logger.error('Invalid registration information passed to createPaypalPayment view: (%s, %s, %s)' % (invoice_id, tr_id, amount))
logger.error(e)
return HttpResponseBadRequest()
this_currency = getConstant('general__currencyCode')
this_total = min(this_invoice.outstandingBalance, amount)
this_subtotal = this_total - this_invoice.taxes
this_transaction = {
'amount': {
'total': round(this_total,2),
'currency': this_currency,
'details': {
'subtotal': round(this_subtotal,2),
'tax': round(this_invoice.taxes,2),
},
},
'description': str(this_description),
'item_list': {
'items': []
}
}
for item in this_invoice.invoiceitem_set.all():
if not getConstant('registration__buyerPaysSalesTax'):
this_item_price = item.grossTotal - item.taxes
else:
this_item_price = item.grossTotal
this_transaction['item_list']['items'].append({
'name': str(item.name),
'price': round(this_item_price,2),
'tax': round(item.taxes,2),
'currency': this_currency,
'quantity': 1,
})
# Because the Paypal API requires that the subtotal add up to the sum of the item
# totals, we must add a negative line item for discounts applied, and a line item
# for the remaining balance if there is to be one.
if this_invoice.grossTotal != this_invoice.total:
this_transaction['item_list']['items'].append({
'name': str(_('Total Discounts')),
'price': round(this_invoice.total,2) - round(this_invoice.grossTotal,2),
'currency': this_currency,
'quantity': 1,
})
if this_invoice.amountPaid > 0:
this_transaction['item_list']['items'].append({
'name': str(_('Previously Paid')),
'price': -1 * round(this_invoice.amountPaid,2),
'currency': this_currency,
'quantity': 1,
})
if amount != this_invoice.outstandingBalance:
this_transaction['item_list']['items'].append({
'name': str(_('Remaining Balance After Payment')),
'price': round(amount,2) - round(this_invoice.outstandingBalance,2),
'currency': this_currency,
'quantity': 1,
})
# Paypal requires the Payment request to include redirect URLs. Since
# the plugin can handle actual redirects, we just pass the base URL for
# the current site.
site = SimpleLazyObject(lambda: get_current_site(request))
protocol = 'https' if request.is_secure() else 'http'
base_url = SimpleLazyObject(lambda: "{0}://{1}".format(protocol, site.domain))
payment = Payment({
'intent': 'sale',
'payer': {
'payment_method': 'paypal'
},
'transactions': [this_transaction],
'redirect_urls': {
'return_url': str(base_url),
'cancel_url': str(base_url),
}
})
if payment.create():
logger.info('Paypal payment object created.')
if this_invoice:
this_invoice.status = Invoice.PaymentStatus.authorized
this_invoice.save()
# We just keep a record of the ID and the status, because the
# API can be used to look up everything else.
PaypalPaymentRecord.objects.create(
paymentId=payment.id,
invoice=this_invoice,
status=payment.state,
)
return JsonResponse(payment.to_dict())
else:
logger.error('Paypal payment object not created.')
logger.error(payment)
logger.error(payment.error)
if this_invoice:
this_invoice.status = Invoice.PaymentStatus.error
this_invoice.save()
return HttpResponseBadRequest() |
python | def reverse_format(format_string, resolved_string):
"""
Reverse the string method format.
Given format_string and resolved_string, find arguments that would
give ``format_string.format(**arguments) == resolved_string``
Parameters
----------
format_string : str
Format template string as used with str.format method
resolved_string : str
String with same pattern as format_string but with fields
filled out.
Returns
-------
args : dict
Dict of the form {field_name: value} such that
``format_string.(**args) == resolved_string``
Examples
--------
>>> reverse_format('data_{year}_{month}_{day}.csv', 'data_2014_01_03.csv')
{'year': '2014', 'month': '01', 'day': '03'}
>>> reverse_format('data_{year:d}_{month:d}_{day:d}.csv', 'data_2014_01_03.csv')
{'year': 2014, 'month': 1, 'day': 3}
>>> reverse_format('data_{date:%Y_%m_%d}.csv', 'data_2016_10_01.csv')
{'date': datetime.datetime(2016, 10, 1, 0, 0)}
>>> reverse_format('{state:2}{zip:5}', 'PA19104')
{'state': 'PA', 'zip': '19104'}
See also
--------
str.format : method that this reverses
reverse_formats : method for reversing a list of strings using one pattern
"""
from string import Formatter
from datetime import datetime
fmt = Formatter()
args = {}
# ensure that format_string is in posix format
format_string = make_path_posix(format_string)
# split the string into bits
literal_texts, field_names, format_specs, conversions = zip(*fmt.parse(format_string))
if not any(field_names):
return {}
for i, conversion in enumerate(conversions):
if conversion:
raise ValueError(('Conversion not allowed. Found on {}.'
.format(field_names[i])))
# ensure that resolved string is in posix format
resolved_string = make_path_posix(resolved_string)
# get a list of the parts that matter
bits = _get_parts_of_format_string(resolved_string, literal_texts, format_specs)
for i, (field_name, format_spec) in enumerate(zip(field_names, format_specs)):
if field_name:
try:
if format_spec.startswith('%'):
args[field_name] = datetime.strptime(bits[i], format_spec)
elif format_spec[-1] in list('bcdoxX'):
args[field_name] = int(bits[i])
elif format_spec[-1] in list('eEfFgGn'):
args[field_name] = float(bits[i])
elif format_spec[-1] == '%':
args[field_name] = float(bits[i][:-1])/100
else:
args[field_name] = fmt.format_field(bits[i], format_spec)
except:
args[field_name] = bits[i]
return args |
java | private CmsObject initCmsObject(
HttpServletRequest req,
HttpServletResponse res,
String user,
String password,
String ouFqn)
throws CmsException {
String siteroot = null;
// gather information from request if provided
if (req != null) {
siteroot = OpenCms.getSiteManager().matchRequest(req).getSiteRoot();
}
// initialize the user
if (user == null) {
user = getDefaultUsers().getUserGuest();
}
if (siteroot == null) {
siteroot = "/";
}
CmsObject cms = initCmsObject(
req,
m_securityManager.readUser(null, user),
siteroot,
CmsProject.ONLINE_PROJECT_ID,
ouFqn);
// login the user if different from Guest and password was provided
if ((password != null) && !getDefaultUsers().isUserGuest(user)) {
cms.loginUser(user, password, CmsContextInfo.LOCALHOST);
}
return cms;
} |
java | public List<Polygon> polygons() {
List<List<List<Point>>> coordinates = coordinates();
List<Polygon> polygons = new ArrayList<>(coordinates.size());
for (List<List<Point>> points : coordinates) {
polygons.add(Polygon.fromLngLats(points));
}
return polygons;
} |
java | public ListPullRequestsResult withPullRequestIds(String... pullRequestIds) {
if (this.pullRequestIds == null) {
setPullRequestIds(new java.util.ArrayList<String>(pullRequestIds.length));
}
for (String ele : pullRequestIds) {
this.pullRequestIds.add(ele);
}
return this;
} |
java | public @Nullable Page getPage(@Nullable Predicate<Page> filter, @Nullable Page basePage) {
List<Page> suffixPages = getPages(filter, basePage);
if (suffixPages.isEmpty()) {
return null;
}
else {
return suffixPages.get(0);
}
} |
python | def subp(cmd):
"""
Run a command as a subprocess.
Return a triple of return code, standard out, standard err.
"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
return ReturnTuple(proc.returncode, stdout=out, stderr=err) |
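A usage sketch; ReturnTuple is presumed to be a three-field namedtuple (returncode, stdout, stderr) defined elsewhere in the module, and the output streams are returned as bytes:

rc, out, err = subp(['echo', 'hello'])
if rc == 0:
    print(out.decode().strip())  # 'hello'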
java | @Override
protected ResultSet findChildNodesByParentIdentifier(String parentCid) throws SQLException
{
if (findNodesByParentId == null)
{
findNodesByParentId = dbConnection.prepareStatement(FIND_NODES_BY_PARENTID);
}
else
{
findNodesByParentId.clearParameters();
}
findNodesByParentId.setString(1, parentCid);
findNodesByParentId.setString(2, this.containerConfig.containerName);
return findNodesByParentId.executeQuery();
} |
java | public static DZcs cs_permute(DZcs A, int[] pinv, int[] q, boolean values)
{
int t, j, k, nz = 0, m, n, Ap[], Ai[], Cp[], Ci[] ;
DZcsa Cx = new DZcsa(), Ax = new DZcsa() ;
DZcs C ;
if (!CS_CSC(A)) return (null); /* check inputs */
m = A.m ; n = A.n ; Ap = A.p ; Ai = A.i ; Ax.x = A.x ;
C = cs_spalloc (m, n, Ap [n], values && Ax.x != null, false); /* alloc result */
Cp = C.p ; Ci = C.i ; Cx.x = C.x ;
for (k = 0 ; k < n ; k++)
{
Cp [k] = nz ; /* column k of C is column q[k] of A */
j = q != null ? (q [k]) : k ;
for (t = Ap [j] ; t < Ap [j+1] ; t++)
{
if (Cx.x != null)
Cx.set(nz, Ax.get(t)) ; /* row i of A is row pinv[i] of C */
Ci [nz++] = pinv != null ? (pinv [Ai [t]]) : Ai [t] ;
}
}
Cp [n] = nz ; /* finalize the last column of C */
return C ;
} |
python | def do_files_exist(filenames):
"""Whether any of the filenames exist."""
preexisting = [tf.io.gfile.exists(f) for f in filenames]
return any(preexisting) |
java | public static Iterator<String> iterateClassnames(final File parentDirectory,
final File ... classFiles) {
return iterateClassnames(parentDirectory, Arrays.asList(classFiles).iterator());
} |
python | def sort(self, key, reverse=False):
"""Stable sort of the table *IN-PLACE* with respect to a column.
Parameters
----------
key: int, str
index or header of the column. Normal list rules apply.
reverse : bool
If `True` then table is sorted as if each comparison was reversed.
"""
if isinstance(key, int):
index = key
elif isinstance(key, basestring):
index = self.get_column_index(key)
else:
raise TypeError("'key' must either be 'int' or 'str'")
self._table.sort(key=operator.itemgetter(index), reverse=reverse) |
java | @SuppressWarnings("unchecked")
private <T extends WorkContext> Class<T> getSupportedWorkContextClass(Class<T> adaptorWorkContext)
{
for (Class<? extends WorkContext> supportedWorkContext : SUPPORTED_WORK_CONTEXT_CLASSES)
{
// Assignable or not
if (supportedWorkContext.isAssignableFrom(adaptorWorkContext))
{
Class clz = adaptorWorkContext;
while (clz != null)
{
// Supported by the server
if (clz.equals(supportedWorkContext))
{
return clz;
}
clz = clz.getSuperclass();
}
}
}
return null;
} |
java | public URIBuilder setQuery(final String query) {
this.queryParams = parseQuery(query, Consts.UTF_8);
this.encodedQuery = null;
this.encodedSchemeSpecificPart = null;
return this;
} |
java | @Generated(value={"com.threerings.presents.tools.GenDObjectTask"})
public void setCode (String value)
{
String ovalue = this.code;
requestAttributeChange(
CODE, value, ovalue);
this.code = value;
} |
java | public
int appendLoopOnAppenders(LoggingEvent event) {
int nb = 0;
for (Appender appender : appenderList) {
appender.doAppend(event);
nb++;
}
return nb;
} |
java | public <T> DiffNode compare(final T working, final T base)
{
dispatcher.resetInstanceMemory();
try
{
return dispatcher.dispatch(DiffNode.ROOT, Instances.of(working, base), RootAccessor.getInstance());
}
finally
{
dispatcher.clearInstanceMemory();
}
} |
java | protected String delBase(final AbstractDelRequest request) throws AbstractCosException {
request.check_param();
String url = buildUrl(request);
String sign = Sign.getOneEffectiveSign(request.getBucketName(), request.getCosPath(), this.cred);
HttpRequest httpRequest = new HttpRequest();
httpRequest.setUrl(url);
httpRequest.addHeader(RequestHeaderKey.Authorization, sign);
httpRequest.addHeader(RequestHeaderKey.Content_TYPE, RequestHeaderValue.ContentType.JSON);
httpRequest.addHeader(RequestHeaderKey.USER_AGENT, this.config.getUserAgent());
httpRequest.addParam(RequestBodyKey.OP, RequestBodyValue.OP.DELETE);
httpRequest.setMethod(HttpMethod.POST);
httpRequest.setContentType(HttpContentType.APPLICATION_JSON);
return httpClient.sendHttpRequest(httpRequest);
} |
java | public boolean skipSpaces ()
{
while (m_nPos < m_sValue.length () && m_sValue.charAt (m_nPos) == ' ')
m_nPos++;
return m_nPos < m_sValue.length ();
} |
java | public List<TextPair> getInlineTextPairs(boolean markTemplates) {
List<TextPair> pairList = new ArrayList<TextPair>();
try {
//extract sections
List<ExtractedSection> beforeSections=null;
List<ExtractedSection> afterSections=null;
if(markTemplates){
//add inline marker for the template
beforeSections = ParseUtils.getSections(before.getRevisionText(), before.getRevisionID() + "",before.getRevisionID(), Arrays.asList(new String[]{template}));
afterSections = ParseUtils.getSections(after.getRevisionText(), after.getRevisionID() + "", after.getRevisionID(), Arrays.asList(new String[]{template}));
}else{
//no inline markers
beforeSections = ParseUtils.getSections(before.getRevisionText(), before.getRevisionID() + "",before.getRevisionID());
afterSections = ParseUtils.getSections(after.getRevisionText(), after.getRevisionID() + "", after.getRevisionID());
}
for (ExtractedSection tplSect : revPairType == RevisionPairType.deleteTemplate ? beforeSections : afterSections) {
// in DELETE-mode, the "before" revision contain the templates
// in ADD-mode, the "after" revision contains the templates
if (containsIgnoreCase(tplSect.getTemplates(), template)) {
// the current sect contains the template we're looking for
// now find the corresponding tpl in the other revisions
for (ExtractedSection nonTplSect : revPairType == RevisionPairType.deleteTemplate ? afterSections: beforeSections) {
// TODO how do we match the sections?
// currently only by title - we could do fuzzy matching
// of the section body
if (tplSect.getTitle()!=null&&nonTplSect.getTitle()!=null&&tplSect.getTitle().equalsIgnoreCase(nonTplSect.getTitle())) {
if (revPairType == RevisionPairType.deleteTemplate) {
pairList.add(new TextPair(tplSect.getBody(), nonTplSect.getBody()));
} else {
pairList.add(new TextPair(nonTplSect.getBody(), tplSect.getBody()));
}
}
}
}
}
} catch (Exception ex) {
//This happens if a (SWEBLE-)compiler exception occurs.
//Sometimes, malformed xml items seem to cause class cast exceptions
//in the parser, which is not wrapped in a Compiler exception.
//Therefore, we should catch all exceptions here and return the
//TextPairs identified so far (if any)
System.err.println(ex.getMessage());
//TODO use logger!!
}
return pairList;
} |
python | def children(self):
"""get children node referenced as `children` in the
report.
:rtype: dict with name (str) -> node (ReportNode)
"""
for child in self.data.get('children', []):
if osp.exists(osp.join(self.path, child, YAML_REPORT_FILE)):
yield child, self.__class__(osp.join(self.path, child)) |
java | public <U extends T, A, B> InitialMatching2<T, U, A, B> when(
DecomposableMatchBuilder2<U, A, B> decomposableMatchBuilder) {
return new InitialMatching2<>(decomposableMatchBuilder.build(), value);
} |
java | public boolean isPrintableControl(ScreenField sField, int iPrintOptions)
{
if ((sField == null) || (sField == this))
return this.getScreenFieldView().isPrintableControl(iPrintOptions);
return sField.isPrintableControl(null, iPrintOptions);
} |
java | public void count(String propertyName, String alias) {
final CountProjection proj = Projections.count(calculatePropertyName(propertyName));
addProjectionToList(proj, alias);
} |
java | @Override
public <T> T getParameterValue(Parameter<T> paramParameter)
{
Object value = kunderaQuery.getClauseValue(paramParameter);
if (value == null)
{
throw new IllegalStateException("parameter has not been bound" + paramParameter);
}
return (T) value;
} |
java | public Map<BenchmarkMethod, Integer> getNumberOfMethodsAndRuns()
throws PerfidixMethodCheckException {
final Map<BenchmarkMethod, Integer> returnVal = new HashMap<BenchmarkMethod, Integer>();
// instantiate objects, just for getting runs
final List<BenchmarkMethod> meths = getBenchmarkMethods();
for (final BenchmarkMethod meth : meths) {
// TODO respect data provider
int numberOfRuns = BenchmarkMethod.getNumberOfAnnotatedRuns(meth
.getMethodToBench());
if (numberOfRuns == Bench.NONE_RUN) {
numberOfRuns = conf.getRuns();
}
returnVal.put(meth, numberOfRuns);
}
return returnVal;
} |
python | def _big_endian_int(bits: np.ndarray) -> int:
"""Returns the big-endian integer specified by the given bits.
For example, [True, False, False, True, False] becomes binary 10010 which
is 18 in decimal.
Args:
bits: Descending bits of the integer, with the 1s bit at the end.
Returns:
The integer.
"""
result = 0
for e in bits:
result <<= 1
if e:
result |= 1
return result |
java | public BatchDetectSyntaxResult withResultList(BatchDetectSyntaxItemResult... resultList) {
if (this.resultList == null) {
setResultList(new java.util.ArrayList<BatchDetectSyntaxItemResult>(resultList.length));
}
for (BatchDetectSyntaxItemResult ele : resultList) {
this.resultList.add(ele);
}
return this;
} |
python | def find_range(self, interval):
"""wrapper for find"""
return self.find(self.tree, interval, self.start, self.end) |
java | protected final void pagedExecuteWorkFlow(String queryId, LogicalWorkflow workflow, IResultHandler resultHandler,
int pageSize) throws ConnectorException {
checkIsSupported(workflow);
ClusterName clusterName = ((Project) workflow.getInitialSteps().get(0)).getClusterName();
pagedExecute(queryId, (Project) workflow.getInitialSteps().get(0),
connectionHandler.getConnection(clusterName.getName()), resultHandler);
} |
python | def _words_by_distinctiveness_score(vocab, topic_word_distrib, doc_topic_distrib, doc_lengths, n=None,
least_to_most=False):
"""Return words in `vocab` ordered by distinctiveness score."""
p_t = get_marginal_topic_distrib(doc_topic_distrib, doc_lengths)
distinct = get_word_distinctiveness(topic_word_distrib, p_t)
return _words_by_score(vocab, distinct, least_to_most=least_to_most, n=n) |
python | def cipher(rkey, pt, Nk=4):
"""AES encryption cipher."""
assert Nk in {4, 6, 8}
Nr = Nk + 6
rkey = rkey.reshape(4*(Nr+1), 32)
pt = pt.reshape(128)
# first round
state = add_round_key(pt, rkey[0:4])
for i in range(1, Nr):
state = sub_bytes(state)
state = shift_rows(state)
state = mix_columns(state)
state = add_round_key(state, rkey[4*i:4*(i+1)])
# final round
state = sub_bytes(state)
state = shift_rows(state)
state = add_round_key(state, rkey[4*Nr:4*(Nr+1)])
return state |
java | public static String getRedactedJsonString(final String jsonString, final String... jsonPathsToRedact) {
String rtn = "";
if (jsonString != null && !jsonString.isEmpty()) {
rtn = jsonString;
try {
ObjectMapper mapper = new ObjectMapper();
JsonNode rootJsonNode = mapper.readTree(jsonString);
List<JsonNode> nodesToRedact;
ObjectNode writableNodeToRedact;
for (String jsonPathToRedact : jsonPathsToRedact) {
if (jsonPathToRedact != null && !jsonPathToRedact.isEmpty()) {
nodesToRedact = rootJsonNode.findParents(jsonPathToRedact);
for (JsonNode jsonNodeToRedact : nodesToRedact) {
writableNodeToRedact = (ObjectNode)jsonNodeToRedact;
if (writableNodeToRedact != null && !writableNodeToRedact.isMissingNode()) {
writableNodeToRedact.put(jsonPathToRedact, "[REDACTED]");
}
}
}
}
rtn = mapper.writeValueAsString(rootJsonNode);
} catch (IOException e) {
rtn = "";
}
}
return rtn;
} |
python | def dim_dc(self, pars):
r"""
:math:`\frac{\partial \hat{\rho''}(\omega)}{\partial c} = \rho_0
\frac{-m sin(\frac{c \pi}{2}) ln(\omega \tau)(\omega \tau)^c - m
(\omega \tau)^c \frac{\pi}{2} cos(\frac{c \pi}{2})}{1 + 2 (\omega \tau)^c
cos(\frac{c \pi}{2}) + (\omega \tau)^{2 c}} + \rho_0 \frac{\left[-m
(\omega \tau)^c cos(\frac{c \pi}{2}) \right] \cdot \left[ -2 ln(\omega
\tau) (\omega \tau)^c cos(\frac{c \pi}{2}) + 2 (\omega \tau)^c
\frac{\pi}{2} cos(\frac{c \pi}{2}) \right] + \left[2 ln(\omega \tau)
(\omega \tau)^{2 c}\right]}{\left[1 + 2 (\omega \tau)^c cos(\frac{c
\pi}{2}) + (\omega \tau)^{2 c}\right]^2}`
"""
self._set_parameters(pars)
# term1
nom1a = - self.m * np.log(self.w * self.tau) * self.otc *\
np.sin(self.ang)
nom1b = - self.m * self.otc * (np.pi / 2.0) * np.cos(self.ang)
term1 = (nom1a + nom1b) / self.denom
# term2
nom2 = (self.m * self.otc * np.sin(self.ang)) *\
(2 * np.log(self.w * self.tau) * self.otc * np.cos(self.ang) -
2 * self.otc * (np.pi / 2.0) * np.sin(self.ang) +
2 * np.log(self.w * self.tau) * self.otc2)
term2 = nom2 / self.denom ** 2
result = term1 + term2
result *= self.rho0
return result |
python | def estimate_row_scales(
self,
X_centered,
column_scales):
"""
row_scale[i]**2 =
mean{j in observed[i, :]}{
(X[i, j] - row_center[i] - column_center[j]) ** 2
--------------------------------------------------
column_scale[j] ** 2
}
"""
n_rows, n_cols = X_centered.shape
column_scales = np.asarray(column_scales)
if len(column_scales) != n_cols:
raise ValueError("Expected length %d but got shape %s" % (
n_cols, column_scales))
row_variances = np.nanmean(
X_centered ** 2 / (column_scales ** 2).reshape((1, n_cols)),
axis=1)
row_variances[row_variances == 0] = 1.0
assert len(row_variances) == n_rows, "%d != %d" % (
len(row_variances),
n_rows)
return np.sqrt(row_variances) |
java | public void setupKeyBuffer(BaseBuffer destBuffer, int iAreaDesc, boolean bMoveToField)
{
boolean bForceUniqueKey = true;
int iKeyFieldCount = this.getKeyFields(bForceUniqueKey, false);
for (int iKeyFieldSeq = DBConstants.MAIN_KEY_FIELD; iKeyFieldSeq < iKeyFieldCount; iKeyFieldSeq++)
{
KeyField keyField = this.getKeyField(iKeyFieldSeq, bForceUniqueKey);
BaseField field = keyField.getField(DBConstants.FILE_KEY_AREA);
BaseField paramField = keyField.getField(iAreaDesc);
if (bMoveToField)
if (iAreaDesc != DBConstants.FILE_KEY_AREA) // Don't move this they are the same field
{
paramField.moveFieldToThis(field, DBConstants.DONT_DISPLAY, DBConstants.INIT_MOVE); // Copy the value
boolean bIsModified = field.isModified();
paramField.setModified(bIsModified);
}
if (destBuffer != null)
{ // Copy to buffer also?
destBuffer.addNextField(paramField);
}
}
if (destBuffer != null)
destBuffer.finishBuffer();
} |
java | public static boolean isMultipart(HttpServletRequest request) {
if (!"post".equals(request.getMethod().toLowerCase())) {
return false;
}
String contentType = request.getContentType();
return contentType != null && contentType.toLowerCase().startsWith("multipart/");
} |
python | def _add_span_node_ids_to_token_nodes(self):
"""
Adds to every token node the list of spans (span node IDs) that it
belongs to.
TokenNode.spans - a list of `int` ids of `SpanNode`s
"""
span_dict = defaultdict(list)
for span_edge in self._spanning_relation_ids:
token_node_id = self.edges[span_edge].target
span_node_id = self.edges[span_edge].source
span_dict[token_node_id].append(span_node_id)
for token_node_id in span_dict:
self.nodes[token_node_id].spans = span_dict[token_node_id] |
python | def search(reader, key, prev_size=0, compare_func=cmp, block_size=8192):
"""
Perform a binary search for a specified key to within a 'block_size'
(default 8192) sized block followed by linear search
within the block to find first matching line.
When performing linear search, keep track of up to N previous lines before
first matching line.
"""
iter_ = binsearch(reader, key, compare_func, block_size)
iter_ = linearsearch(iter_,
key, prev_size=prev_size,
compare_func=compare_func)
return iter_ |
java | public Response deleteMultiple(@NotNull @PathParam("ids") URI_ID id,
@NotNull @PathParam("ids") final PathSegment ids,
@QueryParam("permanent") final boolean permanent) throws Exception {
Set<String> idSet = ids.getMatrixParameters().keySet();
final Response.ResponseBuilder builder = Response.noContent();
final TxRunnable failProcess = t -> builder.status(Response.Status.NOT_FOUND);
final MODEL_ID firstId = tryConvertId(ids.getPath());
final Set<MODEL_ID> idCollection = Sets.newLinkedHashSet();
idCollection.add(firstId);
if (!idSet.isEmpty()) {
idCollection.addAll(Collections2.transform(idSet, this::tryConvertId));
}
matchedDelete(firstId, idCollection, permanent);
if (!idSet.isEmpty()) {
executeTx(t -> {
preDeleteMultipleModel(idCollection, permanent);
boolean p = deleteMultipleModel(idCollection, permanent);
if (!p) {
builder.status(Response.Status.ACCEPTED);
}
postDeleteMultipleModel(idCollection, p);
}, failProcess);
} else {
executeTx(t -> {
preDeleteModel(firstId, permanent);
boolean p = deleteModel(firstId, permanent);
if (!p) {
builder.status(Response.Status.ACCEPTED);
}
postDeleteModel(firstId, p);
}, failProcess);
}
return builder.build();
} |
java | void update() {
final BundleContext context = componentContext.getBundleContext();
// determine the service filter to use for discovering the Library service this bell is for
String libraryRef = library.id();
// it is unclear if only looking at the id would work here.
// other examples in classloading use both id and service.pid to look up so doing the same here.
String libraryStatusFilter = String.format("(&(objectClass=%s)(|(id=%s)(service.pid=%s)))", Library.class.getName(), libraryRef, libraryRef);
Filter filter;
try {
filter = context.createFilter(libraryStatusFilter);
} catch (InvalidSyntaxException e) {
// should not happen, but blow up if it does
throw new RuntimeException(e);
}
final Set<String> serviceNames = getServiceNames((String[]) config.get(SERVICE_ATT));
// create a tracker that will register the services once the library becomes available
ServiceTracker<Library, List<ServiceRegistration<?>>> newTracker = null;
newTracker = new ServiceTracker<Library, List<ServiceRegistration<?>>>(context, filter, new ServiceTrackerCustomizer<Library, List<ServiceRegistration<?>>>() {
@Override
public List<ServiceRegistration<?>> addingService(ServiceReference<Library> libraryRef) {
Library library = context.getService(libraryRef);
// Got the library now register the services.
// The list of registrations is returned so we don't have to store them ourselves.
return registerLibraryServices(library, serviceNames);
}
@Override
public void modifiedService(ServiceReference<Library> libraryRef, List<ServiceRegistration<?>> metaInfServices) {
// don't care
}
@Override
@FFDCIgnore(IllegalStateException.class)
public void removedService(ServiceReference<Library> libraryRef, List<ServiceRegistration<?>> metaInfServices) {
// The library is going away; need to unregister the services
for (ServiceRegistration<?> registration : metaInfServices) {
try {
registration.unregister();
} catch (IllegalStateException e) {
// ignore; already unregistered
}
}
context.ungetService(libraryRef);
}
});
trackerLock.lock();
try {
if (tracker != null) {
// close the existing tracker so we unregister existing services
tracker.close();
}
// store and open the new tracker so we can register the configured services.
tracker = newTracker;
tracker.open();
} finally {
trackerLock.unlock();
}
} |
python | def _match_error_to_data_set(x, ex):
"""
Inflates ex to match the dimensionality of x, "intelligently".
x is assumed to be a 2D array.
"""
# Simplest case, ex is None or a number
if not _fun.is_iterable(ex):
# Just make a matched list of Nones
if ex is None: ex = [ex]*len(x)
# Make arrays of numbers
if _fun.is_a_number(ex):
value = ex # temporary storage
ex = []
for n in range(len(x)):
ex.append([value]*len(x[n]))
# Otherwise, ex is iterable
# Default behavior: If the elements are all numbers and the length matches
# that of the first x-array, assume this is meant to match all the x
# data sets
if _fun.elements_are_numbers(ex) and len(ex) == len(x[0]): ex = [ex]*len(x)
# The user may specify a list of some iterable and some not. Assume
# in this case that at least the lists are the same length
for n in range(len(x)):
# do nothing to the None's
# Inflate single numbers to match
if _fun.is_a_number(ex[n]): ex[n] = [ex[n]]*len(x[n])
return ex |
python | def _cleave_interface(self, bulk_silica, tile_x, tile_y, thickness):
"""Carve interface from bulk silica.
Also includes a buffer of O's above and below the surface to ensure the
interface is coated.
"""
O_buffer = self._O_buffer
tile_z = int(math.ceil((thickness + 2*O_buffer) / bulk_silica.periodicity[2]))
bulk = mb.recipes.TiledCompound(bulk_silica, n_tiles=(tile_x, tile_y, tile_z))
interface = mb.Compound(periodicity=(bulk.periodicity[0],
bulk.periodicity[1],
0.0))
for i, particle in enumerate(bulk.particles()):
if ((particle.name == 'Si' and O_buffer < particle.pos[2] < (thickness + O_buffer)) or
(particle.name == 'O' and particle.pos[2] < (thickness + 2*O_buffer))):
interface_particle = mb.Compound(name=particle.name, pos=particle.pos)
interface.add(interface_particle, particle.name + "_{}".format(i))
self.add(interface) |
java | public static <K, V> ImmutableMap<K, V> copyParallelListsToMap(Iterable<K> keys,
Iterable<V> values) {
final ImmutableMap.Builder<K, V> ret = ImmutableMap.builder();
final Iterator<K> keyIt = keys.iterator();
final Iterator<V> valueIt = values.iterator();
while (keyIt.hasNext() && valueIt.hasNext()) {
ret.put(keyIt.next(), valueIt.next());
}
if (!keyIt.hasNext() && !valueIt.hasNext()) {
return ret.build();
} else {
if (keyIt.hasNext()) {
throw new IllegalArgumentException(
"When pairing keys and values, there were more keys than values");
} else {
throw new IllegalArgumentException(
"When pairing keys and values, there were more values than keys");
}
}
} |
python | def OR(self):
"""
Switches default query joiner from " AND " to " OR "
Returns:
Self. Queryset object.
"""
clone = copy.deepcopy(self)
clone.adapter._QUERY_GLUE = ' OR '
return clone |
java | public static String cloneContent(String source, GitService service, String comment) throws Exception {
String rev = GitService.moveContentToBranch(source, service, service.getBranchName(), comment);
service.push(false);
return rev;
} |