language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
---|---|
java
|
/**
 * Sends the given message as a command over SSH and stores the command's
 * result for later retrieval via the correlation manager.
 *
 * <p>Flow: save a correlation key for this request, convert the message to an
 * {@code SshRequest}, open an exec channel, run the command (optionally feeding
 * stdin), then capture stdout/stderr/exit status into an {@code SshResponse}
 * which is converted back into a reply message.
 *
 * @param message the outbound message carrying the SSH command
 * @param context the current test context
 */
public void send(Message message, TestContext context) {
    // Persist the correlation key first so the eventual reply can be matched
    // back to this request.
    String correlationKeyName = getEndpointConfiguration().getCorrelator().getCorrelationKeyName(getName());
    String correlationKey = getEndpointConfiguration().getCorrelator().getCorrelationKey(message);
    correlationManager.saveCorrelationKey(correlationKeyName, correlationKey, context);

    SshRequest request = (SshRequest) getEndpointConfiguration().getMessageConverter().convertOutbound(message, getEndpointConfiguration(), context);

    // Only verify the remote host key when strict host checking is enabled.
    if (getEndpointConfiguration().isStrictHostChecking()) {
        setKnownHosts();
    }

    String rUser = getRemoteUser(message);
    connect(rUser);

    ChannelExec channelExec = null;
    ByteArrayOutputStream outStream = new ByteArrayOutputStream();
    ByteArrayOutputStream errStream = new ByteArrayOutputStream();
    int rc;
    try {
        channelExec = openChannelExec();
        channelExec.setErrStream(errStream);
        channelExec.setOutputStream(outStream);
        channelExec.setCommand(request.getCommand());
        doConnect(channelExec);
        if (request.getStdin() != null) {
            sendStandardInput(channelExec, request.getStdin());
        }
        waitCommandToFinish(channelExec);
        rc = channelExec.getExitStatus();
    } finally {
        // Always tear down the channel and the session, even on failure.
        if (channelExec != null && channelExec.isConnected()) {
            channelExec.disconnect();
        }
        disconnect();
    }

    SshResponse sshResp = new SshResponse(outStream.toString(),errStream.toString(),rc);
    Message response = getEndpointConfiguration().getMessageConverter().convertInbound(sshResp, getEndpointConfiguration(), context)
        .setHeader("user", rUser);
    correlationManager.store(correlationKey, response);
}
|
python
|
def filter(self, msg_type=None, maxsize=0):
    """
    Get a filtered iterator of messages for synchronous, blocking use in
    another thread.

    :param msg_type: optional message type passed through to add_callback
        to restrict which messages are fed to the iterator.
    :param maxsize: maximum queue size for the returned iterator
        (0 presumably means unbounded -- confirm against _SBPQueueIterator).
    :return: an iterator of messages; an empty iterator if this handler
        is already dead.
    """
    if self._dead:
        return iter(())
    iterator = Handler._SBPQueueIterator(maxsize)
    # We use a weakref so that the iterator may be garbage collected if its
    # consumer no longer has a reference.
    ref = weakref.ref(iterator)
    self._sinks.append(ref)

    def feediter(msg, **metadata):
        # Feed messages into the iterator while it is alive; once it has
        # been garbage collected, raise so the callback is torn down.
        i = ref()
        if i is not None:
            i(msg, **metadata)
        else:
            raise Handler._DeadCallbackException
    self.add_callback(feediter, msg_type)
    return iterator
|
python
|
def default_display(value, with_module=True):
    """Default display for unknown objects.

    :param value: object to describe.
    :param with_module: if True, include the module of the object's type.
    :return: short, human-readable description of the object's type.
    """
    object_type = type(value)
    try:
        name = object_type.__name__
        module = object_type.__module__
        if with_module:
            return name + ' object of ' + module + ' module'
        return name
    except Exception:
        # Fall back to the type's repr with the surrounding "<...>" stripped
        # (e.g. "<class 'Foo'>" -> "class 'Foo'"). A bare "except:" here
        # would also swallow KeyboardInterrupt/SystemExit, so catch only
        # Exception.
        type_str = to_text_string(object_type)
        return type_str[1:-1]
|
java
|
/**
 * Parses a CSV string into a matrix and converts it to sparse row-major form.
 *
 * @param csv the CSV representation of the matrix
 * @return the parsed matrix in SPARSE_ROW_MAJOR layout
 */
public static RowMajorSparseMatrix fromCSV(String csv) {
    return Matrix.fromCSV(csv).to(Matrices.SPARSE_ROW_MAJOR);
}
|
python
|
def zincr(self, name, key, amount=1):
    """
    Increase the score of ``key`` in zset ``name`` by ``amount``. If no key
    exists, the value will be initialized as ``amount``

    Like **Redis.ZINCR**

    :param string name: the zset name
    :param string key: the key name
    :param int amount: increments
    :return: the integer value of ``key`` in zset ``name``
    :rtype: int

    >>> ssdb.zincr('zset_2', 'key1', 7)
    49
    >>> ssdb.zincr('zset_2', 'key2', 3)
    317
    >>> ssdb.zincr('zset_2', 'key_not_exists', 101)
    101
    >>> ssdb.zincr('zset_not_exists', 'key_not_exists', 8848)
    8848
    """
    # Validate/coerce the increment to an int before hitting the server.
    amount = get_integer('amount', amount)
    return self.execute_command('zincr', name, key, amount)
|
java
|
/**
 * Registers and initializes a plugin under the given name.
 *
 * <p>The plugin is initialized <em>before</em> being exposed through the
 * {@code plugins}/{@code configs} maps, so concurrent readers never observe a
 * half-initialized plugin (the previous version registered first and removed
 * on failure, which also clobbered any existing plugin of the same name).
 *
 * @param name   registration name of the plugin
 * @param plugin the plugin instance to initialize and register
 * @param config the plugin's configuration, stored on success
 */
public void addPlugin(String name, MoskitoPlugin plugin, PluginConfig config){
    try{
        plugin.initialize();
        plugins.put(name, plugin);
        configs.put(name, config);
    }catch(Exception e){
        // Best-effort: a plugin that fails to initialize is simply not registered.
        log.warn("couldn't initialize plugin "+name+" - "+plugin+", removing", e);
    }
}
|
java
|
/**
 * Marshalls the given request using the supplied protocol marshaller.
 *
 * <p>NOTE(review): this looks like generated AWS SDK code; the request type
 * appears to have no members to marshall, which is why the try block is
 * empty — confirm against the code generator before "fixing".
 *
 * @param getComplianceSummaryByConfigRuleRequest request to marshall; must not be null
 * @param protocolMarshaller marshaller to write members to (unused: no members)
 * @throws SdkClientException if the request is null or marshalling fails
 */
public void marshall(GetComplianceSummaryByConfigRuleRequest getComplianceSummaryByConfigRuleRequest, ProtocolMarshaller protocolMarshaller) {
    if (getComplianceSummaryByConfigRuleRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }

    try {
        // No request members to marshall for this operation.
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
|
python
|
def has_offline_historical_manager_or_raise(self):
    """Raises an exception if model uses a history manager and
    historical model history_id is not a UUIDField.

    Note: expected to use edc_model.HistoricalRecords instead of
    simple_history.HistoricalRecords.
    """
    try:
        model = self.instance.__class__.history.model
    except AttributeError:
        # No history manager on the class; inspect the model itself.
        model = self.instance.__class__
    # Locate the history_id field, if the model declares one.
    history_id_field = next(
        (f for f in model._meta.fields if f.name == "history_id"), None
    )
    if history_id_field is not None and not isinstance(history_id_field, UUIDField):
        raise OfflineHistoricalManagerError(
            f"Field 'history_id' of historical model "
            f"'{model._meta.app_label}.{model._meta.model_name}' "
            "must be an UUIDfield. "
            "For history = HistoricalRecords() use edc_model.HistoricalRecords instead of "
            "simple_history.HistoricalRecords(). "
            f"See '{self.instance._meta.app_label}.{self.instance._meta.model_name}'."
        )
|
java
|
/**
 * Lays out the Swing GUI: a control panel on the left, the most recently
 * evolved biomorph in the centre, and a modal dialog (created but not shown
 * here) used for interactive biomorph selection.
 *
 * @param container the top-level container to populate
 */
@Override
protected void prepareGUI(Container container)
{
    renderer = new SwingBiomorphRenderer();
    console = new SwingConsole(5);
    // Modal dialog with no owner frame; shown later during selection.
    selectionDialog = new JDialog((JFrame) null, "Biomorph Selection", true);
    biomorphHolder = new JPanel(new GridLayout(1, 1));

    container.add(new ControlPanel(), BorderLayout.WEST);
    container.add(biomorphHolder, BorderLayout.CENTER);

    biomorphHolder.setBorder(BorderFactory.createTitledBorder("Last Evolved Biomorph"));
    // Placeholder until the first biomorph is generated.
    biomorphHolder.add(new JLabel("Nothing generated yet.", JLabel.CENTER));
    selectionDialog.add(console, BorderLayout.CENTER);
    selectionDialog.setSize(800, 600);
    selectionDialog.validate();
}
|
java
|
/**
 * Resolves a file reference to a URL by probing, in order: the bare filename,
 * the filename under {@code path}, the filename under {@code workingDir}, and
 * finally the filename interpreted as a URL whose file part exists on disk.
 * Returns null when nothing matches.
 */
static URL tryToGetValidUrl(String workingDir, String path, String filename) {
    // Candidate locations, in order of precedence.
    String[] candidates = {
        filename,
        path + File.separator + filename,
        workingDir + File.separator + filename
    };
    try {
        for (String candidate : candidates) {
            File f = new File(candidate);
            if (f.exists()) {
                return f.toURI().toURL();
            }
        }
        // Last resort: treat the filename itself as a URL pointing at a local file.
        File fromUrl = new File(new URL(filename).getFile());
        if (fromUrl.exists()) {
            return fromUrl.toURI().toURL();
        }
    } catch (MalformedURLException e) {
        // don't do anything, we're going on the assumption it is a jar, which could be wrong
    }
    return null;
}
|
java
|
/**
 * Lazily creates the "Capture" button on first access; subsequent calls
 * return the cached instance.
 *
 * @return the singleton capture button
 */
private JButton getBtnCapture() {
    if (btnCapture == null) {
        btnCapture = new JButton();
        btnCapture.setText("Capture");
        btnCapture.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
        btnCapture.setHorizontalTextPosition(javax.swing.SwingConstants.TRAILING);
        // Clicks simply delegate to capture().
        btnCapture.addActionListener(e -> capture());
    }
    return btnCapture;
}
|
java
|
/**
 * Determines the most specific common type of the elements of an array-like
 * value. If {@code baseType} is anything more specific than {@code Object},
 * it is returned as-is; otherwise the elements are scanned and their common
 * type computed, stopping early once it widens to {@code Object}.
 *
 * @param baseType  declared element type; returned unchanged unless it is Object
 * @param fromValue array whose elements are inspected via {@link Array#get}
 * @param length    supplier of the number of elements to inspect
 * @return the common element type, or {@code baseType} if none more specific
 */
public static Class<?> commonType(Class<?> baseType, Object fromValue, IntSupplier length) {
    if ( Object.class != baseType )
        return baseType;
    Class<?> common = null;
    int len = length.getAsInt();
    for (int i = 0; i < len; i++) {
        // BUG FIX: inspect element i — the original always read element 0,
        // so the loop never looked past the first element.
        common = Types.getCommonType(common, Types.getType(Array.get(fromValue, i)));
        if ( Object.class == common )
            break;
    }
    if ( null != common && common != baseType )
        return common;
    return baseType;
}
|
python
|
def insert(self, nodes, pos):
    """Insert all nodes from `nodes` into this route at position `pos`.

    Each node is first deallocated from its current allocation (if any),
    then adopted by this route; the route's total demand is increased by
    the summed demand of the inserted nodes.

    Parameters
    ----------
    nodes : iterable
        Nodes to insert. Each node must expose ``_allocation`` and
        ``demand()``.
    pos : int
        Index in this route's node list at which the nodes are inserted.
    """
    # Snapshot the input first: deallocate() may mutate the collection the
    # caller handed us (the original code used a redundant list
    # comprehension for the same purpose).
    node_list = []
    nodes_demand = 0
    for node in list(nodes):
        if node._allocation:
            # Detach from the previous owner before adopting the node.
            node._allocation.deallocate([node])
        node_list.append(node)
        node._allocation = self
        nodes_demand += node.demand()
    self._nodes = self._nodes[:pos] + node_list + self._nodes[pos:]
    self._demand += nodes_demand
|
python
|
def get_by_addr(self, address):
    """
    Lookup a set of notifications by address

    Args:
        address (UInt160 or str): hash of address for notifications

    Returns:
        list: a list of notifications

    Raises:
        Exception: if the address cannot be converted to a UInt160.
    """
    addr = address
    # A 34-character string is treated as a base58 address and converted to
    # its script-hash form before the prefix lookup.
    if isinstance(address, str) and len(address) == 34:
        addr = Helper.AddrStrToScriptHash(address)

    if not isinstance(addr, UInt160):
        raise Exception("Incorrect address format")

    # Snapshot the address-prefixed keyspace for a consistent iteration.
    addrlist_snapshot = self.db.prefixed_db(NotificationPrefix.PREFIX_ADDR).snapshot()
    results = []

    for val in addrlist_snapshot.iterator(prefix=bytes(addr.Data), include_key=False):
        # Values of 4 bytes or fewer carry no event payload — skip them.
        if len(val) > 4:
            try:
                event = SmartContractEvent.FromByteArray(val)
                results.append(event)
            except Exception as e:
                # Best-effort: log the bad entry and keep parsing the rest.
                logger.error("could not parse event: %s %s" % (e, val))
    return results
|
java
|
/**
 * Shell built-in: reads the content of a URL.
 *
 * @param args args[0] is the URL; optional args[1] is the character encoding
 * @return the URL content, as produced by the readUrl(String, String, boolean) overload
 * @throws IOException if the URL cannot be read
 */
public static Object readUrl(Context cx, Scriptable thisObj, Object[] args,
                             Function funObj)
    throws IOException
{
    if (args.length == 0) {
        throw reportRuntimeError("msg.shell.readUrl.bad.args");
    }
    String url = ScriptRuntime.toString(args[0]);
    String charCoding = null;
    if (args.length >= 2) {
        charCoding = ScriptRuntime.toString(args[1]);
    }
    // NOTE(review): the final boolean presumably selects text vs. raw bytes —
    // confirm against the readUrl(String, String, boolean) overload.
    return readUrl(url, charCoding, false);
}
|
python
|
def props_value(props):
    """
    Properties value.

    Renders a properties dictionary as a ':'-separated list of
    ``key=value`` pairs, e.g. ``{'a': 1, 'b': 2}`` -> ``'a=1:b=2'``.
    An empty dictionary yields an empty string.

    :param dict props:
        Properties dictionary.
    :rtype:
        str
    :return:
        Properties as string.
    """
    # str.join is linear and clearer than rebuilding the string on every
    # iteration (the original re-formatted the whole accumulator each pass).
    return ":".join(
        "{}={}".format(prop_key, prop_value)
        for prop_key, prop_value in props.items()
    )
|
python
|
def session(self):
    """
    This is a session between the consumer (your website) and the provider
    (e.g. Twitter). It is *not* a session between a user of your website
    and your website.

    :return: a newly constructed session object (``session_class``)
        configured from this blueprint's client credentials and signature
        settings. A fresh instance is built on every access.
    """
    return self.session_class(
        client_key=self.client_key,
        client_secret=self.client_secret,
        signature_method=self.signature_method,
        signature_type=self.signature_type,
        rsa_key=self.rsa_key,
        client_class=self.client_class,
        force_include_body=self.force_include_body,
        blueprint=self,
        base_url=self.base_url,
        **self.kwargs
    )
|
python
|
def get_JWT(url, address=None):
    """
    Given a URL, fetch and decode the JWT it points to.
    If address is given, then authenticate the JWT with the address.
    Return None if we could not fetch it, or unable to authenticate it.

    NOTE: the URL must be usable by the requests library
    """
    jwt_txt = None
    jwt = None

    log.debug("Try {}".format(url))

    # special case: handle file://
    urlinfo = urllib2.urlparse.urlparse(url)
    if urlinfo.scheme == 'file':
        # points to a path on disk
        try:
            with open(urlinfo.path, 'r') as f:
                jwt_txt = f.read()
        except Exception as e:
            if BLOCKSTACK_TEST:
                log.exception(e)
            log.warning("Failed to read {}".format(url))
            return None
    else:
        # http(s) URL or similar
        try:
            resp = requests.get(url)
            assert resp.status_code == 200, 'Bad status code on {}: {}'.format(url, resp.status_code)
            jwt_txt = resp.text
        except Exception as e:
            if BLOCKSTACK_TEST:
                log.exception(e)
            log.warning("Unable to resolve {}".format(url))
            return None

    try:
        # one of two things are possible:
        # * this is a JWT string
        # * this is a serialized JSON string whose first item is a dict that has 'token' as key,
        # and that key is a JWT string.
        try:
            jwt_txt = json.loads(jwt_txt)[0]['token']
        except:
            # not wrapped JSON; assume jwt_txt is already a raw JWT string
            pass

        jwt = jsontokens.decode_token(jwt_txt)
    except Exception as e:
        if BLOCKSTACK_TEST:
            log.exception(e)
        log.warning("Unable to decode token at {}".format(url))
        return None

    try:
        # must be well-formed: a dict with payload.issuer.publicKey parseable
        # as an ECDSA public key.
        assert isinstance(jwt, dict)
        assert 'payload' in jwt, jwt
        assert isinstance(jwt['payload'], dict)
        assert 'issuer' in jwt['payload'], jwt
        assert isinstance(jwt['payload']['issuer'], dict)
        assert 'publicKey' in jwt['payload']['issuer'], jwt
        assert virtualchain.ecdsalib.ecdsa_public_key(str(jwt['payload']['issuer']['publicKey']))
    except AssertionError as ae:
        if BLOCKSTACK_TEST or BLOCKSTACK_DEBUG:
            log.exception(ae)
        log.warning("JWT at {} is malformed".format(url))
        return None

    if address is not None:
        public_key = str(jwt['payload']['issuer']['publicKey'])
        # Check both compressed and uncompressed forms of the key against
        # the expected address.
        addrs = [virtualchain.address_reencode(virtualchain.ecdsalib.ecdsa_public_key(keylib.key_formatting.decompress(public_key)).address()),
                 virtualchain.address_reencode(virtualchain.ecdsalib.ecdsa_public_key(keylib.key_formatting.compress(public_key)).address())]

        if virtualchain.address_reencode(address) not in addrs:
            # got a JWT, but it doesn't match the address
            log.warning("Found JWT at {}, but its public key has addresses {} and {} (expected {})".format(url, addrs[0], addrs[1], address))
            return None

        verifier = jsontokens.TokenVerifier()
        if not verifier.verify(jwt_txt, public_key):
            # got a JWT, and the address matches, but the signature does not
            log.warning("Found JWT at {}, but it was not signed by {} ({})".format(url, public_key, address))
            return None

    return jwt
|
java
|
/**
 * Decides whether the given operation should be displayed for the user.
 * Any matching field-operation config whose permission the user lacks vetoes
 * the display; otherwise the display attribute is either "all" or a
 * space-separated whitelist of operation ids.
 *
 * @param operationId operation to check; null is never displayed
 * @param user        current user; may be null (permission checks skipped)
 * @return true if the operation should be displayed
 */
public boolean shouldDisplay(String operationId, PMSecurityUser user) {
    if (operationId == null) {
        return false;
    }
    // First we check permissions.
    for (FieldOperationConfig config : getConfigs()) {
        boolean applies = config.includes(operationId);
        if (applies && config.getPerm() != null && user != null
                && !user.hasPermission(config.getPerm())) {
            return false;
        }
    }
    final String display = getDisplay();
    if (display.equalsIgnoreCase("all")) {
        return true;
    }
    // Otherwise: a space-separated whitelist of operation ids.
    for (String token : display.split("[ ]")) {
        if (token.equalsIgnoreCase(operationId)) {
            return true;
        }
    }
    return false;
}
|
python
|
def _to_dict(self):
"""Return keyrange's state as a dict.
:rtype: dict
:returns: state of this instance.
"""
mapping = {}
if self.start_open:
mapping["start_open"] = self.start_open
if self.start_closed:
mapping["start_closed"] = self.start_closed
if self.end_open:
mapping["end_open"] = self.end_open
if self.end_closed:
mapping["end_closed"] = self.end_closed
return mapping
|
java
|
/**
 * Sets how this controller retains its view while detached. A null argument
 * is defensively coerced to {@code RELEASE_DETACH} despite the
 * {@code @NonNull} annotation. Switching to {@code RELEASE_DETACH} while not
 * attached releases the view reference immediately.
 *
 * @param retainViewMode the new retain mode
 */
public void setRetainViewMode(@NonNull RetainViewMode retainViewMode) {
    this.retainViewMode = retainViewMode != null ? retainViewMode : RetainViewMode.RELEASE_DETACH;
    if (this.retainViewMode == RetainViewMode.RELEASE_DETACH && !attached) {
        removeViewReference();
    }
}
|
java
|
/**
 * Combines a precomputed sign bit pattern with the bit pattern of
 * {@code natlog} into a single double.
 *
 * <p>NOTE(review): assumes {@code FLOAT_MASK} clears exactly the bit(s) that
 * {@code sign} supplies (typically the IEEE-754 sign bit) — the constant's
 * definition is not visible here; confirm before relying on this.
 *
 * @param sign   bit pattern OR-ed into the result
 * @param natlog value whose masked raw bits are combined with {@code sign}
 * @return the double assembled from the combined bit pattern
 */
public static final double compact(long sign, double natlog) {
    return Double.longBitsToDouble(sign | (FLOAT_MASK & Double.doubleToRawLongBits(natlog)));
}
|
java
|
/**
 * Wires the HTTP server's interceptors into the context's delegating handler
 * interceptor bean, if one is present; otherwise does nothing.
 *
 * @param context application context in which the interceptor bean is looked up
 */
protected void configureHandlerInterceptor(ApplicationContext context) {
    if (context.containsBean(HANDLER_INTERCEPTOR_BEAN_NAME)) {
        DelegatingHandlerInterceptor handlerInterceptor = context.getBean(HANDLER_INTERCEPTOR_BEAN_NAME, DelegatingHandlerInterceptor.class);
        handlerInterceptor.setInterceptors(adaptInterceptors(httpServer.getInterceptors(), context));
    }
}
|
python
|
def filter_slaves(selfie, slaves):
    """
    Remove slaves that are in an ODOWN or SDOWN state
    also remove slaves that do not have 'ok' master-link-status

    :param slaves: iterable of slave-info dicts.
    :return: list of (ip, port) tuples for the healthy slaves.
    """
    healthy = []
    for slave in slaves:
        if slave['is_odown'] or slave['is_sdown']:
            continue
        if slave['master-link-status'] != 'ok':
            continue
        healthy.append((slave['ip'], slave['port']))
    return healthy
|
python
|
def pre_filter(self):
    """ Return rTorrent condition to speed up data transfer.

    Returns an rTorrent query fragment that narrows the candidate set
    server-side before the full (client-side) filter runs, or '' when no
    useful pre-filter can be built for this field/value.
    """
    # Only certain fields support server-side filtering; templates never do.
    if self._name not in self.PRE_FILTER_FIELDS or self._template:
        return ''
    if not self._value:
        # Empty value: match items where the field equals the empty string.
        return '"equal={},cat="'.format(self.PRE_FILTER_FIELDS[self._name])

    if self._is_regex:
        # Strip the surrounding regex delimiters, then split into words.
        needle = self._value[1:-1]
        needle = self.CLEAN_PRE_VAL_RE.sub(' ', needle)
        needle = self.SPLIT_PRE_VAL_RE.split(needle)
    else:
        needle = self.CLEAN_PRE_VAL_RE.sub(' ', self._value)
        needle = self.SPLIT_PRE_GLOB_RE.split(needle)
    # Use the longest fragment as the most selective substring probe.
    needle = list(sorted(needle, key=len))[-1]

    if needle:
        try:
            # rTorrent's string.contains_i only handles ASCII needles.
            needle.encode('ascii')
        except UnicodeEncodeError:
            return ''
        else:
            # Escape embedded quotes for the nested rTorrent command syntax.
            return r'"string.contains_i=${},\"{}\""'.format(
                self.PRE_FILTER_FIELDS[self._name], needle.replace('"', r'\\\"'))

    return ''
|
java
|
/**
 * Creates the download and processing thread pools. The download pool size is
 * the configured default capped by the
 * {@code MAX_DOWNLOAD_THREAD_POOL_SIZE} setting (default cap: 3).
 */
protected void initializeExecutorServices() {
    final int max = settings.getInt(Settings.KEYS.MAX_DOWNLOAD_THREAD_POOL_SIZE, 3);
    // Math.min expresses the cap directly instead of a manual branch.
    final int downloadPoolSize = Math.min(DOWNLOAD_THREAD_POOL_SIZE, max);
    downloadExecutorService = Executors.newFixedThreadPool(downloadPoolSize);
    processingExecutorService = Executors.newFixedThreadPool(PROCESSING_THREAD_POOL_SIZE);
    LOGGER.debug("#download threads: {}", downloadPoolSize);
    LOGGER.debug("#processing threads: {}", PROCESSING_THREAD_POOL_SIZE);
}
|
python
|
def do_edit_settings(fake):
    """Opens legit settings in editor.

    :param fake: if truthy, only echo the editor command that would run
        instead of actually opening the editor.
    """
    path = resources.user.open('config.ini').name
    click.echo('Legit Settings:\n')
    # List every known setting with its description before editing.
    for (option, _, description) in legit_settings.config_defaults:
        click.echo(columns([crayons.yellow(option), 25], [description, None]))
    click.echo("")  # separate settings info from os output
    if fake:
        click.echo(crayons.red('Faked! >>> edit {}'.format(path)))
    else:
        click.edit(path)
|
java
|
/**
 * Returns segment change requests that occurred after the given counter.
 *
 * <p>A negative counter means "send everything": an already-completed future
 * holding a LOAD request for every currently known segment is returned.
 * Otherwise the call is delegated to the change history, which may complete
 * asynchronously.
 *
 * @param counter position in the change history the caller has already seen
 * @return future resolving to the snapshot of change requests
 */
public ListenableFuture<ChangeRequestsSnapshot<DataSegmentChangeRequest>> getSegmentChangesSince(ChangeRequestHistory.Counter counter)
{
    if (counter.getCounter() < 0) {
        synchronized (lock) {
            // Full snapshot: expose every known segment as a LOAD request.
            Iterable<DataSegmentChangeRequest> segments = Iterables.transform(
                segmentLookup.keySet(),
                new Function<DataSegment, DataSegmentChangeRequest>()
                {
                    @Nullable
                    @Override
                    public SegmentChangeRequestLoad apply(DataSegment input)
                    {
                        return new SegmentChangeRequestLoad(input);
                    }
                }
            );

            SettableFuture<ChangeRequestsSnapshot<DataSegmentChangeRequest>> future = SettableFuture.create();
            future.set(ChangeRequestsSnapshot.success(changes.getLastCounter(), Lists.newArrayList(segments)));
            return future;
        }
    } else {
        return changes.getRequestsSince(counter);
    }
}
|
java
|
/**
 * Returns the given string with all whitespace code points removed.
 * Supplementary (surrogate-pair) characters are handled correctly.
 * If the string contains no whitespace, the original instance is returned.
 *
 * @param string the input, may be null or empty (returned as-is)
 * @return the string without whitespace, or {@code string} itself if unchanged
 */
public static String removeWhitespace(String string) {
    if (string == null || string.length() == 0) {
        return string;
    }
    StringBuilder sb = new StringBuilder(string.length());
    // Iterate by code point in a single O(n) pass. The previous version
    // called offsetByCodePoints(0, i) inside the loop, which rescans from
    // the start each iteration and is O(n^2) overall.
    for (int offset = 0; offset < string.length(); ) {
        int codePoint = string.codePointAt(offset);
        if (!Character.isWhitespace(codePoint)) {
            sb.appendCodePoint(codePoint);
        }
        offset += Character.charCount(codePoint);
    }
    // Preserve the original "return the same instance when unchanged" behavior.
    if (string.length() == sb.length()) {
        return string;
    }
    return sb.toString();
}
|
java
|
/**
 * Closes all owned components and shuts down the executors last.
 *
 * <p>NOTE(review): the ordering (manager/registry before the factories and
 * caches they use, executors at the end) appears deliberate — preserve it.
 */
@Override
public void close() {
    closeComponent(this.containerManager);
    closeComponent(this.containerRegistry);
    closeComponent(this.dataLogFactory);
    closeComponent(this.readIndexFactory);
    closeComponent(this.cacheFactory);
    this.cacheManager.close();
    this.threadPoolMetrics.close();
    ExecutorServiceHelpers.shutdown(SHUTDOWN_TIMEOUT, this.storageExecutor, this.coreExecutor);
}
|
java
|
/**
 * Collects all FreeMarker extension methods and template directives registered
 * with Furnace, keyed by their exposed name, and installs the given rewrite
 * event as context on each of them.
 *
 * @param furnace source of the addon registry
 * @param event   rewrite event set as context on every extension
 * @return map of extension name to extension instance
 * @throws WindupException if two extensions claim the same name
 */
public static Map<String, Object> findFreeMarkerExtensions(Furnace furnace, GraphRewrite event)
{
    Imported<WindupFreeMarkerMethod> freeMarkerMethods = furnace.getAddonRegistry().getServices(
        WindupFreeMarkerMethod.class);
    Map<String, Object> results = new HashMap<>();

    // Methods are registered under their method name; duplicates are fatal.
    for (WindupFreeMarkerMethod freeMarkerMethod : freeMarkerMethods)
    {
        freeMarkerMethod.setContext(event);
        if (results.containsKey(freeMarkerMethod.getMethodName()))
        {
            throw new WindupException(Util.WINDUP_BRAND_NAME_ACRONYM+" contains two freemarker extension providing the same name: "
                + freeMarkerMethod.getMethodName());
        }
        results.put(freeMarkerMethod.getMethodName(), freeMarkerMethod);
    }

    // Directives follow the same rules, keyed by directive name.
    Imported<WindupFreeMarkerTemplateDirective> freeMarkerDirectives = furnace.getAddonRegistry().getServices(
        WindupFreeMarkerTemplateDirective.class);
    for (WindupFreeMarkerTemplateDirective freeMarkerDirective : freeMarkerDirectives)
    {
        freeMarkerDirective.setContext(event);
        if (results.containsKey(freeMarkerDirective.getDirectiveName()))
        {
            throw new WindupException(Util.WINDUP_BRAND_NAME_ACRONYM+" contains two freemarker extension providing the same name: "
                + freeMarkerDirective.getDirectiveName());
        }
        results.put(freeMarkerDirective.getDirectiveName(), freeMarkerDirective);
    }
    return results;
}
|
java
|
/**
 * Convenience overload: normalizes the data by delegating to
 * {@code normalize(schema, data, 0, 1, skipColumns)} (i.e. min 0, max 1).
 *
 * @param schema      schema describing the columns
 * @param data        records to normalize
 * @param skipColumns column names to leave untouched
 * @return the normalized data
 */
public static JavaRDD<List<Writable>> normalize(Schema schema, JavaRDD<List<Writable>> data,
                List<String> skipColumns) {
    return normalize(schema, data, 0, 1, skipColumns);
}
|
python
|
def get_fields_by_class(cls, field_class):
    """ Return a list of field names matching a field class

    :param field_class: field class object
    :return: list of field names whose value is an instance of field_class
    """
    fields = getattr(cls, '_fields')
    return [
        name for name, field in fields.items()
        if isinstance(field, field_class)
    ]
|
java
|
/**
 * Runs all validation applicable to the given form bean: ValidatorPlugIn
 * rules for both the bean and the action path, the bean's own
 * {@code Validatable.validate}, and any subclass-provided additional errors.
 *
 * <p>Also ensures a {@code MessageResources} capable of evaluating
 * expressions is present on the request, merging in form-bean-specific
 * resources when the mapping declares a bundle for them.
 *
 * <p>NOTE(review): {@code page} and {@code validatorResults} are fields of
 * the enclosing class (not visible here) — confirm their lifecycle before
 * refactoring.
 *
 * @param bean     the form bean to validate
 * @param beanName the name under which validator rules are registered
 * @param mapping  the current action mapping
 * @param request  the current request
 * @return the accumulated ActionErrors (possibly empty)
 */
protected ActionErrors validateBean( Object bean, String beanName, ActionMapping mapping, HttpServletRequest request )
{
    MessageResources messageResources = ( MessageResources ) request.getAttribute( Globals.MESSAGES_KEY );
    ExpressionAwareMessageResources.update( messageResources, bean );

    //
    // See if this action uses a form that defines its own message resources. If so, use those, or combine them
    // with the message resources from the current module.
    //
    if ( mapping instanceof PageFlowActionMapping )
    {
        PageFlowActionMapping pfam = ( PageFlowActionMapping ) mapping;
        String bundle = pfam.getFormBeanMessageResourcesKey();

        if ( bundle != null )
        {
            MessageResources formBeanResources = ( MessageResources ) request.getAttribute( bundle );
            ExpressionAwareMessageResources.update( formBeanResources, bean );

            if ( formBeanResources != null )
            {
                if ( messageResources != null )
                {
                    formBeanResources = new MergedMessageResources( messageResources, formBeanResources );
                }

                request.setAttribute( Globals.MESSAGES_KEY, formBeanResources );
                messageResources = formBeanResources;
            }
        }
    }

    ServletContext servletContext = getServlet().getServletContext();

    // If there's still no MessageResources for this request, create one that can evaluate expressions.
    if (messageResources == null) {
        messageResources = new ExpressionAwareMessageResources( bean, request, servletContext);
        request.setAttribute( Globals.MESSAGES_KEY, messageResources );
    }

    ActionErrors errors = new ActionErrors();

    //
    // If the ValidatorPlugIn was initialized for this module, run it.
    //
    if ( Resources.getValidatorResources( servletContext, request ) != null )
    {
        try
        {
            //
            // Run validations associated with the bean.
            //
            Validator beanV = initValidator( beanName, bean, servletContext, request, errors, page );
            validatorResults = beanV.validate();

            //
            // Run validations associated with the action.
            //
            Validator actionV = initValidator( mapping.getPath(), bean, servletContext, request, errors, page );
            validatorResults.merge( actionV.validate() );
        }
        catch ( ValidatorException e )
        {
            // Validation infrastructure failure: log and continue with
            // whatever errors were accumulated so far.
            _log.error( e.getMessage(), e );
        }
    }

    //
    // If this bean implements our Validatable interface, run its validate method.
    //
    if ( bean instanceof Validatable )
    {
        ( ( Validatable ) bean ).validate( mapping, request, errors );
    }

    // Add any additional errors specified by a subclass.
    ActionErrors additionalActionErrors = getAdditionalActionErrors(mapping, request);
    if (additionalActionErrors != null) {
        mergeActionErrors(errors, additionalActionErrors);
    }

    return errors;
}
|
java
|
/**
 * Returns the CP definition inventory with the given primary key, throwing
 * when no such row exists (unlike {@code fetchByPrimaryKey}, which returns null).
 *
 * @param primaryKey the primary key
 * @return the matching CPDefinitionInventory, never null
 * @throws NoSuchCPDefinitionInventoryException if no row matches
 */
@Override
public CPDefinitionInventory findByPrimaryKey(Serializable primaryKey)
    throws NoSuchCPDefinitionInventoryException {
    CPDefinitionInventory cpDefinitionInventory = fetchByPrimaryKey(primaryKey);

    if (cpDefinitionInventory == null) {
        if (_log.isDebugEnabled()) {
            _log.debug(_NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey);
        }

        throw new NoSuchCPDefinitionInventoryException(_NO_SUCH_ENTITY_WITH_PRIMARY_KEY +
            primaryKey);
    }

    return cpDefinitionInventory;
}
|
java
|
/**
 * Opens a Windows named pipe ({@code \\.\pipe\name} for localhost, else
 * {@code \\host\pipe\name}) and exposes it through the {@code is}/{@code os}
 * stream fields.
 *
 * <p>A busy pipe surfaces as {@link FileNotFoundException}; in that case the
 * open is retried — via JNA's {@code WaitNamedPipe} when available, otherwise
 * after a short sleep — until the timeout window elapses. With
 * {@code timeout == 0}, a default window of 100&nbsp;ms is used.
 *
 * @param endpoint unused; the pipe path is derived from the host/name fields
 * @param timeout  connect timeout in milliseconds (0 = default 100 ms window)
 * @throws IOException if the pipe cannot be opened in time or on interrupt
 */
public void connect(SocketAddress endpoint, int timeout) throws IOException {
    String filename;
    if (host == null || host.equals("localhost")) {
        filename = "\\\\.\\pipe\\" + name;
    } else {
        filename = "\\\\" + host + "\\pipe\\" + name;
    }

    //use a default timeout of 100ms if no timeout set.
    int usedTimeout = timeout == 0 ? 100 : timeout;
    long initialNano = System.nanoTime();
    do {
        try {
            file = new RandomAccessFile(filename, "rw");
            break;
        } catch (FileNotFoundException fileNotFoundException) {
            try {
                //using JNA if available
                Kernel32.INSTANCE.WaitNamedPipe(filename, timeout);
                //then retry connection
                file = new RandomAccessFile(filename, "rw");
            } catch (Throwable cle) {
                // in case JNA not on classpath, then wait 10ms before next try.
                if (System.nanoTime() - initialNano > TimeUnit.MILLISECONDS.toNanos(usedTimeout)) {
                    if (timeout == 0) {
                        throw new FileNotFoundException(fileNotFoundException.getMessage()
                            + "\nplease consider set connectTimeout option, so connection can retry having access to named pipe. "
                            + "\n(Named pipe can throw ERROR_PIPE_BUSY error)");
                    }
                    throw fileNotFoundException;
                }
                try {
                    TimeUnit.MILLISECONDS.sleep(5);
                } catch (InterruptedException interrupted) {
                    IOException ioException = new IOException(
                        "Interruption during connection to named pipe");
                    ioException.initCause(interrupted);
                    throw ioException;
                }
            }
        }
    } while (true);

    // Adapt the RandomAccessFile to the InputStream/OutputStream contracts.
    is = new InputStream() {
        @Override
        public int read(byte[] bytes, int off, int len) throws IOException {
            return file.read(bytes, off, len);
        }

        @Override
        public int read() throws IOException {
            return file.read();
        }

        @Override
        public int read(byte[] bytes) throws IOException {
            return file.read(bytes);
        }
    };

    os = new OutputStream() {
        @Override
        public void write(byte[] bytes, int off, int len) throws IOException {
            file.write(bytes, off, len);
        }

        @Override
        public void write(int value) throws IOException {
            file.write(value);
        }

        @Override
        public void write(byte[] bytes) throws IOException {
            file.write(bytes);
        }
    };
}
|
python
|
def visit_create_library_command(element, compiler, **kw):
    """
    Returns the actual sql query for the CreateLibraryCommand class.

    Builds a Redshift ``CREATE [OR REPLACE] LIBRARY`` statement with bound
    parameters for the S3 location, credentials and (optionally) region.
    """
    # Redshift's supported library language is "plpythonu"; the previous
    # template said "pythonplu", a transposition Redshift rejects.
    query = """
        CREATE {or_replace} LIBRARY {name}
        LANGUAGE plpythonu
        FROM :location
        WITH CREDENTIALS AS :credentials
        {region}
    """
    bindparams = [
        sa.bindparam(
            'location',
            value=element.location,
            type_=sa.String,
        ),
        sa.bindparam(
            'credentials',
            value=element.credentials,
            type_=sa.String,
        ),
    ]

    if element.region is not None:
        bindparams.append(sa.bindparam(
            'region',
            value=element.region,
            type_=sa.String,
        ))

    # Quote the library name through the dialect's preparer to avoid
    # injection via the identifier.
    quoted_lib_name = compiler.preparer.quote_identifier(element.library_name)
    query = query.format(name=quoted_lib_name,
                         or_replace='OR REPLACE' if element.replace else '',
                         region='REGION :region' if element.region else '')
    return compiler.process(sa.text(query).bindparams(*bindparams), **kw)
|
java
|
/**
 * Builds a Paragraph containing the "before" phrase, optionally the current
 * page number, and optionally the "after" chunk, using the configured
 * alignment and the leading of the "before" phrase.
 *
 * <p>NOTE(review): the exact semantics of {@code addSpecial} vs {@code add}
 * are inferred from usage — confirm against the PDF library's API docs.
 *
 * @return the assembled paragraph
 */
public Paragraph paragraph() {
    Paragraph paragraph = new Paragraph(before.getLeading());
    paragraph.add(before);

    if (numbered) {
        // Page number rendered in the same font as the "before" phrase.
        paragraph.addSpecial(new Chunk(String.valueOf(pageN), before.getFont()));
    }

    if (after != null) {
        paragraph.addSpecial(after);
    }

    paragraph.setAlignment(alignment);
    return paragraph;
}
|
python
|
def _subtitlesRemoved(self, path, subNos):
    """When subtitle is removed, all syncPoints greater than removed
    subtitle are decremented. SyncPoint equal to removed subtitle is also
    removed."""
    def action(current, count, model, row):
        # A sync point matching a removed subtitle disappears entirely;
        # the rest shift down by the number of removed subtitles at or
        # before them.
        if count.equal > 0:
            model.removeRow(row)
        else:
            _setSubNo(current - count.greater_equal, model, row)

    def count(current, nos):
        # Counts how many removed numbers are >= / == the current one.
        return _GtEqCount(current, nos)

    self._changeSubNos(path, subNos, count, action)
|
python
|
def reference(self):
    """Reference a file

    Uses the currently selected task file info, if any; does nothing when
    there is no selection.

    :returns: None
    :rtype: None
    :raises: None
    """
    tfi = self.get_taskfileinfo_selection()
    if not tfi:
        return
    self.reftrack.reference(tfi)
|
python
|
def describe_file_set(modules):
    """Build a file set from a specified Python modules.

    Args:
        modules: Iterable of Python module to describe.

    Returns:
        Initialized FileSet instance describing the modules.
    """
    descriptor = FileSet()
    file_descriptors = [describe_file(module) for module in modules]
    # Leave descriptor.files unset when there is nothing to describe,
    # matching the original behavior.
    if file_descriptors:
        descriptor.files = file_descriptors
    return descriptor
|
python
|
def _make_tile(self, value, background, text):
    """Renders a tile, according to its value, and background and foreground colours.

    :param value: number shown on the tile; a falsy value renders an empty tile.
    :param background: background colour passed to pygame.draw.rect.
    :param text: foreground colour for the rendered number.
    :return: a pygame.Surface of size cell_width x cell_height.
    """
    tile = pygame.Surface((self.cell_width, self.cell_height), pygame.SRCALPHA)
    pygame.draw.rect(tile, background, (0, 0, self.cell_width, self.cell_height))
    # The "zero" tile doesn't have anything inside.
    if value:
        # Shrink the font as the digit count grows so the label fits:
        # <1000 -> 50pt, <10000 -> 40pt, otherwise 30pt.
        label = load_font(self.BOLD_NAME, 50 if value < 1000 else
                          (40 if value < 10000 else 30)).render(str(value), True, text)
        width, height = label.get_size()
        # Centre the label within the tile.
        tile.blit(label, ((self.cell_width - width) / 2, (self.cell_height - height) / 2))
    return tile
|
python
|
def get_user_stats(self, users, lang=None, concepts=None, since=None, recalculate=True):
    """
    Finds all UserStats of given concepts and users.
    Recompute UserStats if necessary

    Args:
        users (Optional[list of users] or [user]): list of primary keys of user or users
            Defaults to None meaning all users.
        lang (string): use only concepts witch the lang. Defaults to None meaning all languages.
        concepts (Optional[list of concepts]): list of primary keys of concepts or concepts
            Defaults to None meaning all concepts.
        since (Optional[datetime]): only include stats with time >= since.
        recalculate (bool): recompute stale concept stats first; requires lang.

    Raises:
        ValueError: if recalculate is True and lang is None.

    Returns:
        dict: user_id -> dict (concept_identifier - > (stat_name -> value)) -- for more users
        dict: concept_identifier - > (stat_name -> value) -- for one user
    """
    only_one_user = False
    if not isinstance(users, list):
        # A single user was passed: normalize to a list and remember to
        # unwrap the result at the end.
        users = [users]
        only_one_user = True
    if recalculate:
        if lang is None:
            raise ValueError('Recalculation without lang is not supported.')
        time_start = time_lib()
        concepts_to_recalculate = Concept.objects.get_concepts_to_recalculate(users, lang, concepts)
        LOGGER.debug("user_stats - getting identifying concepts to recalculate: %ss", (time_lib() - time_start))
        time_start = time_lib()
        self.recalculate_concepts(concepts_to_recalculate, lang)
        LOGGER.debug("user_stats - recalculating concepts: %ss", (time_lib() - time_start))

    qs = self.prepare_related().filter(user__in=users, concept__active=True)
    if concepts is not None:
        qs = qs.filter(concept__in=concepts)
    if lang is not None:
        qs = qs.filter(concept__lang=lang)
    if since is not None:
        qs = qs.filter(time__gte=since)
    data = defaultdict(lambda: defaultdict(lambda: {}))
    for user_stat in qs:
        data[user_stat.user_id][user_stat.concept.identifier][user_stat.stat] = user_stat.value
    if only_one_user:
        # The input may have been a User object or a raw primary key.
        return data[users[0].pk if type(users[0]) == User else users[0]]
    return data
|
java
|
/**
 * Reflectively looks up a public method, converting the checked
 * {@link NoSuchMethodException} into an unchecked IllegalArgumentException.
 *
 * @param c        the class to search
 * @param name     the method name
 * @param argTypes the parameter types of the desired method
 * @return the matching public Method
 * @throws IllegalArgumentException if no such public method exists
 */
public static <T> Method getMethod(Class<T> c, String name, Class<?>... argTypes) {
    try {
        return c.getMethod(name, argTypes);
    } catch(NoSuchMethodException e) {
        throw new IllegalArgumentException(e);
    }
}
|
java
|
/**
 * Post-processes the semantic space: applies the optionally configured matrix
 * transform, reports where the processed matrix was saved, then reloads it
 * as the in-memory word space.
 *
 * @param properties configuration; may name a Transform implementation under
 *                   {@code MATRIX_TRANSFORM_PROPERTY}
 * @throws IOError if the processed matrix cannot be read back
 */
public void processSpace(Properties properties) {
    try {
        Transform transform = null;

        // Load any optionally specified transform class
        String transformClass =
            properties.getProperty(MATRIX_TRANSFORM_PROPERTY);
        if (transformClass != null)
            transform = ReflectionUtil.getObjectInstance(
                transformClass);

        MatrixFile processedSpace = super.processSpace(transform);

        System.out.printf("Matrix saved in %s as %s%n",
                          processedSpace.getFile(),
                          processedSpace.getFormat());
        wordSpace = MatrixIO.readMatrix(processedSpace.getFile(),
                                        processedSpace.getFormat());
        System.out.printf("loaded word space of %d x %d%n",
                          wordSpace.rows(), wordSpace.columns());
    } catch (IOException ioe) {
        throw new IOError(ioe);
    }
}
|
python
|
def cmd_map(self, args):
    '''map commands'''
    from MAVProxy.modules.mavproxy_map import mp_slipmap
    if len(args) < 1:
        print("usage: map <icon|set>")
    elif args[0] == "icon":
        # map icon <lat> <lon> [icon-name]: drop an icon on the map.
        if len(args) < 3:
            print("Usage: map icon <lat> <lon> <icon>")
        else:
            lat = args[1]
            lon = args[2]
            flag = 'flag.png'
            if len(args) > 3:
                flag = args[3] + '.png'
            icon = self.map.icon(flag)
            # icon_counter makes every icon's slipmap object key unique.
            self.map.add_object(mp_slipmap.SlipIcon('icon - %s [%u]' % (str(flag),self.icon_counter),
                                                    (float(lat),float(lon)),
                                                    icon, layer=3, rotation=0, follow=False))
            self.icon_counter += 1
    elif args[0] == "set":
        # Update map settings and re-apply brightness immediately.
        self.map_settings.command(args[1:])
        self.map.add_object(mp_slipmap.SlipBrightness(self.map_settings.brightness))
    elif args[0] == "sethome":
        self.cmd_set_home(args)
    elif args[0] == "sethomepos":
        self.cmd_set_homepos(args)
    elif args[0] == "setorigin":
        self.cmd_set_origin(args)
    elif args[0] == "setoriginpos":
        self.cmd_set_originpos(args)
    elif args[0] == "zoom":
        self.cmd_zoom(args)
    elif args[0] == "center":
        self.cmd_center(args)
    elif args[0] == "follow":
        self.cmd_follow(args)
    else:
        print("usage: map <icon|set>")
|
python
|
def get_name_or_instance_id(self, with_id=False):
    """Returns the name of an instance if existant, else return the instance id

    Args:
        with_id (bool): Include the instance ID even if the name is found (default: False)

    Returns:
        Name and/or instance ID of the instance object
    """
    name = self.get_tag('Name', case_sensitive=False)
    # Fall back to the instance id when the tag is missing or blank.
    if not name or not name.value.strip():
        return self.id
    if with_id:
        return '{0} ({1})'.format(name.value, self.id)
    return name.value
|
python
|
def pin(value):
    '''A small pin that represents the result of the build process'''
    # Identity comparisons are deliberate: value may be the NOT_FOUND
    # sentinel rather than a real boolean.
    if value is True:
        return draw_pin('Build Passed')
    if value is False:
        return draw_pin('Build Failed', 'red')
    if value is NOT_FOUND:
        return draw_pin('Build N / A', 'lightGray', 'black')
    return draw_pin('In progress ...', 'lightGray', 'black')
|
java
|
/**
 * Forwards the remote-criteria request to the next enabled listener in the
 * chain, or handles it locally when no further listener is enabled.
 */
public boolean handleRemoteCriteria(StringBuffer strFilter, boolean bIncludeFileName, Vector<BaseField> vParamList)
{
    FileListener target = (FileListener)this.getNextEnabledListener();
    if (target == null)
        return this.doRemoteCriteria(strFilter, bIncludeFileName, vParamList);
    return target.doRemoteCriteria(strFilter, bIncludeFileName, vParamList);
}
|
python
|
def prepare_notes(self, *notes, **keyword_notes):
    """Get injection values for all given notes."""
    # '__partial' is a control flag, not a note; remove it before iterating.
    partial = keyword_notes.pop('__partial', False)
    positional = tuple(self.get(note) for note in notes)
    keyword = {}
    for arg, note in keyword_notes.items():
        optional = isinstance(note, tuple) and len(note) == 2 and note[0] == MAYBE
        target = note[1] if optional else note
        if optional or partial:
            # MAYBE-wrapped or partial lookups may silently fail.
            try:
                keyword[arg] = self.get(target)
            except LookupError:
                continue
        else:
            keyword[arg] = self.get(note)
    return positional, keyword
|
java
|
@Override
public Behavior initialBehavior(Optional<BlogState> snapshotState) {
    // A non-empty snapshot proves the initial AddPost already happened,
    // so the restored behavior must be the post-added one.
    boolean hasState = snapshotState.isPresent() && !snapshotState.get().isEmpty();
    if (!hasState) {
        // No usable snapshot: start from the empty state.
        BehaviorBuilder builder = newBehaviorBuilder(BlogState.EMPTY);
        // TODO define command and event handlers
        return builder.build();
    }
    return becomePostAdded(snapshotState.get());
}
|
java
|
/**
 * Resolves the model for the given parameters, or {@code null} when no
 * model key can be derived from them.
 */
public Model getModel(MP params) {
    Key<Model> key = getModelKey(params);
    if (key == null) {
        return null;
    }
    return key.get();
}
|
python
|
def compile(self):
    """
    This method isn't yet idempotent; calling multiple times may yield
    unexpected results
    """
    # Can't tell if this is a hack or not. Revisit later
    self.context.set_query(self)
    # Render each clause in SQL order; empty/None fragments are dropped.
    fragments = [
        self.format_subqueries(),   # WITH (translated subqueries)
        self.format_select_set(),   # SELECT
        self.format_table_set(),    # FROM, JOIN, UNION
        self.format_where(),        # WHERE
        self.format_group_by(),     # GROUP BY and HAVING
        self.format_order_by(),     # ORDER BY
        self.format_limit(),        # LIMIT
    ]
    return '\n'.join(fragment for fragment in fragments if fragment)
|
java
|
/**
 * Queries the total material counts from the WeChat material API.
 * On success the payload lives in errmsg; otherwise the whole response
 * is serialized so the caller can inspect the error code.
 */
public GetMaterialTotalCountResponse countMaterial(){
    String url = BASE_API_URL + "cgi-bin/material/get_materialcount?access_token=#";
    BaseResponse r = executeGet(url);
    String resultJson = isSuccess(r.getErrcode()) ? r.getErrmsg() : r.toJsonString();
    return JSONUtil.toBean(resultJson, GetMaterialTotalCountResponse.class);
}
|
python
|
def print_environment_vars(env):
    """
    Print ``Ansible`` and ``Molecule`` environment variables and returns None.

    :param env: A dict containing the shell's environment as collected by
     ``os.environ``.
    :return: None
    """
    ansible_env = {key: value for key, value in env.items() if 'ANSIBLE_' in key}
    molecule_env = {key: value for key, value in env.items() if 'MOLECULE_' in key}
    print_debug('ANSIBLE ENVIRONMENT', safe_dump(ansible_env))
    print_debug('MOLECULE ENVIRONMENT', safe_dump(molecule_env))
    # Merge both namespaces into a single sorted KEY=VALUE replay string.
    combined_env = dict(ansible_env)
    combined_env.update(molecule_env)
    replay = " ".join(
        "{}={}".format(key, value) for key, value in sorted(combined_env.items()))
    print_debug('SHELL REPLAY', replay)
    print()
|
java
|
/**
 * Renders a DOM {@link Node} as dump output, dispatching on the concrete
 * node type (Document, Element, Text, Attr, or generic Node) and recursing
 * into related nodes while decrementing {@code maxlevel} to bound depth.
 */
public static DumpData toDumpData(Node node, PageContext pageContext, int maxlevel, DumpProperties props) {
	// Stop recursing once the depth budget is exhausted.
	if (maxlevel <= 0) {
		return DumpUtil.MAX_LEVEL_REACHED;
	}
	maxlevel--;
	// Document
	if (node instanceof Document) {
		DumpTable table = new DumpTable("xml", "#cc9999", "#ffffff", "#000000");
		table.setTitle("XML Document");
		table.appendRow(1, new SimpleDumpData("XmlComment"), new SimpleDumpData(XMLUtil.getProperty(node, XMLUtil.XMLCOMMENT, null).toString()));
		table.appendRow(1, new SimpleDumpData("XmlRoot"), DumpUtil.toDumpData(XMLUtil.getProperty(node, XMLUtil.XMLROOT, null), pageContext, maxlevel, props));
		return table;
	}
	// Element
	if (node instanceof Element) {
		DumpTable table = new DumpTable("xml", "#cc9999", "#ffffff", "#000000");
		table.setTitle("XML Element");
		table.appendRow(1, new SimpleDumpData("xmlName"), new SimpleDumpData(XMLUtil.getProperty(node, XMLUtil.XMLNAME, null).toString()));
		table.appendRow(1, new SimpleDumpData("XmlNsPrefix"), new SimpleDumpData(XMLUtil.getProperty(node, XMLUtil.XMLNSPREFIX, null).toString()));
		table.appendRow(1, new SimpleDumpData("XmlNsURI"), new SimpleDumpData(XMLUtil.getProperty(node, XMLUtil.XMLNSURI, null).toString()));
		table.appendRow(1, new SimpleDumpData("XmlText"), DumpUtil.toDumpData(XMLUtil.getProperty(node, XMLUtil.XMLTEXT, null), pageContext, maxlevel, props));
		table.appendRow(1, new SimpleDumpData("XmlComment"), new SimpleDumpData(XMLUtil.getProperty(node, XMLUtil.XMLCOMMENT, null).toString()));
		table.appendRow(1, new SimpleDumpData("XmlAttributes"), DumpUtil.toDumpData(XMLUtil.getProperty(node, XMLUtil.XMLATTRIBUTES, null), pageContext, maxlevel, props));
		table.appendRow(1, new SimpleDumpData("XmlChildren"), DumpUtil.toDumpData(XMLUtil.getProperty(node, XMLUtil.XMLCHILDREN, null), pageContext, maxlevel, props));
		return table;
	}
	// Text
	if (node instanceof Text) {
		DumpTable table = new DumpTable("xml", "#cc9999", "#ffffff", "#000000");
		table.setTitle("XML Text");
		Text txt = (Text) node;
		table.appendRow(1, new SimpleDumpData("XmlText"), new SimpleDumpData(txt.getData()));
		return table;
	}
	// Attr
	if (node instanceof Attr) {
		DumpTable table = new DumpTable("xml", "#cc9999", "#ffffff", "#000000");
		table.setTitle("XML Attr");
		table.appendRow(1, new SimpleDumpData("xmlName"), new SimpleDumpData(XMLUtil.getProperty(node, XMLUtil.XMLNAME, null).toString()));
		table.appendRow(1, new SimpleDumpData("XmlValue"), DumpUtil.toDumpData(((Attr) node).getValue(), pageContext, maxlevel, props));
		table.appendRow(1, new SimpleDumpData("XmlType"), new SimpleDumpData(XMLUtil.getTypeAsString(node, true)));
		return table;
	}
	// Node (fallback for every other node type; title carries the class name)
	DumpTable table = new DumpTable("xml", "#cc9999", "#ffffff", "#000000");
	table.setTitle("XML Node (" + ListUtil.last(node.getClass().getName(), ".", true) + ")");
	table.appendRow(1, new SimpleDumpData("xmlName"), new SimpleDumpData(XMLUtil.getProperty(node, XMLUtil.XMLNAME, null).toString()));
	table.appendRow(1, new SimpleDumpData("XmlNsPrefix"), new SimpleDumpData(XMLUtil.getProperty(node, XMLUtil.XMLNSPREFIX, null).toString()));
	table.appendRow(1, new SimpleDumpData("XmlNsURI"), new SimpleDumpData(XMLUtil.getProperty(node, XMLUtil.XMLNSURI, null).toString()));
	table.appendRow(1, new SimpleDumpData("XmlText"), DumpUtil.toDumpData(XMLUtil.getProperty(node, XMLUtil.XMLTEXT, null), pageContext, maxlevel, props));
	table.appendRow(1, new SimpleDumpData("XmlComment"), new SimpleDumpData(XMLUtil.getProperty(node, XMLUtil.XMLCOMMENT, null).toString()));
	table.appendRow(1, new SimpleDumpData("XmlAttributes"), DumpUtil.toDumpData(XMLUtil.getProperty(node, XMLUtil.XMLATTRIBUTES, null), pageContext, maxlevel, props));
	table.appendRow(1, new SimpleDumpData("XmlChildren"), DumpUtil.toDumpData(XMLUtil.getProperty(node, XMLUtil.XMLCHILDREN, null), pageContext, maxlevel, props));
	table.appendRow(1, new SimpleDumpData("XmlType"), new SimpleDumpData(XMLUtil.getTypeAsString(node, true)));
	return table;
}
|
python
|
def listfiles(data_name):
    """
    List files in a dataset.

    Resolves ``data_name`` to a remote data source and prints every file
    path in it (directories get a trailing separator), traversing the
    resource depth-first.

    :param data_name: Name of the dataset whose files should be listed.
    """
    data_source = get_data_object(data_name, use_data_config=False)
    if not data_source:
        if 'output' in data_name:
            floyd_logger.info("Note: You cannot clone the output of a running job. You need to wait for it to finish.")
        # BUG FIX: sys.exit() used to be inside the 'output' branch only, so
        # any other unresolved dataset fell through and crashed below with an
        # AttributeError on data_source.resource_id. Always exit here.
        sys.exit()
    # Depth-first search
    dirs = ['']
    paths = []
    while dirs:
        cur_dir = dirs.pop()
        url = "/resources/{}/{}?content=true".format(data_source.resource_id, cur_dir)
        response = DataClient().request("GET", url).json()
        if response['skipped_files'] > 0:
            floyd_logger.info("Warning: in directory '%s', %s/%s files skipped (too many files)", cur_dir, response['skipped_files'], response['total_files'])
        files = response['files']
        files.sort(key=lambda f: f['name'])
        for f in files:
            path = os.path.join(cur_dir, f['name'])
            if f['type'] == 'directory':
                # Mark directories with a trailing separator in the listing
                path += os.sep
            paths.append(path)
            if f['type'] == 'directory':
                dirs.append(os.path.join(cur_dir, f['name']))
    for path in paths:
        floyd_logger.info(path)
|
java
|
/**
 * Builds an "errorProbabilities" quantity feature (source "sequencing")
 * from the per-base error probabilities of the given FASTQ read.
 *
 * @param fastq read to convert, must not be null
 * @return the populated quantity feature
 * @throws IllegalArgumentException if {@code fastq} is null
 */
public static QuantityFeature<AbstractSequence<NucleotideCompound>, NucleotideCompound> createErrorProbabilities(final Fastq fastq)
{
    if (fastq == null)
    {
        throw new IllegalArgumentException("fastq must not be null");
    }
    final QuantityFeature<AbstractSequence<NucleotideCompound>, NucleotideCompound> feature =
        new QuantityFeature<AbstractSequence<NucleotideCompound>, NucleotideCompound>("errorProbabilities", "sequencing");
    feature.setQuantities(toList(errorProbabilities(fastq)));
    return feature;
}
|
python
|
def print_mhc_peptide(neoepitope_info, peptides, pepmap, outfile, netmhc=False):
    """
    Accept data about one neoepitope from merge_mhc_peptide_calls and print it to outfile. This is
    a generic module to reduce code redundancy.

    :param pandas.core.frame neoepitope_info: object containing with allele, pept, pred, core,
           normal_pept, normal_pred
    :param dict peptides: Dict of pepname: pep sequence for all IARS considered
    :param dict pepmap: Dict containing teh contents from the peptide map file.
    :param file outfile: An open file descriptor to the output file
    :param bool netmhc: Does this record correspond to a netmhcIIpan record? These are processed
           differently.
    """
    if netmhc:
        peptide_names = [neoepitope_info.peptide_name]
    else:
        # Every IAR whose sequence contains this peptide
        peptide_names = [name for name, sequence in peptides.items()
                         if neoepitope_info.pept in sequence]
    # Work on a plain dict so fields can be rewritten below
    record = dict(neoepitope_info._asdict())
    # Fusion peptides are characterized by an all-N normal partner
    if record['normal_pept'] == 'N' * len(record['pept']):
        record['normal_pept'] = record['normal_pred'] = 'NA'
    row_template = '\t'.join(['{ni[allele]}', '{ni[pept]}', '{ni[normal_pept]}',
                              '{pname}', '{ni[core]}', '0', '{ni[tumor_pred]}',
                              '{ni[normal_pred]}', '{pmap}'])
    for peptide_name in peptide_names:
        print(row_template.format(ni=record, pname=peptide_name,
                                  pmap=pepmap[peptide_name]), file=outfile)
    return None
|
java
|
/**
 * Applies every aggregate function in {@code functions} to its mapped column
 * for each slice of this grouping, producing a summary table with one row per
 * slice (named in the "Group" column) and one result column per
 * (column, function) pair.
 *
 * @param functions multimap of source column name to the aggregate functions
 *                  to apply to that column
 * @return the summary table, with the grouping column split back out
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public Table aggregate(ListMultimap<String, AggregateFunction<?,?>> functions) {
    Preconditions.checkArgument(!getSlices().isEmpty());
    Table groupTable = summaryTableName(sourceTable);
    StringColumn groupColumn = StringColumn.create("Group");
    groupTable.addColumns(groupColumn);
    for (Map.Entry<String, Collection<AggregateFunction<?,?>>> entry : functions.asMap().entrySet()) {
        String columnName = entry.getKey();
        int functionCount = 0;
        for (AggregateFunction function : entry.getValue()) {
            String colName = aggregateColumnName(columnName, function.functionName());
            ColumnType type = function.returnType();
            Column resultColumn = type.create(colName);
            for (TableSlice subTable : getSlices()) {
                Object result = function.summarize(subTable.column(columnName));
                // Only the first function populates the Group column, so it
                // gets exactly one entry per slice.
                if (functionCount == 0) {
                    groupColumn.append(subTable.name());
                }
                // Numeric results are widened to double before appending.
                if (result instanceof Number) {
                    Number number = (Number) result;
                    resultColumn.append(number.doubleValue());
                } else {
                    resultColumn.append(result);
                }
            }
            groupTable.addColumns(resultColumn);
            functionCount++;
        }
    }
    return splitGroupingColumn(groupTable);
}
|
java
|
/**
 * Case-insensitive prefix/suffix match: true when the (trimmed) original
 * value starts or ends with the candidate. Note the candidate is lowered
 * but deliberately not trimmed (preserved behavior).
 */
private boolean StringFieldMatch (String original, String matching) {
    String subject = original.toLowerCase().trim();
    String candidate = matching.toLowerCase();
    return subject.endsWith(candidate) || subject.startsWith(candidate);
}
|
python
|
def get_meshables(self):
    """
    return a list of components that are meshable (generally stars, but handles
    the envelope for an contact_binary)
    """
    tokens = re.findall(r"[\w']+", self.get_value())
    # Each 'star'/'envelope' keyword is followed by its component name.
    meshables = []
    for index, token in enumerate(tokens):
        if token in ('star', 'envelope'):
            meshables.append(tokens[index + 1])
    # Drop any star that has a sibling envelope (the envelope is meshed instead).
    skipped = set()
    for component in meshables:
        if self.get_sibling_of(component, kind='envelope'):
            skipped.add(component)
    return [component for component in meshables if component not in skipped]
|
python
|
def _parse_mode(mode):
    '''
    Converts ls mode output (rwxrwxrwx) -> integer (755).
    '''
    digits = []
    # owner, group, world — unknown triplets map to 0
    for triplet in (mode[0:3], mode[3:6], mode[6:9]):
        digits.append(str(SYMBOL_TO_OCTAL_PERMISSIONS.get(triplet, 0)))
    # Return as an integer
    return int(''.join(digits))
|
java
|
/**
 * Encodes the given content as a bit matrix, fixing the barcode format to
 * QR code. Convenience overload of the format-taking encode method.
 *
 * @param content text to encode
 * @param config  QR code options
 * @return the encoded bit matrix
 */
public static BitMatrix encode(String content, QrConfig config) {
	return encode(content, BarcodeFormat.QR_CODE, config);
}
|
java
|
/**
 * Deletes the given commerce warehouse item by delegating to the service.
 *
 * @param commerceWarehouseItem the item to delete
 * @return the deleted item, as returned by the service
 */
public static com.liferay.commerce.model.CommerceWarehouseItem deleteCommerceWarehouseItem(
	com.liferay.commerce.model.CommerceWarehouseItem commerceWarehouseItem) {
	return getService().deleteCommerceWarehouseItem(commerceWarehouseItem);
}
|
java
|
/**
 * Writes each float of the array to the output stream, in order, when
 * command processing is active; a no-op otherwise.
 *
 * @param value floats to write, must not be null
 * @return this instance, for chaining
 * @throws IOException if the underlying write fails
 */
public JBBPOut Float(final float... value) throws IOException {
    assertNotEnded();
    assertArrayNotNull(value);
    if (this.processCommands) {
        for (int i = 0; i < value.length; i++) {
            _writeFloat(value[i]);
        }
    }
    return this;
}
|
java
|
/**
 * Builds the report header row from the decorator's "show" flags, in a
 * fixed column order.
 */
private String[] getHeaders() {
    final List<String> headers = new ArrayList<>();
    appendIfShown(headers, decorator.getShowSources(), SOURCE_FIELD);
    appendIfShown(headers, decorator.getShowSourcesVersion(), SOURCE_VERSION_FIELD);
    appendIfShown(headers, decorator.getShowTargets(), TARGET_FIELD);
    appendIfShown(headers, decorator.getShowTargetsDownloadUrl(), DOWNLOAD_URL_FIELD);
    appendIfShown(headers, decorator.getShowTargetsSize(), SIZE_FIELD);
    appendIfShown(headers, decorator.getShowScopes(), SCOPE_FIELD);
    appendIfShown(headers, decorator.getShowLicenses(), LICENSE_FIELD);
    appendIfShown(headers, decorator.getShowLicensesLongName(), LICENSE_LONG_NAME_FIELD);
    appendIfShown(headers, decorator.getShowLicensesUrl(), LICENSE_URL_FIELD);
    appendIfShown(headers, decorator.getShowLicensesComment(), LICENSE_COMMENT_FIELD);
    return headers.toArray(new String[headers.size()]);
}

/** Adds {@code field} to {@code headers} when its flag is set. */
private static void appendIfShown(final List<String> headers, final boolean shown, final String field) {
    if (shown) {
        headers.add(field);
    }
}
|
java
|
/**
 * Begins updating an elastic pool asynchronously, wiring the service
 * response through to the supplied callback.
 *
 * @param resourceGroupName name of the resource group containing the server
 * @param serverName name of the server hosting the elastic pool
 * @param elasticPoolName name of the elastic pool to update
 * @param parameters the requested elastic pool changes
 * @param serviceCallback callback invoked with the result
 * @return a future tracking the long-running update operation
 */
public ServiceFuture<ElasticPoolInner> beginUpdateAsync(String resourceGroupName, String serverName, String elasticPoolName, ElasticPoolUpdate parameters, final ServiceCallback<ElasticPoolInner> serviceCallback) {
    return ServiceFuture.fromResponse(beginUpdateWithServiceResponseAsync(resourceGroupName, serverName, elasticPoolName, parameters), serviceCallback);
}
|
python
|
def getAll(self):
    '''Return a dictionary with all variables.

    Lazily populates ``self.ATTRIBUTES`` via ``load_attributes()`` on first
    use, then returns an independent deep copy so callers cannot mutate the
    cached attribute state.
    '''
    import copy
    if not self.ATTRIBUTES:
        self.load_attributes()
    # Previously this copied by round-tripping through eval(str(...)), which
    # executes whatever the values' reprs contain and breaks on any value
    # without a literal repr. deepcopy is safe and handles arbitrary values.
    return copy.deepcopy(self.ATTRIBUTES)
|
java
|
@Override
public boolean filter(Object entity) {
    // Fixtures that are not filterable accept every entity.
    if (!(fixture instanceof FilterableFixture)) {
        return true;
    }
    return filterableFixture().filter(entity);
}
|
python
|
def from_cli_multi_ifos(opt, ifos, **kwargs):
    """
    Get the strain for all ifos when using the multi-detector CLI
    """
    # One single-ifo strain lookup per detector, keyed by ifo name.
    return {ifo: from_cli_single_ifo(opt, ifo, **kwargs) for ifo in ifos}
|
java
|
/**
 * Swaps the bytes at the same {@code index} between the two arrays,
 * delegating to {@code TrivialSwap.swap}.
 *
 * @param byteArray1 first array
 * @param byteArray2 second array
 * @param index position to swap in both arrays
 */
public static void swap(byte[] byteArray1, byte[] byteArray2, int index) {
    TrivialSwap.swap(byteArray1, index, byteArray2, index);
}
|
java
|
/**
 * Lists the endpoints for the given application asynchronously, unwrapping
 * the service response to just its body.
 *
 * @param appId the application id
 * @return an observable emitting the endpoint map
 */
public Observable<Map<String, String>> listEndpointsAsync(UUID appId) {
    return listEndpointsWithServiceResponseAsync(appId).map(new Func1<ServiceResponse<Map<String, String>>, Map<String, String>>() {
        @Override
        public Map<String, String> call(ServiceResponse<Map<String, String>> response) {
            return response.body();
        }
    });
}
|
python
|
def ext_publish(self, instance, loop, *args, **kwargs):
    """If 'external_signaller' is defined, calls its publish method to
    notify external event systems.

    This is for internal usage only, but it's documented because it's part
    of the interface with external notification systems.
    """
    # No-op (returns None) when no external signaller is configured.
    if self.external_signaller is not None:
        # Assumes that the loop is managed by the external handler
        return self.external_signaller.publish_signal(self, instance, loop,
                                                      args, kwargs)
|
java
|
/**
 * Creates a binding that holds the exact negation of {@code a}, throwing
 * {@code ArithmeticException} on overflow (i.e. for Integer.MIN_VALUE),
 * per {@link Math#negateExact(int)}.
 *
 * @param a the observable value to negate
 * @return a binding tracking {@code -a}
 */
public static IntegerBinding negateExact(final ObservableIntegerValue a) {
    return createIntegerBinding(() -> Math.negateExact(a.get()), a);
}
|
python
|
def addChildFn(self, fn, *args, **kwargs):
    """
    Adds a function as a child job.

    :param fn: Function to be run as a child job with ``*args`` and ``**kwargs`` as \
               arguments to this function. See toil.job.FunctionWrappingJob for reserved \
               keyword arguments used to specify resource requirements.
    :return: The new child job that wraps fn.
    :rtype: toil.job.FunctionWrappingJob
    """
    # Promised resource requirements need the promise-aware wrapper job.
    if not PromisedRequirement.convertPromises(kwargs):
        return self.addChild(FunctionWrappingJob(fn, *args, **kwargs))
    return self.addChild(PromisedRequirementFunctionWrappingJob.create(fn, *args, **kwargs))
|
python
|
def get_numeric_score_increment(self):
    """Gets the incremental step.

    return: (decimal) - the increment
    raise:  IllegalState - ``is_based_on_grades()`` is ``true``
    *compliance: mandatory -- This method must be implemented.*
    """
    if self.is_based_on_grades():
        raise errors.IllegalState('This GradeSystem is based on grades')
    increment = self._my_map['numericScoreIncrement']
    # A stored null means "no increment configured".
    return None if increment is None else Decimal(str(increment))
|
java
|
/**
 * Clears legal-hold tags from the given storage container asynchronously,
 * unwrapping the service response to just its body.
 *
 * @param resourceGroupName the resource group of the storage account
 * @param accountName the storage account name
 * @param containerName the blob container name
 * @param tags the legal-hold tags to clear
 * @return an observable emitting the resulting legal hold state
 */
public Observable<LegalHoldInner> clearLegalHoldAsync(String resourceGroupName, String accountName, String containerName, List<String> tags) {
    return clearLegalHoldWithServiceResponseAsync(resourceGroupName, accountName, containerName, tags).map(new Func1<ServiceResponse<LegalHoldInner>, LegalHoldInner>() {
        @Override
        public LegalHoldInner call(ServiceResponse<LegalHoldInner> response) {
            return response.body();
        }
    });
}
|
python
|
def match_file(pattern, filename):
    '''
    The function will match a pattern in a file and return
    a rex object, which will have all the matches found in the file.

    :param pattern: pattern to search for; ``None`` returns ``None``.
    :param filename: file to scan; an empty file returns ``None``.
    :return: populated REX object, or ``None`` for the cases above.
    '''
    # Validate user data.
    if pattern is None:
        return None
    if os.stat(filename).st_size == 0:
        return None
    rexobj = REX(pattern, filename)
    rexpatstr = reformat_pattern(pattern)
    rexpat = re.compile(rexpatstr)
    rexobj.rex_patternstr = rexpatstr
    rexobj.rex_pattern = rexpat
    # Use a context manager so the file handle is closed even when reading
    # fails (the original open()/read()/close() leaked it on error).
    with open(filename, 'r') as sfile:
        data = sfile.read()
    # Line numbers reported to populate_resobj are 1-based.
    for line_count, line in enumerate(data.splitlines(), start=1):
        mobj = rexpat.match(line)
        if mobj:
            populate_resobj(rexobj, mobj, line_count)
    return rexobj
|
java
|
/**
 * Sends the request as an HTTP POST to the URL derived from the request
 * object and returns the typed response.
 *
 * @param request the request describing the endpoint and response type
 * @return the deserialized response
 */
@Override
public <TResponse> TResponse post(IReturn<TResponse> request) {
    return send(createUrl(request, HttpMethods.Post), HttpMethods.Post, request, request.getResponseType());
}
|
java
|
/**
 * Fetches the asset with the given id from the repository via HTTP GET.
 * The response code is validated by {@code testResponseCode}; the body is
 * then deserialized without further verification (hence "unverified" —
 * NOTE(review): presumably meaning no signature/integrity check; confirm).
 *
 * @param assetId id of the asset to fetch
 * @return the deserialized asset
 * @throws IOException on connection or read failure
 * @throws RequestFailureException when the response code indicates failure
 */
private Asset getUnverifiedAsset(String assetId) throws IOException, RequestFailureException {
    HttpURLConnection connection = createHttpURLConnectionToMassive("/assets/"
            + assetId);
    connection.setRequestMethod("GET");
    testResponseCode(connection);
    return JSONAssetConverter.readUnverifiedValue(connection.getInputStream());
}
|
java
|
/**
 * Runs {@code consolidate} and reports whether the deletion set size was
 * left unchanged by it (i.e. the container was already consolidated).
 */
public<V> boolean isConsolidated(Function<E,V> convertAdds, Function<K,V> convertDels) {
    final int deletionsBefore = getDeletions().size();
    consolidate(convertAdds, convertDels);
    final int deletionsAfter = getDeletions().size();
    return deletionsAfter == deletionsBefore;
}
|
java
|
/**
 * Invokes {@code methodName} on {@code object} reflectively, matching the
 * parameter types to the runtime classes of {@code parameters}. Any
 * reflective failure (no such method, access, NPE from a null argument,
 * invocation error) falls back to Groovy's dynamic dispatch.
 */
public static Object invoke(Object object, String methodName, Object[] parameters) {
    try {
        final Class[] parameterTypes = new Class[parameters.length];
        int i = 0;
        for (final Object parameter : parameters) {
            parameterTypes[i++] = parameter.getClass();
        }
        final Method method = object.getClass().getMethod(methodName, parameterTypes);
        return method.invoke(object, parameters);
    } catch (Throwable t) {
        // Fall back to Groovy's runtime method resolution.
        return InvokerHelper.invokeMethod(object, methodName, parameters);
    }
}
|
java
|
/**
 * Assembles the Google Analytics Measurement Protocol parameter list for a
 * virtual-pageview event: protocol/property/cache-buster fields, event
 * fields, user fields (with optional hashed identifiers), and the virtual
 * page fields. Optional values are appended only when present and non-empty.
 */
static ImmutableList<NameValuePair> buildParameters(
      String analyticsId,
      String clientId,
      String virtualPageName,
      String virtualPageTitle,
      String eventType,
      String eventName,
      boolean isUserSignedIn,
      boolean isUserInternal,
      Optional<Boolean> isUserTrialEligible,
      Optional<String> projectNumberHash,
      Optional<String> billingIdHash,
      Optional<String> clientHostname,
      Random random) {
    checkNotNull(analyticsId);
    checkNotNull(clientId);
    checkNotNull(virtualPageTitle);
    checkNotNull(virtualPageName);
    checkNotNull(eventType);
    checkNotNull(eventName);
    checkNotNull(projectNumberHash);
    checkNotNull(billingIdHash);
    checkNotNull(clientHostname);
    checkNotNull(random);
    ImmutableList.Builder<NameValuePair> listBuilder = new ImmutableList.Builder<>();
    // Analytics information
    // Protocol version
    listBuilder.add(new BasicNameValuePair(PARAM_PROTOCOL, "1"));
    // Analytics ID to send report to
    listBuilder.add(new BasicNameValuePair(PARAM_PROPERTY_ID, analyticsId));
    // Always report as a pageview
    listBuilder.add(new BasicNameValuePair(PARAM_TYPE, VALUE_TYPE_PAGEVIEW));
    // Always report as interactive
    listBuilder.add(new BasicNameValuePair(PARAM_IS_NON_INTERACTIVE, VALUE_FALSE));
    // Add a randomly generated cache buster
    listBuilder.add(new BasicNameValuePair(PARAM_CACHEBUSTER, Long.toString(random.nextLong())));
    // Event information
    listBuilder.add(new BasicNameValuePair(PARAM_EVENT_TYPE, eventType));
    listBuilder.add(new BasicNameValuePair(PARAM_EVENT_NAME, eventName));
    if (clientHostname.isPresent() && !clientHostname.get().isEmpty()) {
      listBuilder.add(new BasicNameValuePair(PARAM_HOSTNAME, clientHostname.get()));
    }
    // User information
    listBuilder.add(new BasicNameValuePair(PARAM_CLIENT_ID, clientId));
    if (projectNumberHash.isPresent() && !projectNumberHash.get().isEmpty()) {
      listBuilder.add(new BasicNameValuePair(PARAM_PROJECT_NUM_HASH, projectNumberHash.get()));
    }
    if (billingIdHash.isPresent() && !billingIdHash.get().isEmpty()) {
      listBuilder.add(new BasicNameValuePair(PARAM_BILLING_ID_HASH, billingIdHash.get()));
    }
    listBuilder.add(new BasicNameValuePair(PARAM_USER_SIGNED_IN, toValue(isUserSignedIn)));
    listBuilder.add(new BasicNameValuePair(PARAM_USER_INTERNAL, toValue(isUserInternal)));
    if (isUserTrialEligible.isPresent()) {
      listBuilder.add(new BasicNameValuePair(PARAM_USER_TRIAL_ELIGIBLE,
          toValue(isUserTrialEligible.get())));
    }
    // Virtual page information
    listBuilder.add(new BasicNameValuePair(PARAM_IS_VIRTUAL, VALUE_TRUE));
    listBuilder.add(new BasicNameValuePair(PARAM_PAGE, virtualPageName));
    if (!virtualPageTitle.isEmpty()) {
      listBuilder.add(new BasicNameValuePair(PARAM_PAGE_TITLE, virtualPageTitle));
    }
    return listBuilder.build();
}
|
java
|
public Observable<Void> suspendAsync(String resourceGroupName, String automationAccountName, UUID jobId) {
return suspendWithServiceResponseAsync(resourceGroupName, automationAccountName, jobId).map(new Func1<ServiceResponse<Void>, Void>() {
@Override
public Void call(ServiceResponse<Void> response) {
return response.body();
}
});
}
|
python
|
def _repair_row(self):
    '''
    Searches for missing titles that can be inferred from the surrounding data and automatically
    repairs those titles.

    Walks the rows of ``self.table`` between ``self.start`` and ``self.end``
    (exclusive of the end row). Leading rows whose first cell is empty are
    treated as title rows and stringified; rows whose first cell matches the
    year regex are stringified as year-title rows. The first row that fits
    neither case ends the title scan for subsequent empty-cell checks.
    '''
    # Repair any title rows
    check_for_title = True
    for row_index in range(self.start[0], self.end[0]):
        table_row = self.table[row_index]
        row_start = table_row[self.start[1]]
        # Look for empty cells leading titles
        if check_for_title and is_empty_cell(row_start):
            self._stringify_row(row_index)
        # Check for year titles in column or row
        # NOTE: basestring implies Python 2 here.
        elif (isinstance(row_start, basestring) and
                re.search(allregex.year_regex, row_start)):
            self._check_stringify_year_row(row_index)
        else:
            check_for_title = False
|
java
|
/**
 * Adds a GROUP BY field by name, wrapping it in a {@code SimpleField}.
 *
 * @param fieldname name of the field to group by
 * @return this query, for chaining
 * @deprecated prefer the {@code Field}-taking overload this delegates to
 */
@Deprecated
public final <T extends Query> T addGroupByField(String fieldname) {
    return addGroupByField(new SimpleField(fieldname));
}
|
python
|
def get_global_dist_packages_dir():
    """
    Attempts to work around virtualenvs and find the system dist_packages.
    Essentially this is implemented as a lookup table.

    Returns the first existing candidate path, or ``None`` when inside a
    virtualenv and no candidate exists on disk.

    Raises:
        NotImplementedError: in a virtualenv on a non-Linux platform.
    """
    import utool as ut
    if not ut.in_virtual_env():
        # Non venv case
        return get_site_packages_dir()
    else:
        candidates = []
        if ut.LINUX:
            import sys
            # BUG FIX: sys.version[0:3] truncates two-digit minors
            # (Python 3.10 -> '3.1'); build the strings from version_info.
            major, minor = sys.version_info[:2]
            candidates += [
                '/usr/lib/python%d.%d/dist-packages' % (major, minor),
                '/usr/lib/python%d/dist-packages' % (major,),
            ]
        else:
            raise NotImplementedError()
        for path in candidates:
            if ut.checkpath(path):
                return path
|
java
|
/**
 * Parses the previously captured VTIMEZONE (RFC 2445) property lines in
 * {@code vtzlines} into a {@code RuleBasedTimeZone}. Runs a small state
 * machine (INI -> VTZ -> TZI, ERR on malformed input) over the lines,
 * builds one {@code TimeZoneRule} per STANDARD/DAYLIGHT section (from
 * RDATE or RRULE data), derives the initial raw/DST offsets from the
 * earliest rule, normalizes at most one open-ended ("final") annual rule,
 * and installs the result in {@code tz} / {@code setID}.
 *
 * @return true when at least one rule was parsed successfully
 */
private boolean parse() {
        ///CLOVER:OFF
        if (vtzlines == null || vtzlines.size() == 0) {
            return false;
        }
        ///CLOVER:ON

        // timezone ID
        String tzid = null;

        int state = INI;
        boolean dst = false;    // current zone type
        String from = null;     // current zone from offset
        String to = null;       // current zone offset
        String tzname = null;   // current zone name
        String dtstart = null;  // current zone starts
        boolean isRRULE = false;    // true if the rule is described by RRULE
        List<String> dates = null;  // list of RDATE or RRULE strings
        List<TimeZoneRule> rules = new ArrayList<TimeZoneRule>();   // rule list
        int initialRawOffset = 0;   // initial offset
        int initialDSTSavings = 0;  // initial offset
        long firstStart = MAX_TIME; // the earliest rule start time

        for (String line : vtzlines) {
            // Each line is "NAME:VALUE"; lines without a colon are skipped.
            int valueSep = line.indexOf(COLON);
            if (valueSep < 0) {
                continue;
            }
            String name = line.substring(0, valueSep);
            String value = line.substring(valueSep + 1);

            switch (state) {
            case INI:
                if (name.equals(ICAL_BEGIN) && value.equals(ICAL_VTIMEZONE)) {
                    state = VTZ;
                }
                break;

            case VTZ:
                if (name.equals(ICAL_TZID)) {
                    tzid = value;
                } else if (name.equals(ICAL_TZURL)) {
                    tzurl = value;
                } else if (name.equals(ICAL_LASTMOD)) {
                    // Always in 'Z' format, so the offset argument for the parse method
                    // can be any value.
                    lastmod = new Date(parseDateTimeString(value, 0));
                } else if (name.equals(ICAL_BEGIN)) {
                    boolean isDST = value.equals(ICAL_DAYLIGHT);
                    if (value.equals(ICAL_STANDARD) || isDST) {
                        // tzid must be ready at this point
                        if (tzid == null) {
                            state = ERR;
                            break;
                        }
                        // initialize current zone properties
                        dates = null;
                        isRRULE = false;
                        from = null;
                        to = null;
                        tzname = null;
                        dst = isDST;
                        state = TZI;
                    } else {
                        // BEGIN property other than STANDARD/DAYLIGHT
                        // must not be there.
                        state = ERR;
                        break;
                    }
                } else if (name.equals(ICAL_END) /* && value.equals(ICAL_VTIMEZONE) */) {
                    break;
                }
                break;

            case TZI:
                if (name.equals(ICAL_DTSTART)) {
                    dtstart = value;
                } else if (name.equals(ICAL_TZNAME)) {
                    tzname = value;
                } else if (name.equals(ICAL_TZOFFSETFROM)) {
                    from = value;
                } else if (name.equals(ICAL_TZOFFSETTO)) {
                    to = value;
                } else if (name.equals(ICAL_RDATE)) {
                    // RDATE mixed with RRULE is not supported
                    if (isRRULE) {
                        state = ERR;
                        break;
                    }
                    if (dates == null) {
                        dates = new LinkedList<String>();
                    }
                    // RDATE value may contain multiple date delimited
                    // by comma
                    StringTokenizer st = new StringTokenizer(value, COMMA);
                    while (st.hasMoreTokens()) {
                        String date = st.nextToken();
                        dates.add(date);
                    }
                } else if (name.equals(ICAL_RRULE)) {
                    // RRULE mixed with RDATE is not supported
                    if (!isRRULE && dates != null) {
                        state = ERR;
                        break;
                    } else if (dates == null) {
                        dates = new LinkedList<String>();
                    }
                    isRRULE = true;
                    dates.add(value);
                } else if (name.equals(ICAL_END)) {
                    // Mandatory properties
                    if (dtstart == null || from == null || to == null) {
                        state = ERR;
                        break;
                    }
                    // if tzname is not available, create one from tzid
                    if (tzname == null) {
                        tzname = getDefaultTZName(tzid, dst);
                    }

                    // create a time zone rule
                    TimeZoneRule rule = null;
                    int fromOffset = 0;
                    int toOffset = 0;
                    int rawOffset = 0;
                    int dstSavings = 0;
                    long start = 0;
                    try {
                        // Parse TZOFFSETFROM/TZOFFSETTO
                        fromOffset = offsetStrToMillis(from);
                        toOffset = offsetStrToMillis(to);

                        if (dst) {
                            // If daylight, use the previous offset as rawoffset if positive
                            if (toOffset - fromOffset > 0) {
                                rawOffset = fromOffset;
                                dstSavings = toOffset - fromOffset;
                            } else {
                                // This is rare case..  just use 1 hour DST savings
                                rawOffset = toOffset - DEF_DSTSAVINGS;
                                dstSavings = DEF_DSTSAVINGS;
                            }
                        } else {
                            rawOffset = toOffset;
                            dstSavings = 0;
                        }

                        // start time
                        start = parseDateTimeString(dtstart, fromOffset);

                        // Create the rule
                        Date actualStart = null;
                        if (isRRULE) {
                            rule = createRuleByRRULE(tzname, rawOffset, dstSavings, start, dates, fromOffset);
                        } else {
                            rule = createRuleByRDATE(tzname, rawOffset, dstSavings, start, dates, fromOffset);
                        }
                        if (rule != null) {
                            actualStart = rule.getFirstStart(fromOffset, 0);
                            if (actualStart.getTime() < firstStart) {
                                // save from offset information for the earliest rule
                                firstStart = actualStart.getTime();
                                // If this is STD, assume the time before this transtion
                                // is DST when the difference is 1 hour.  This might not be
                                // accurate, but VTIMEZONE data does not have such info.
                                if (dstSavings > 0) {
                                    initialRawOffset = fromOffset;
                                    initialDSTSavings = 0;
                                } else {
                                    if (fromOffset - toOffset == DEF_DSTSAVINGS) {
                                        initialRawOffset = fromOffset - DEF_DSTSAVINGS;
                                        initialDSTSavings = DEF_DSTSAVINGS;
                                    } else {
                                        initialRawOffset = fromOffset;
                                        initialDSTSavings = 0;
                                    }
                                }
                            }
                        }
                    } catch (IllegalArgumentException iae) {
                        // bad format - rule == null..
                    }

                    if (rule == null) {
                        state = ERR;
                        break;
                    }
                    rules.add(rule);
                    state = VTZ;
                }
                break;
            }

            if (state == ERR) {
                vtzlines = null;
                return false;
            }
        }

        // Must have at least one rule
        if (rules.size() == 0) {
            return false;
        }

        // Create a initial rule
        InitialTimeZoneRule initialRule = new InitialTimeZoneRule(getDefaultTZName(tzid, false),
                initialRawOffset, initialDSTSavings);

        // Finally, create the RuleBasedTimeZone
        RuleBasedTimeZone rbtz = new RuleBasedTimeZone(tzid, initialRule);

        // An open-ended AnnualTimeZoneRule (end year == MAX_YEAR) is a
        // "final" rule; at most two (one STD, one DST) are meaningful.
        int finalRuleIdx = -1;
        int finalRuleCount = 0;
        for (int i = 0; i < rules.size(); i++) {
            TimeZoneRule r = rules.get(i);
            if (r instanceof AnnualTimeZoneRule) {
                if (((AnnualTimeZoneRule)r).getEndYear() == AnnualTimeZoneRule.MAX_YEAR) {
                    finalRuleCount++;
                    finalRuleIdx = i;
                }
            }
        }
        if (finalRuleCount > 2) {
            // Too many final rules
            return false;
        }

        if (finalRuleCount == 1) {
            if (rules.size() == 1) {
                // Only one final rule, only governs the initial rule,
                // which is already initialized, thus, we do not need to
                // add this transition rule
                rules.clear();
            } else {
                // Normalize the final rule
                AnnualTimeZoneRule finalRule = (AnnualTimeZoneRule)rules.get(finalRuleIdx);
                int tmpRaw = finalRule.getRawOffset();
                int tmpDST = finalRule.getDSTSavings();

                // Find the last non-final rule
                Date finalStart = finalRule.getFirstStart(initialRawOffset, initialDSTSavings);
                Date start = finalStart;
                for (int i = 0; i < rules.size(); i++) {
                    if (finalRuleIdx == i) {
                        continue;
                    }
                    TimeZoneRule r = rules.get(i);
                    Date lastStart = r.getFinalStart(tmpRaw, tmpDST);
                    if (lastStart.after(start)) {
                        start = finalRule.getNextStart(lastStart.getTime(),
                                r.getRawOffset(),
                                r.getDSTSavings(),
                                false);
                    }
                }
                TimeZoneRule newRule;
                // Reference comparison is intentional: start is only
                // reassigned when a later non-final rule start was found.
                if (start == finalStart) {
                    // Transform this into a single transition
                    newRule = new TimeArrayTimeZoneRule(
                            finalRule.getName(),
                            finalRule.getRawOffset(),
                            finalRule.getDSTSavings(),
                            new long[] {finalStart.getTime()},
                            DateTimeRule.UTC_TIME);
                } else {
                    // Update the end year
                    int fields[] = Grego.timeToFields(start.getTime(), null);
                    newRule = new AnnualTimeZoneRule(
                            finalRule.getName(),
                            finalRule.getRawOffset(),
                            finalRule.getDSTSavings(),
                            finalRule.getRule(),
                            finalRule.getStartYear(),
                            fields[0]);
                }
                rules.set(finalRuleIdx, newRule);
            }
        }

        for (TimeZoneRule r : rules) {
            rbtz.addTransitionRule(r);
        }

        tz = rbtz;
        setID(tzid);
        return true;
}
|
java
|
/**
 * Creates and persists a child execution of the given parent: copies the
 * common properties, links parent/child both ways, inherits the process
 * definition and (parent) process instance identifiers, and fires
 * ENTITY_CREATED / ENTITY_INITIALIZED events when dispatching is enabled.
 *
 * @param parentExecutionEntity the parent to attach the new child to
 * @return the newly created (non-scope) child execution
 */
@Override
public ExecutionEntity createChildExecution(ExecutionEntity parentExecutionEntity) {
    ExecutionEntity childExecution = executionDataManager.create();
    inheritCommonProperties(parentExecutionEntity, childExecution);
    childExecution.setParent(parentExecutionEntity);
    childExecution.setProcessDefinitionId(parentExecutionEntity.getProcessDefinitionId());
    childExecution.setProcessDefinitionKey(parentExecutionEntity.getProcessDefinitionKey());
    // If the parent is itself the process instance, its own id becomes the
    // child's process instance id.
    childExecution.setProcessInstanceId(parentExecutionEntity.getProcessInstanceId() != null
            ? parentExecutionEntity.getProcessInstanceId() : parentExecutionEntity.getId());
    childExecution.setParentProcessInstanceId(parentExecutionEntity.getParentProcessInstanceId());
    childExecution.setScope(false);

    // manage the bidirectional parent-child relation
    parentExecutionEntity.addChildExecution(childExecution);

    // Insert the child execution
    insert(childExecution, false);

    if (logger.isDebugEnabled()) {
        logger.debug("Child execution {} created with parent {}", childExecution, parentExecutionEntity.getId());
    }

    if (getEventDispatcher().isEnabled()) {
        getEventDispatcher().dispatchEvent(ActivitiEventBuilder.createEntityEvent(ActivitiEventType.ENTITY_CREATED, childExecution));
        getEventDispatcher().dispatchEvent(ActivitiEventBuilder.createEntityEvent(ActivitiEventType.ENTITY_INITIALIZED, childExecution));
    }

    return childExecution;
}
|
java
|
/**
 * Recursively rewrites the charset declaration (and a few XML tags) of
 * HTML/HHC/HHK files under {@code inputFile}.
 *
 * <p>Input files are read as UTF-8 and rewritten to a temp file; the temp
 * file replaces the original only when the whole conversion succeeded
 * (previously a partial temp file could overwrite the original after an
 * I/O error, losing data).
 *
 * @param inputFile file or directory to process
 */
private void convertCharset(final File inputFile) {
    if (inputFile.isDirectory()) {
        // Recurse into sub-directories.
        final File[] files = inputFile.listFiles();
        if (files != null) {
            for (final File file : files) {
                convertCharset(file);
            }
        }
    } else if (FileUtils.isHTMLFile(inputFile.getName())||
            FileUtils.isHHCFile(inputFile.getName())||
            FileUtils.isHHKFile(inputFile.getName())) {
        final String fileName = inputFile.getAbsolutePath();
        final File outputFile = new File(fileName + FILE_EXTENSION_TEMP);
        log("Processing " + fileName, Project.MSG_INFO);
        BufferedReader reader = null;
        Writer writer = null;
        // Only swap the temp file in when the whole file converted cleanly.
        boolean converted = false;
        try {
            //prepare for the input and output
            final FileInputStream inputStream = new FileInputStream(inputFile);
            final InputStreamReader streamReader = new InputStreamReader(inputStream, UTF8);
            reader = new BufferedReader(streamReader);
            final FileOutputStream outputStream = new FileOutputStream(outputFile);
            final OutputStreamWriter streamWriter = new OutputStreamWriter(outputStream, UTF8);
            writer = new BufferedWriter(streamWriter);
            String value = reader.readLine();
            while(value != null) {
                //meta tag contains charset found
                if (value.contains("<meta http-equiv") && value.contains("charset")) {
                    final int insertPoint = value.indexOf("charset=") + "charset=".length();
                    final String subString = value.substring(0, insertPoint);
                    // NOTE(review): assumes the declared charset literally equals
                    // UTF8 on this line — indexOf returns -1 otherwise; confirm
                    // upstream always emits UTF-8 meta tags.
                    final int remainIndex = value.indexOf(UTF8) + UTF8.length();
                    final String remainString = value.substring(remainIndex);
                    //change the charset: HHC/HHK files get the Windows charset,
                    //plain HTML files get the HTML charset.
                    final String newValue = (FileUtils.isHHCFile(inputFile.getName()) || FileUtils.isHHKFile(inputFile.getName()) ?
                            subString + charsetMap.get(ATTRIBUTE_FORMAT_VALUE_WINDOWS) + remainString :
                            subString + charsetMap.get(ATTRIBUTE_FORMAT_VALUE_HTML) + remainString);
                    //write into the output file
                    writer.write(newValue);
                    //add line break
                    writer.write(LINE_SEPARATOR);
                } else {
                    // Rewrite known XML tags; first match wins.
                    if (value.contains(tag1)) {
                        value = replaceXmlTag(value, tag1);
                    } else if (value.contains(tag2)) {
                        value = replaceXmlTag(value, tag2);
                    } else if (value.contains(tag3)) {
                        value = replaceXmlTag(value, tag3);
                    }
                    //other values
                    writer.write(value);
                    writer.write(LINE_SEPARATOR);
                }
                value = reader.readLine();
            }
            converted = true;
        } catch (final FileNotFoundException e) {
            logger.error(e.getMessage(), e) ;
        } catch (final UnsupportedEncodingException e) {
            throw new RuntimeException(e);
        } catch (final IOException e) {
            logger.error(e.getMessage(), e) ;
        } finally {
            if (reader != null) {
                try {
                    reader.close();
                } catch (final IOException e) {
                    logger.error("Failed to close input stream: " + e.getMessage());
                }
            }
            if (writer != null) {
                try {
                    writer.close();
                } catch (final IOException e) {
                    logger.error("Failed to close output stream: " + e.getMessage());
                }
            }
        }
        if (converted) {
            try {
                deleteQuietly(inputFile);
                moveFile(outputFile, inputFile);
            } catch (final Exception e) {
                logger.error("Failed to replace " + inputFile + ": " + e.getMessage());
            }
        } else {
            // Conversion failed: keep the original file, discard the partial temp file.
            try {
                deleteQuietly(outputFile);
            } catch (final Exception e) {
                logger.error("Failed to delete temporary file " + outputFile + ": " + e.getMessage());
            }
        }
    }
}
|
python
|
def createmeta(self,
               projectKeys=None,
               projectIds=None,
               issuetypeIds=None,
               issuetypeNames=None,
               expand=None,
               ):
    """Get the metadata required to create issues, optionally filtered by projects and issue types.
    :param projectKeys: keys of the projects to filter the results with.
        Can be a single value or a comma-delimited string. May be combined
        with projectIds.
    :type projectKeys: Union[None, Tuple[str, str], str]
    :param projectIds: IDs of the projects to filter the results with. Can
        be a single value or a comma-delimited string. May be combined with
        projectKeys.
    :type projectIds: Union[List, Tuple[str, str]]
    :param issuetypeIds: IDs of the issue types to filter the results with.
        Can be a single value or a comma-delimited string. May be combined
        with issuetypeNames.
    :type issuetypeIds: Optional[List[str]]
    :param issuetypeNames: Names of the issue types to filter the results
        with. Can be a single value or a comma-delimited string. May be
        combined with issuetypeIds.
    :type issuetypeNames: Optional[str]
    :param expand: extra information to fetch inside each resource.
    :type expand: Optional[str]
    :rtype: Dict[str, Any]
    """
    # Fix: the old default was a shared mutable list ([]). Use None as the
    # sentinel and normalize here, preserving the historical behavior of
    # always including 'projectIds' in the query params.
    if projectIds is None:
        projectIds = []
    params = {}
    if projectKeys is not None:
        params['projectKeys'] = projectKeys
    # Accept either a comma-delimited string or a list of ids.
    if isinstance(projectIds, string_types):
        projectIds = projectIds.split(',')
    params['projectIds'] = projectIds
    if issuetypeIds is not None:
        params['issuetypeIds'] = issuetypeIds
    if issuetypeNames is not None:
        params['issuetypeNames'] = issuetypeNames
    if expand is not None:
        params['expand'] = expand
    return self._get_json('issue/createmeta', params)
|
java
|
/**
 * Fills {@code data} completely, reading one float per element in order.
 *
 * @param data destination array; every slot is overwritten
 * @throws IOException if reading a value fails
 */
public void readArray(float[] data) throws IOException {
    for (int index = 0; index < data.length; ++index) {
        data[index] = getFloat();
    }
}
|
python
|
def jiggle_source_code(self):  # type: () ->int
    """
    Updates version of central package

    Rewrites every ``__version__``-style assignment found in the files
    listed by ``self.file_inventory.source_files``.

    :return: number of files rewritten
    """
    changed = 0
    for file_name in self.file_inventory.source_files:
        to_write = []
        # self.create_missing(file_name, file_name)
        if not os.path.isfile(file_name):
            continue
        all_source = self.file_opener.read_this(file_name)
        if "__version_info__" in all_source:
            logger.warning("We have __version_info__ to sync up.")
            # raise TypeError()
        with self.file_opener.open_this(file_name, "r") as infile:
            for line in infile:
                leading_white = self.leading_whitespace(line)
                version, version_token = dunder_version.find_in_line(line)
                if version:
                    # Keep surrounding commas so tuple/list contexts
                    # (e.g. `__version__ = "1.0",`) survive the rewrite.
                    simplified_line = dunder_version.simplify_line(
                        line, keep_comma=True
                    )
                    if simplified_line.strip(" \t\n").endswith(","):
                        comma = ","
                    else:
                        comma = ""
                    if simplified_line.strip(" \t\n").startswith(","):
                        start_comma = ","
                    else:
                        start_comma = ""
                    to_write.append(
                        '{0}{1}{2} = "{3}"{4}{5}\n'.format(
                            start_comma,
                            leading_white,
                            version_token,
                            # NOTE(review): `unicode` is Python 2 only —
                            # confirm a compat shim provides it on Python 3.
                            unicode(self.version_to_write()),
                            comma,
                            self.signature,
                        )
                    )
                else:
                    # Non-version lines pass through unchanged.
                    to_write.append(line)
        # Fix: the re-read handle was previously leaked (never closed).
        with self.file_opener.open_this(file_name, "r") as reread:
            check(reread.read(), "".join(to_write))
        # NOTE(review): writing with the builtin open() while reading via
        # file_opener may differ in encoding handling — confirm.
        with open(file_name, "w") as outfile:
            outfile.writelines(to_write)
        changed += 1
    return changed
|
python
|
def get_monitors(self, condition=None, page_size=1000):
    """Return an iterator over all monitors matching the provided condition
    Get all inactive monitors and print id::
        for mon in dc.monitor.get_monitors(MON_STATUS_ATTR == "DISABLED"):
            print(mon.get_id())
    Get all the HTTP monitors and print id::
        for mon in dc.monitor.get_monitors(MON_TRANSPORT_TYPE_ATTR == "http"):
            print(mon.get_id())
    Many other possibilities exist. See the :mod:`devicecloud.condition` documention
    for additional details on building compound expressions.
    :param condition: An :class:`.Expression` which defines the condition
        which must be matched on the monitor that will be retrieved from
        Device Cloud. If a condition is unspecified, an iterator over
        all monitors for this account will be returned.
    :type condition: :class:`.Expression` or None
    :param int page_size: The number of results to fetch in a single page.
    :return: Generator yielding :class:`.DeviceCloudMonitor` instances matching the
        provided conditions.
    """
    req_kwargs = {}
    if condition:
        req_kwargs['condition'] = condition.compile()
    # Fix: page_size was documented but silently ignored; forward it so the
    # requested paging actually takes effect.
    for monitor_data in self._conn.iter_json_pages("/ws/Monitor", page_size=page_size, **req_kwargs):
        yield DeviceCloudMonitor.from_json(self._conn, monitor_data, self._tcp_client_manager)
|
java
|
/**
 * Fans the aggregated result out to every registered result handler,
 * in registration order.
 *
 * @param aggregatedResult the result to hand to each handler
 */
protected void processResult(RHI aggregatedResult) {
    for (final ResultHandler<RHI> handler : resultHandlers) {
        handler.handleResult(aggregatedResult);
    }
}
|
python
|
def getstate(d):
    '''Return a deep copy of ``d`` with the keys ``_links`` and
    ``_embedded`` removed.

    The input dictionary itself is left untouched.

    :param d: dictionary to copy
    :raises TypeError: if ``d`` is not a dict
    '''
    if not isinstance(d, dict):
        raise TypeError("Can only get the state of a dictionary")
    state = copy.deepcopy(d)
    for key in ('_links', '_embedded'):
        state.pop(key, None)
    return state
|
python
|
def partition_version_classifiers(
        classifiers: t.Sequence[str], version_prefix: str = 'Programming Language :: Python :: ',
        only_suffix: str = ' :: Only') -> t.Tuple[t.List[str], t.List[str]]:
    """Find version number classifiers in given list and partition them into 2 groups.

    Returns ``(versions_min, versions_only)``: version tuples from plain
    classifiers and from classifiers carrying the "Only" suffix,
    respectively. Classifiers that do not parse as dotted integers
    (e.g. implementation names) are silently skipped.
    """
    versions_min: t.List[tuple] = []
    versions_only: t.List[tuple] = []
    for entry in classifiers:
        text = entry.replace(version_prefix, '')
        if text.endswith(only_suffix):
            text = text.replace(only_suffix, '')
            bucket = versions_only
        else:
            bucket = versions_min
        try:
            parsed = tuple(int(part) for part in text.split('.'))
        except ValueError:
            continue
        bucket.append(parsed)
    return versions_min, versions_only
|
java
|
/**
 * Copies the bytes between {@code bytes}' position and limit into a new
 * array, without disturbing the caller's buffer state.
 *
 * @param bytes source buffer; its position and limit are left untouched
 * @return a fresh array holding the remaining bytes
 */
public static byte[] copyBytes(ByteBuffer bytes) {
    // Read through an independent (read-only) view so the caller's
    // position and limit are never modified.
    final ByteBuffer view = bytes.asReadOnlyBuffer();
    final byte[] result = new byte[view.remaining()];
    view.get(result);
    return result;
}
|
java
|
/**
 * Removes a notification listener from the MBean named {@code name} by
 * delegating unchanged to the wrapped instance.
 *
 * @param name     the MBean the listener was registered on
 * @param listener the object name of the listener to remove
 * @param filter   the filter the listener was registered with
 * @param handback the handback object the listener was registered with
 * @throws InstanceNotFoundException if {@code name} does not identify a registered MBean
 * @throws ListenerNotFoundException if the listener/filter/handback triple is not registered
 */
public void removeNotificationListener(ObjectName name, ObjectName listener, NotificationFilter filter, Object handback) throws InstanceNotFoundException, ListenerNotFoundException {
    delegate.removeNotificationListener(name, listener, filter, handback);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.