language | func_code_string
---|---
java
|
public int getSpaces(char[] array, int currentIndex, StringBuffer velocityBlock,
VelocityParserContext context)
{
int i = currentIndex;
while (i < array.length && Character.isWhitespace(array[i])) {
++i;
}
if (velocityBlock != null) {
velocityBlock.append(array, currentIndex, i - currentIndex);
}
return i;
}
|
java
|
public void copyStructure( DMatrixSparseCSC orig ) {
reshape(orig.numRows, orig.numCols, orig.nz_length);
this.nz_length = orig.nz_length;
System.arraycopy(orig.col_idx,0,col_idx,0,orig.numCols+1);
System.arraycopy(orig.nz_rows,0,nz_rows,0,orig.nz_length);
}
|
java
|
public void updateTargetCellule(SIBUuid8 targetMEUuid) throws SIResourceException
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(tc, "updateTargetCellule", targetMEUuid);
this.targetMEUuid = targetMEUuid;
sourceStreamManager.updateTargetCellule(targetMEUuid);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "updateTargetCellule");
}
|
python
|
async def set_volume(self, volume: int):
"""
Sets the volume of Lavalink.
Parameters
----------
volume : int
Between 0 and 150
"""
self._volume = max(min(volume, 150), 0)
await self.node.volume(self.channel.guild.id, self.volume)
|
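A brief usage sketch for the `set_volume` coroutine above, assuming `player` is an instance of the player class it belongs to, already connected to a Lavalink node:

await player.set_volume(200)  # clamped to 150 before being sent to the node
await player.set_volume(-5)   # clamped to 0
await player.set_volume(80)   # stored as-is and forwarded to Lavalink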
python
|
def weighted_hamming(b1, b2):
    """
    Hamming distance that emphasizes differences earlier in strings.
    """
    assert len(b1) == len(b2)
    hamming = 0
    for i in range(len(b1)):
        if b1[i] != b2[i]:
            # differences at more significant (leftward) positions
            # are weighted more heavily
            if i > 0:
                hamming += 1 + 1.0 / i
            else:
                # a mismatch in the first position would otherwise be
                # skipped entirely (and 1/i is undefined at i == 0);
                # weight it like i == 1. This weighting is arbitrary.
                hamming += 2
    return hamming
|
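A minimal usage sketch for `weighted_hamming` above (hypothetical calls; the i == 0 weight of 2 comes from the guard added in the fix):

print(weighted_hamming("abc", "abd"))  # 1.5: mismatch at i == 2 contributes 1 + 1/2
print(weighted_hamming("xbc", "abc"))  # 2:   mismatch at i == 0 takes the guard weight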
python
|
def _display(port=None, height=None, print_message=False, display_handle=None):
"""Internal version of `display`.
Args:
port: As with `display`.
height: As with `display`.
print_message: True to print which TensorBoard instance was selected
for display (if applicable), or False otherwise.
display_handle: If not None, an IPython display handle into which to
render TensorBoard.
"""
if height is None:
height = 800
if port is None:
infos = manager.get_all()
if not infos:
raise ValueError("Can't display TensorBoard: no known instances running.")
else:
info = max(manager.get_all(), key=lambda x: x.start_time)
port = info.port
else:
infos = [i for i in manager.get_all() if i.port == port]
info = (
max(infos, key=lambda x: x.start_time)
if infos
else None
)
if print_message:
if info is not None:
message = (
"Selecting TensorBoard with {data_source} "
"(started {delta} ago; port {port}, pid {pid})."
).format(
data_source=manager.data_source_from_info(info),
delta=_time_delta_from_info(info),
port=info.port,
pid=info.pid,
)
print(message)
else:
# The user explicitly provided a port, and we don't have any
# additional information. There's nothing useful to say.
pass
fn = {
_CONTEXT_COLAB: _display_colab,
_CONTEXT_IPYTHON: _display_ipython,
_CONTEXT_NONE: _display_cli,
}[_get_context()]
return fn(port=port, height=height, display_handle=display_handle)
|
python
|
def refresh(
        self):
    """*Refreshes this project's attributes if, for example, the parent document's projects or tasks have been sorted*
    **Usage:**
    To refresh the project:
    .. code-block:: python
        myProject.refresh()
    """
    if not self.parent:
        return
    self.parent.refresh()
    replace = self.parent.get_project(self.title)
    if not replace:
        return
    self.tags = replace.tags
    self.tasks = replace.tasks
    self.notes = replace.notes
    self.projects = replace.projects
    self.content = replace.to_string(indentLevel=0, title=False)
    return
|
java
|
public boolean render(RootDoc rootDoc, DocletRenderer renderer) {
if (!processOverview(rootDoc, renderer)) {
return false;
}
Set<PackageDoc> packages = new HashSet<PackageDoc>();
for (ClassDoc doc : rootDoc.classes()) {
packages.add(doc.containingPackage());
renderClass(doc, renderer);
}
for (PackageDoc doc : packages) {
renderer.renderDoc(doc);
}
return true;
}
|
python
|
def fetch(self, is_dl_forced=False):
"""
For the MGI resource, we connect to the remote database,
and pull the tables into local files.
We'll check the local table versions against the remote version
:return:
"""
# check if config exists; if it doesn't, error out and let user know
if 'dbauth' not in config.get_config() or \
        'mgi' not in config.get_config()['dbauth']:
    LOG.error("not configured with PG user/password.")
    return
# create the connection details for MGI
cxn = config.get_config()['dbauth']['mgi']
self.dataset.setFileAccessUrl(''.join((
'jdbc:postgresql://', cxn['host'], ':', str(cxn['port']), '/',
cxn['database'])), is_object_literal=True)
# process the tables
# self.fetch_from_pgdb(self.tables, cxn, 100) # for testing only
# self.fetch_from_pgdb(self.tables, cxn, None, is_dl_forced)
for query_map in self.resources['query_map']:
    with open(os.path.join(
            os.path.dirname(__file__), query_map['query']), 'r') as query_fh:
        query = query_fh.read()
force = False
if 'Force' in query_map:
force = query_map['Force']
self.fetch_query_from_pgdb(
query_map['outfile'], query, None, cxn, force=force)
# always get this - it has the version info
self.fetch_transgene_genes_from_db(cxn)
datestamp = ver = None
# get the resource version information from
# table mgi_dbinfo, already fetched above
outfile = '/'.join((self.rawdir, 'mgi_dbinfo'))
if os.path.exists(outfile):
with open(outfile, 'r') as f:
f.readline() # read the header row; skip
info = f.readline()
cols = info.split('\t')
ver = cols[0] # col 0 is public_version
ver = ver.replace('MGI ', '') # MGI 5.20 --> 5.20
# MGI has a datestamp for the data within the database;
# use it instead of the download date
# datestamp in the table: 2014-12-23 00:14:20[.12345]
# modification date without micro seconds
dat = cols[1].strip().split('.')[0]
datestamp = datetime.strptime(
dat, "%Y-%m-%d %H:%M:%S").strftime("%Y-%m-%d")
self.dataset.setVersion(datestamp, ver)
return
|
python
|
def card_names_and_ids(self):
"""Returns [(name, id), ...] pairs of cards from current board"""
b = Board(self.client, self.board_id)
cards = b.getCards()
card_names_and_ids = [(unidecode(c.name), c.id) for c in cards]
return card_names_and_ids
|
python
|
def fetch_contributing_projects(self, **kwargs):
"""
List projects as contributor
Fetch projects that the currently authenticated user has access to because he or she is a contributor.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.fetch_contributing_projects(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: PaginatedProjectResults
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.fetch_contributing_projects_with_http_info(**kwargs)
else:
(data) = self.fetch_contributing_projects_with_http_info(**kwargs)
return data
|
python
|
def show_observee(self, user_id, observee_id):
"""
Show an observee.
Gets information about an observed user.
*Note:* all users are allowed to view their own observees.
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - user_id
"""ID"""
path["user_id"] = user_id
# REQUIRED - PATH - observee_id
"""ID"""
path["observee_id"] = observee_id
self.logger.debug("GET /api/v1/users/{user_id}/observees/{observee_id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
return self.generic_request("GET", "/api/v1/users/{user_id}/observees/{observee_id}".format(**path), data=data, params=params, single_item=True)
|
python
|
def get_urls(self, controllers=None, prefix_path=''):
"""
Return a list of all valid urls (minus args and kwargs, just the program paths)
for this manifest. If a single program has two urls, both will be returned.
"""
tag_match = lambda program: set(program.controllers) & set(controllers or [])
urls = set()
for key, value in self.manifest.items():
path = "%s/%s" % (prefix_path, key)
if path.endswith('/') and prefix_path:
path = path[:-1]
if hasattr(value, 'lower'):
# is a string redirect
urls.add(path)
elif isinstance(value, Manifest):
# is manifest
pp = '' if path == '/' else path # for 'stacked' root programs.
new_urls = value.get_urls(controllers=controllers, prefix_path=pp)
urls.update(new_urls)
elif isinstance(value, Program):
# make a list so we can iterate through it in the next `if` block
value = [value]
if hasattr(value, 'append'):
# defined as multiple programs; get the one for this controller tag.
for program in value:
if not program.controllers or not controllers:
# no controllers defined on program. Always add.
# or no tags defined for this get_urls call. Always add.
urls.add(path)
elif tag_match(program):
urls.add(path)
return urls
|
python
|
def main():
""" Read the options given on the command line and do the required actions.
This method is used in the entry_point `cast`.
"""
opts = docopt(__doc__, version="cast 0.1")
cast = pychromecast.PyChromecast(CHROMECAST_HOST)
ramp = cast.get_protocol(pychromecast.PROTOCOL_RAMP)
# Wait for ramp connection to be initted.
time.sleep(SLEEP_TIME)
if ramp is None:
print('Chromecast is not up or current app does not handle RAMP.')
return 1
if opts['next']:
ramp.next()
elif opts['pause']:
ramp.pause()
elif opts['play']:
ramp.play()
elif opts['toggle']:
ramp.playpause()
elif opts['seek']:
ramp.seek(opts['<second>'])
elif opts['rewind']:
ramp.rewind()
elif opts['status']:
_status_command(cast, ramp)
elif opts['volume']:
_volume_command(ramp, opts['<value>'])
# Wait for command to be sent.
time.sleep(SLEEP_TIME)
|
python
|
def graph(self, node_source, edge_source, layout_provider, **kwargs):
''' Creates a network graph using the given node, edge and layout provider.
Args:
node_source (:class:`~bokeh.models.sources.ColumnDataSource`) : a user-supplied data source
for the graph nodes. An attempt will be made to convert the object to
:class:`~bokeh.models.sources.ColumnDataSource` if needed. If none is supplied, one is created
for the user automatically.
edge_source (:class:`~bokeh.models.sources.ColumnDataSource`) : a user-supplied data source
for the graph edges. An attempt will be made to convert the object to
:class:`~bokeh.models.sources.ColumnDataSource` if needed. If none is supplied, one is created
for the user automatically.
layout_provider (:class:`~bokeh.models.graphs.LayoutProvider`) : a ``LayoutProvider`` instance to
provide the graph coordinates in Cartesian space.
**kwargs: :ref:`userguide_styling_line_properties` and :ref:`userguide_styling_fill_properties`
'''
kw = _graph(node_source, edge_source, **kwargs)
graph_renderer = GraphRenderer(layout_provider=layout_provider, **kw)
self.renderers.append(graph_renderer)
return graph_renderer
|
python
|
def read_snapshots(path, comments="#", directed=False, delimiter=None,
nodetype=None, timestamptype=None, encoding='utf-8', keys=False):
"""Read a DyNetx graph from snapshot graph list format.
Parameters
----------
path : file-like object
    Input file handle; lines are decoded using ``encoding``
delimiter : character
Column delimiter
"""
ids = None
lines = (line.decode(encoding) for line in path)
if keys:
ids = read_ids(path.name, delimiter=delimiter, timestamptype=timestamptype)
return parse_snapshots(lines, comments=comments, directed=directed, delimiter=delimiter, nodetype=nodetype,
timestamptype=timestamptype, keys=ids)
|
java
|
@Override
public ListOperationsResult listOperations(ListOperationsRequest request) {
request = beforeClientExecution(request);
return executeListOperations(request);
}
|
python
|
def subscribe(config, accounts, region, merge, debug):
"""subscribe accounts log groups to target account log group destination"""
config = validate.callback(config)
subscription = config.get('subscription')
if subscription is None:
log.error("config file: logs subscription missing")
sys.exit(1)
def converge_destination_policy(client, config):
destination_name = subscription['destination-arn'].rsplit(':', 1)[-1]
try:
extant_destinations = client.describe_destinations(
DestinationNamePrefix=destination_name).get('destinations')
except ClientError:
log.error("Log group destination not found: %s",
subscription['destination-arn'])
sys.exit(1)
account_ids = set()
for a in accounts:
if isinstance(a['role'], list):
account_ids.add(a['role'][-1].split(':')[4])
else:
account_ids.add(a['role'].split(':')[4])
if merge:
for d in extant_destinations:
if d['destinationName'] == destination_name:
for s in json.loads(d['accessPolicy']):
if s['Sid'] == 'CrossAccountDelivery':
account_ids.update(s['Principal']['AWS'])
client.put_destination_policy(
destinationName=destination_name,
accessPolicy=json.dumps({
'Statement': [{
'Action': 'logs:PutSubscriptionFilter',
'Effect': 'Allow',
'Principal': {'AWS': list(account_ids)},
'Resource': subscription['destination-arn'],
'Sid': 'CrossAccountDelivery'}]}))
def subscribe_account(t_account, subscription, region):
session = get_session(t_account['role'], region)
client = session.client('logs')
distribution = subscription.get('distribution', 'ByLogStream')
for g in t_account.get('groups'):
    if g.endswith('*'):
        g = g.replace('*', '')
        paginator = client.get_paginator('describe_log_groups')
        allLogGroups = paginator.paginate(
            logGroupNamePrefix=g).build_full_result()['logGroups']
for l in allLogGroups:
_process_subscribe_group(
client, l['logGroupName'], subscription, distribution)
else:
_process_subscribe_group(client, g, subscription, distribution)
if subscription.get('managed-policy'):
if subscription.get('destination-role'):
session = get_session(subscription['destination-role'], region)
else:
session = boto3.Session()
converge_destination_policy(session.client('logs'), config)
executor = debug and MainThreadExecutor or ThreadPoolExecutor
with executor(max_workers=32) as w:
futures = {}
for account in config.get('accounts', ()):
if accounts and account['name'] not in accounts:
continue
futures[w.submit(subscribe_account, account, subscription, region)] = account
for f in as_completed(futures):
account = futures[f]
if f.exception():
log.error("Error on account %s err: %s",
account['name'], f.exception())
log.info("Completed %s", account['name'])
|
python
|
def _parse_create_args(client, args):
"""Converts CLI arguments to args for VSManager.create_instance.
:param dict args: CLI arguments
"""
data = {
"hourly": args.get('billing', 'hourly') == 'hourly',
"cpus": args.get('cpu', None),
"ipv6": args.get('ipv6', None),
"disks": args.get('disk', None),
"os_code": args.get('os', None),
"memory": args.get('memory', None),
"flavor": args.get('flavor', None),
"domain": args.get('domain', None),
"host_id": args.get('host_id', None),
"private": args.get('private', None),
"hostname": args.get('hostname', None),
"nic_speed": args.get('network', None),
"boot_mode": args.get('boot_mode', None),
"dedicated": args.get('dedicated', None),
"post_uri": args.get('postinstall', None),
"datacenter": args.get('datacenter', None),
"public_vlan": args.get('vlan_public', None),
"private_vlan": args.get('vlan_private', None),
"public_subnet": args.get('subnet_public', None),
"private_subnet": args.get('subnet_private', None),
}
# The primary disk is included in the flavor and the local_disk flag is not needed
# Setting it to None prevents errors from the flag not matching the flavor
if not args.get('san') and args.get('flavor'):
data['local_disk'] = None
else:
data['local_disk'] = not args.get('san')
if args.get('image'):
if args.get('image').isdigit():
image_mgr = SoftLayer.ImageManager(client)
image_details = image_mgr.get_image(args.get('image'), mask="id,globalIdentifier")
data['image_id'] = image_details['globalIdentifier']
else:
data['image_id'] = args['image']
if args.get('userdata'):
data['userdata'] = args['userdata']
elif args.get('userfile'):
with open(args['userfile'], 'r') as userfile:
data['userdata'] = userfile.read()
# Get the SSH keys
if args.get('key'):
keys = []
for key in args.get('key'):
resolver = SoftLayer.SshKeyManager(client).resolve_ids
key_id = helpers.resolve_id(resolver, key, 'SshKey')
keys.append(key_id)
data['ssh_keys'] = keys
if args.get('public_security_group'):
pub_groups = args.get('public_security_group')
data['public_security_groups'] = [group for group in pub_groups]
if args.get('private_security_group'):
priv_groups = args.get('private_security_group')
data['private_security_groups'] = [group for group in priv_groups]
if args.get('tag', False):
data['tags'] = ','.join(args['tag'])
if args.get('host_id'):
data['host_id'] = args['host_id']
if args.get('placementgroup'):
resolver = SoftLayer.managers.PlacementManager(client).resolve_ids
data['placement_id'] = helpers.resolve_id(resolver, args.get('placementgroup'), 'PlacementGroup')
return data
|
java
|
public void logp(Level level,
String sourceClass,
String sourceMethod,
String msg,
Object params1,
Object params2,
Object params3
)
{
logp(level,sourceClass, sourceMethod, msg, new Object[] {params1, params2, params3});
}
|
python
|
def deactivate(self, asset_manager_id):
"""
It is only possible to deactivate an asset manager if your client_id is also the client_id that was used
to originally create the asset manager.
:param asset_manager_id:
:return:
"""
self.logger.info('Deactivate Asset Manager: %s', asset_manager_id)
url = '%s/asset-managers/%s' % (self.endpoint, asset_manager_id)
response = self.session.delete(url)
if response.ok:
self.logger.info('Successfully deactivated Asset Manager: %s', asset_manager_id)
return json_to_asset_manager(response.json())
else:
self.logger.error(response.text)
response.raise_for_status()
|
python
|
def create_class(self, method):
"""
Build the estimator class.
Returns
-------
:return : string
The built class as string.
"""
temp_type = self.temp('type')
temp_arr = self.temp('arr')
temp_arr_ = self.temp('arr[]')
temp_arr__ = self.temp('arr[][]')
# Samples:
temps = []
for atts in enumerate(self.estimator._fit_X): # pylint: disable=W0212
tmp = [temp_type.format(self.repr(a)) for a in atts[1]]
tmp = temp_arr.format(', '.join(tmp))
temps.append(tmp)
temps = ', '.join(temps)
temps = temp_arr__.format(type='double', name='X', values=temps,
n=self.n_templates, m=self.n_features)
# Classes:
classes = self.estimator._y # pylint: disable=W0212
classes = [temp_type.format(int(c)) for c in classes]
classes = ', '.join(classes)
classes = temp_arr_.format(type='int', name='y', values=classes,
n=self.n_templates)
temp_class = self.temp('separated.class')
return temp_class.format(class_name=self.class_name,
method_name=self.method_name, method=method,
n_features=self.n_features, X=temps, y=classes,
n_neighbors=self.n_neighbors,
n_templates=self.n_templates,
n_classes=self.n_classes,
power=self.power_param)
|
java
|
@Override
public void deleteByColumn(String schemaName, String tableName, String columnName, Object columnValue)
{
Object connection = null;
Pipeline pipeLine = null;
try
{
connection = getConnection();
if (isBoundTransaction())
{
pipeLine = ((Jedis) connection).pipelined();
}
String valueAsStr = PropertyAccessorHelper.getString(columnValue);
Double score = getDouble(valueAsStr);
Set<String> results = null;
if (resource != null && resource.isActive())
{
Response response = ((Transaction) connection).zrangeByScore(getHashKey(tableName, valueAsStr), score,
score);
// ((Transaction) connection).exec();
((RedisTransaction) resource).onExecute(((Transaction) connection));
results = (Set<String>) response.get();
}
else
{
results = ((Jedis) connection).zrangeByScore(getHashKey(tableName, valueAsStr), score, score);
}
// Set<String> results =
// connection.zrangeByScore(getHashKey(tableName, valueAsStr),
// score, score);
if (results != null)
{
for (String rowKey : results)
{
// byte[] hashKey = getEncodedBytes(getHashKey(tableName,
// rowKey));
Map<byte[], byte[]> columns = null;
columns = getColumns(connection, rowKey, columns);
for (byte[] column : columns.keySet()) // delete each
// column(e.g.
// field)
{
// connection.get(key)
String colName = PropertyAccessorFactory.STRING.fromBytes(String.class, columns.get(column));
if (resource != null && resource.isActive())
{
((Transaction) connection).hdel(getEncodedBytes(rowKey), column); // delete
// record
((Transaction) connection).zrem(getHashKey(tableName, colName), rowKey); // delete
// inverted
// index.
}
else
{
((Jedis) connection).hdel(getEncodedBytes(rowKey), column); // delete
// record
((Jedis) connection).zrem(getHashKey(tableName, colName), rowKey); // delete
// inverted
// index.
}
}
}
}
}
finally
{
if (pipeLine != null)
{
pipeLine.sync();
}
onCleanup(connection);
}
}
|
java
|
@Override
public String incomingClearText(String st) {
StringBuffer sb = new StringBuffer(st);
int idx = sb.indexOf("'IIDIA:");
if (idx != -1) {
int idx2 = sb.indexOf("'", idx + 1);
log.warn("removing invalid segment '" + sb.substring(idx + 1, idx2 + 1) + "'");
sb.delete(idx + 1, idx2 + 1);
}
return sb.toString();
}
|
python
|
def close(scope):
"""
Closes the existing connection with the remote host. This function is
rarely used, as normally Exscript closes the connection automatically
when the script has completed.
"""
conn = scope.get('__connection__')
conn.close(1)
scope.define(__response__=conn.response)
return True
|
java
|
public ResponseInfo syncPut(File file, String key, String token, UploadOptions options) {
final UpToken decodedToken = UpToken.parse(token);
ResponseInfo info = areInvalidArg(key, null, file, token, decodedToken);
if (info != null) {
return info;
}
return FormUploader.syncUpload(client, config, file, key, decodedToken, options);
}
|
java
|
public boolean authenticate(String pUser, PublicKey pKey, ServerSession pSession) {
return user != null && user.equals(pUser) && allowedKey.equals(pKey);
}
|
java
|
public void marshall(ListFileSharesRequest listFileSharesRequest, ProtocolMarshaller protocolMarshaller) {
if (listFileSharesRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(listFileSharesRequest.getGatewayARN(), GATEWAYARN_BINDING);
protocolMarshaller.marshall(listFileSharesRequest.getLimit(), LIMIT_BINDING);
protocolMarshaller.marshall(listFileSharesRequest.getMarker(), MARKER_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
}
|
java
|
@SuppressWarnings("unused") // called through reflection by RequestServer
public ModelsV3 delete(int version, ModelsV3 s) {
Model model = getFromDKV("key", s.model_id.key());
model.delete(); // lock & remove
return s;
}
|
python
|
async def crunch(image_data, format, silent=False):
""" Crunch image data, and return the processed data, or orignal data if operation failed. """
if (((format is CoverImageFormat.PNG) and (not HAS_OPTIPNG)) or
((format is CoverImageFormat.JPEG) and (not HAS_JPEGOPTIM))):
return image_data
with mkstemp_ctx.mkstemp(suffix=".%s" % (format.name.lower())) as tmp_out_filepath:
if not silent:
logging.getLogger("Cover").info("Crunching %s image..." % (format.name.upper()))
with open(tmp_out_filepath, "wb") as tmp_out_file:
tmp_out_file.write(image_data)
size_before = len(image_data)
if format is CoverImageFormat.PNG:
cmd = ["optipng", "-quiet", "-o1"]
elif format is CoverImageFormat.JPEG:
cmd = ["jpegoptim", "-q", "--strip-all"]
cmd.append(tmp_out_filepath)
p = await asyncio.create_subprocess_exec(*cmd,
stdin=asyncio.subprocess.DEVNULL,
stdout=asyncio.subprocess.DEVNULL,
stderr=asyncio.subprocess.DEVNULL)
await p.wait()
if p.returncode != 0:
if not silent:
logging.getLogger("Cover").warning("Crunching image failed")
return image_data
with open(tmp_out_filepath, "rb") as tmp_out_file:
crunched_image_data = tmp_out_file.read()
size_after = len(crunched_image_data)
pct_saved = 100 * (size_before - size_after) / size_before
if not silent:
logging.getLogger("Cover").debug("Crunching image saved %.2f%% filesize" % (pct_saved))
return crunched_image_data
|
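A usage sketch for the `crunch` coroutine above; `png_bytes` is a hypothetical payload, and an actual size reduction requires optipng (or jpegoptim for JPEG) on the PATH:

import asyncio
optimized = asyncio.run(crunch(png_bytes, CoverImageFormat.PNG))
print(len(png_bytes), '->', len(optimized))  # falls back to the original bytes on failure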
java
|
public static <K, V> RedisTemplate<K, V> newRedisTemplate(final RedisConnectionFactory connectionFactory) {
val template = new RedisTemplate<K, V>();
val string = new StringRedisSerializer();
val jdk = new JdkSerializationRedisSerializer();
template.setKeySerializer(string);
template.setValueSerializer(jdk);
template.setHashValueSerializer(jdk);
template.setHashKeySerializer(string);
template.setConnectionFactory(connectionFactory);
return template;
}
|
java
|
public D plus(CalendarDays days) {
long result = Math.addExact(this.getDaysSinceEpochUTC(), days.getAmount());
try {
return this.getChronology().getCalendarSystem().transform(result);
} catch (IllegalArgumentException iae) {
ArithmeticException ex = new ArithmeticException("Out of range: " + result);
ex.initCause(iae);
throw ex;
}
}
|
python
|
def mag_rotation(RAW_IMU, inclination, declination):
'''return an attitude rotation matrix that is consistent with the current mag
vector'''
m_body = Vector3(RAW_IMU.xmag, RAW_IMU.ymag, RAW_IMU.zmag)
m_earth = Vector3(m_body.length(), 0, 0)
r = Matrix3()
r.from_euler(0, -radians(inclination), radians(declination))
m_earth = r * m_earth
r.from_two_vectors(m_earth, m_body)
return r
|
python
|
def _parse_migrations(self):
"""Build a :class:`Migration` instance."""
migration = self.parsed['migration']
options = self._parse_options(migration)
versions = self._parse_versions(migration, options)
return Migration(versions, options)
|
python
|
def sys_call(cmd):
"""Execute cmd and capture stdout and stderr
:param cmd: command to be executed
:return: (stdout, stderr)
"""
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True)
# communicate() avoids the deadlock that reading the pipes sequentially can
# cause when one of them fills its OS buffer
out, err = p.communicate()
return out.splitlines(True), err.splitlines(True)
|
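A quick usage sketch for `sys_call` above; without text mode the captured lines are bytes on Python 3 (the echo command is just an illustration):

stdout_lines, stderr_lines = sys_call('echo hello')
print(stdout_lines)  # [b'hello\n'] on a POSIX shell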
java
|
public void endpointDeactivation(MessageEndpointFactory mef, ActivationSpec as) throws Exception
{
if (mef == null)
throw new Exception("MessageEndpointFactory is null");
if (as == null)
throw new Exception("ActivationSpec is null");
Endpoint e = new Endpoint(mef, as);
InflowRecovery ir = activeEndpoints.get(e);
if (ir != null)
ir.deactivate();
try
{
resourceAdapter.endpointDeactivation(mef, as);
}
finally
{
activeEndpoints.remove(e);
}
}
|
java
|
public static List<? extends Object> getObjectList(Config config, String path) {
try {
return config.getAnyRefList(path);
} catch (ConfigException.Missing | ConfigException.WrongType e) {
if (e instanceof ConfigException.WrongType) {
LOGGER.warn(e.getMessage(), e);
}
return null;
}
}
|
python
|
def repmc(instr, marker, value, lenout=None):
"""
Replace a marker with a character string.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/repmc_c.html
:param instr: Input string.
:type instr: str
:param marker: Marker to be replaced.
:type marker: str
:param value: Replacement value.
:type value: str
:param lenout: Optional available space in output string
:type lenout: int
:return: Output string.
:rtype: str
"""
if lenout is None:
lenout = ctypes.c_int(len(instr) + len(value) + len(marker) + 15)
instr = stypes.stringToCharP(instr)
marker = stypes.stringToCharP(marker)
value = stypes.stringToCharP(value)
out = stypes.stringToCharP(lenout)
libspice.repmc_c(instr, marker, value, lenout, out)
return stypes.toPythonString(out)
|
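A usage sketch for `repmc` above, following the CSPICE `repmc_c` semantics it wraps (the first occurrence of the marker is replaced):

print(repmc('Invalid value. The value was: #', '#', '250'))
# -> 'Invalid value. The value was: 250'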
java
|
@Indexable(type = IndexableType.DELETE)
@Override
public CommerceNotificationTemplate deleteCommerceNotificationTemplate(
long commerceNotificationTemplateId) throws PortalException {
return commerceNotificationTemplatePersistence.remove(commerceNotificationTemplateId);
}
|
python
|
def prepare_headers(self, headers):
"""Prepares the given HTTP headers."""
if headers:
self.headers = CaseInsensitiveDict((to_native_string(name), value) for name, value in headers.items())
else:
self.headers = CaseInsensitiveDict()
|
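A short sketch of `prepare_headers` above in use; `req` is a hypothetical stand-in for a requests-style `PreparedRequest`, and the resulting `CaseInsensitiveDict` matches header names case-insensitively:

req.prepare_headers({'User-Agent': 'my-app/1.0', 'Accept': 'application/json'})
print(req.headers['user-agent'])  # 'my-app/1.0' despite the different casing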
python
|
def raw(self):
"""
An eager version of Trace
Returns
-------
raw : RawTrace
"""
return RawTrace(self.filehandle,
self.dtype,
len(self),
self.shape,
self.readonly,
)
|
python
|
def add_xtalographic_info(data_api, struct_inflator):
""" Add the crystallographic data to the structure.
:param data_api the interface to the decoded data
:param struct_inflator the interface to put the data into the client object"""
if data_api.unit_cell is None and data_api.space_group is not None:
struct_inflator.set_xtal_info(data_api.space_group,
constants.UNKNOWN_UNIT_CELL)
elif data_api.unit_cell is not None and data_api.space_group is None:
struct_inflator.set_xtal_info(constants.UNKNOWN_SPACE_GROUP,
data_api.unit_cell)
elif data_api.unit_cell is None and data_api.space_group is None:
struct_inflator.set_xtal_info(constants.UNKNOWN_SPACE_GROUP,
constants.UNKNOWN_UNIT_CELL)
else:
struct_inflator.set_xtal_info(data_api.space_group,
data_api.unit_cell)
|
python
|
def override_environment(settings, **kwargs):
# type: (Settings, **str) -> Generator
"""
Override env vars and reload the Settings object
NOTE:
Obviously this context has to be in place before you import any
module which reads env values at import time.
NOTE:
The values in `kwargs` must be strings else you will get a cryptic:
TypeError: execve() arg 3 contains a non-string value
"""
old_env = os.environ.copy()
os.environ.update(kwargs)
settings._reload()
try:
    yield
finally:
for key in kwargs.keys():
del os.environ[key]
os.environ.update(old_env)
settings._reload()
|
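A usage sketch for `override_environment` above, assuming the function is wrapped with `contextlib.contextmanager` elsewhere in the module (it yields exactly once; `settings` and `run_migrations` are hypothetical stand-ins):

# per the docstring, all override values must be strings
with override_environment(settings, DATABASE_URL='sqlite://', DEBUG='1'):
    run_migrations()  # runs with the patched os.environ and reloaded settings
# on exit the original environment is restored and settings reloaded again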
java
|
public Configuration createConfiguration(KNXNetworkLink link)
{
final ConfigImpl c = new ConfigImpl(link);
configs.add(c);
logger.info("created configuration for " + link.getName());
return c;
}
|
java
|
public static long genSeed(final String ident) {
try {
MessageDigest instance = MessageDigest.getInstance("SHA-1");
byte[] strbytes = ident.getBytes("UTF-8");
byte[] digest = instance.digest(strbytes);
//use first 8 bytes as a long
return ByteBuffer.wrap(digest, 0, Long.BYTES).getLong();
} catch (NoSuchAlgorithmException | UnsupportedEncodingException e) {
//not random looking
return ident.hashCode();
}
}
|
java
|
private void processCustomValueLists() throws IOException
{
CustomFieldValueReader9 reader = new CustomFieldValueReader9(m_projectDir, m_file.getProjectProperties(), m_projectProps, m_file.getCustomFields());
reader.process();
}
|
java
|
public static void cancelAll(Context context,
Class<? extends GroundyService> groundyServiceClass) {
new GroundyServiceConnection(context, groundyServiceClass) {
@Override
protected void onGroundyServiceBound(GroundyService.GroundyServiceBinder binder) {
binder.cancelAllTasks();
}
}.start();
}
|
java
|
public static ExtractionFn fromQueryGranularity(final Granularity queryGranularity)
{
if (queryGranularity == null) {
return null;
} else {
return new TimeFormatExtractionFn(null, null, null, queryGranularity, true);
}
}
|
java
|
static String[] toArray(JsArrayString values) {
if (GWT.isScript()) {
return reinterpretCast(values);
} else {
int length = values.length();
String[] ret = new String[length];
for (int i = 0, l = length; i < l; i++) {
ret[i] = values.get(i);
}
return ret;
}
}
|
python
|
def __delete_directory(self, directory):
"""
Deletes given directory.
:param directory: Directory to delete.
:type directory: unicode
"""
for node in itertools.chain(self.__script_editor.model.get_project_nodes(directory),
self.__script_editor.model.get_directory_nodes(directory)):
self.__script_editor.model.unregister_project_nodes(node)
if node.family == "Directory":
self.__script_editor.model.unregister_project_nodes(node)
self.__script_editor.model.unregister_directory(node)
elif node.family == "Project":
self.__script_editor.remove_project(directory)
self.__delete_path(directory)
|
java
|
@Pure
@Inline(value="($1 != $2)", constantExpression=true)
public static boolean operator_notEquals(char a, int b) {
return a != b;
}
|
java
|
private void generateBuilderClass() {
if (data.isSkipBuilderGeneration()) {
return;
}
List<PropertyGen> nonDerived = nonDerivedProperties();
generateSeparator();
String finalType = data.isTypeFinal() ? "final " : "";
addLine(1, "/**");
addLine(1, " * The bean-builder for {@code " + data.getTypeRaw() + "}.");
if (data.isTypeGeneric()) {
for (int j = 0; j < data.getTypeGenericCount(); j++) {
addLine(1, " * @param " + data.getTypeGenericName(j, true) + " the type");
}
}
addLine(1, " */");
String superBuilder;
if (data.isSubClass()) {
superBuilder = data.getSuperTypeRaw() + ".Builder" + data.getSuperTypeGeneric(true);
} else if (data.isEffectiveBuilderScopeVisible()) {
data.ensureImport(DirectFieldsBeanBuilder.class);
superBuilder = "DirectFieldsBeanBuilder<" + data.getTypeNoExtends() + ">";
} else {
data.ensureImport(DirectPrivateBeanBuilder.class);
superBuilder = "DirectPrivateBeanBuilder<" + data.getTypeNoExtends() + ">";
}
if (data.isConstructable()) {
addLine(1, data.getEffectiveBuilderScope() + "static " + finalType +
"class Builder" + data.getTypeGeneric(true) + " extends " + superBuilder + " {");
} else {
addLine(1, data.getEffectiveBuilderScope() + "abstract static " + finalType +
"class Builder" + data.getTypeGeneric(true) + " extends " + superBuilder + " {");
}
if (nonDerived.size() > 0) {
addBlankLine();
generateBuilderProperties();
}
addBlankLine();
generateBuilderConstructorNoArgs();
generateBuilderConstructorCopy();
generateIndentedSeparator();
generateBuilderGet();
generateBuilderSet();
generateBuilderOtherSets();
if (data.isConstructable()) {
generateBuilderBuild();
}
generateIndentedSeparator();
generateBuilderPropertySetMethods();
generateIndentedSeparator();
generateBuilderToString();
addLine(1, "}");
addBlankLine();
}
|
python
|
def plot(figsize=None, formats=None, limit=100, titlelen=10, **kwargs):
"""Display an image [in a Jupyter Notebook] from a Quilt fragment path.
Intended for use with `%matplotlib inline`.
Convenience method that loops over subplots that call
`plt.imshow(image.imread(FRAG_PATH))`.
Keyword arguments
* figsize=None # None means auto, else provide (HEIGHT_INCHES, WIDTH_INCHES)
* formats=None # List of extensions as strings ['jpg', 'png', ...]
* limit=100 # maximum number of images to display
* titlelen=10 # max number of characters in subplot title
* **kwargs - all remaining kwargs are passed to plt.subplots;
see https://matplotlib.org/api/_as_gen/matplotlib.pyplot.subplots.html
"""
# pylint: disable=protected-access
def _plot(node, paths):
lower_formats = set((x.lower() for x in formats)) if formats is not None else None
def node_filter(frag, meta):
filepath = meta.get('_system', {}).get('filepath', None)
# don't try to read DataFrames as images
if isinstance(frag, string_types) and filepath:
_, ext = splitext_no_dot(filepath)
if lower_formats is None or ext.lower() in lower_formats:
return True
return False
# assume DataNode has one path; doesn't work with multi-fragment images
display = [('', paths[0], node._meta)]
# for GroupNodes, display all DataNode children
if isinstance(node, GroupNode):
datanodes = [(x, y) for (x, y) in node._items() if isinstance(y, DataNode)]
display = [(x, y._data(), y._meta) for (x, y) in datanodes]
# sort by name so iteration is reproducible (and unit tests pass)
display = sorted(display, key=lambda rec: rec[0])
display = [x for x in display if node_filter(x[1], x[2])]
if len(display) > limit:
print('Displaying {} of {} images{}'.format(limit, len(display), ELLIPSIS))
display = display[:limit]
# display can be empty e.g. if no DataNode children
if not display:
print('No images to display.')
return
# cast to int to avoid downstream complaints of
# 'float' object cannot be interpreted as an index
floatlen = float(len(display)) # prevent integer division in 2.7
cols = min(int(floor(sqrt(floatlen))), 8)
rows = int(ceil(floatlen/cols))
plt.tight_layout()
plt.subplots(
rows,
cols,
figsize=(cols*2, rows*2) if not figsize else figsize,
**kwargs)
for i in range(rows*cols):
axes = plt.subplot(rows, cols, i + 1) # subplots start at 1, not 0
axes.axis('off')
if i < len(display):
(name, frag, meta) = display[i]
plt.title(name[:titlelen] + ELLIPSIS if len(name) > titlelen else name)
filepath = meta.get('_system', {}).get('filepath', None)
_, ext = splitext_no_dot(filepath)
try:
bits = mpimg.imread(frag, format=ext)
plt.imshow(bits)
# Mac throws OSError, Linux IOError if file not recognizable
except (IOError, OSError) as err:
print('{}: {}'.format(name, str(err)))
continue
return _plot
|
java
|
public void marshall(GetApplicationPolicyRequest getApplicationPolicyRequest, ProtocolMarshaller protocolMarshaller) {
if (getApplicationPolicyRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(getApplicationPolicyRequest.getApplicationId(), APPLICATIONID_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
}
|
python
|
def disaggregate_radiation(data_daily,
sun_times=None,
pot_rad=None,
method='pot_rad',
angstr_a=0.25,
angstr_b=0.5,
bristcamp_a=0.75,
bristcamp_c=2.4,
mean_course=None):
"""general function for radiation disaggregation
Args:
data_daily: daily values
sun_times: daily dataframe including results of the util.sun_times function
pot_rad: hourly dataframe including potential radiation
method: keyword specifying the disaggregation method to be used
angstr_a: parameter a of the Angstrom model (intercept)
angstr_b: parameter b of the Angstrom model (slope)
mean_course: monthly values of the mean hourly radiation course
Returns:
Disaggregated hourly values of shortwave radiation.
"""
# check if disaggregation method has a valid value
if method not in ('pot_rad', 'pot_rad_via_ssd', 'pot_rad_via_bc', 'mean_course'):
raise ValueError('Invalid option')
glob_disagg = pd.Series(index=melodist.util.hourly_index(data_daily.index))
if method == 'mean_course':
assert mean_course is not None
pot_rad = pd.Series(index=glob_disagg.index)
pot_rad[:] = mean_course.unstack().loc[list(zip(pot_rad.index.month, pot_rad.index.hour))].values
else:
assert pot_rad is not None
pot_rad_daily = pot_rad.resample('D').mean()
if method in ('pot_rad', 'mean_course'):
globalrad = data_daily.glob
elif method == 'pot_rad_via_ssd':
# in this case use the Angstrom model
globalrad = pd.Series(index=data_daily.index, data=0.)
dates = sun_times.index[sun_times.daylength > 0] # account for polar nights
globalrad[dates] = angstroem(data_daily.ssd[dates], sun_times.daylength[dates],
pot_rad_daily[dates], angstr_a, angstr_b)
elif method == 'pot_rad_via_bc':
# using data from Bristow-Campbell model
globalrad = bristow_campbell(data_daily.tmin, data_daily.tmax, pot_rad_daily, bristcamp_a, bristcamp_c)
globalrad_equal = globalrad.reindex(pot_rad.index, method='ffill') # hourly values (replicate daily mean value for each hour)
pot_rad_daily_equal = pot_rad_daily.reindex(pot_rad.index, method='ffill')
glob_disagg = pot_rad / pot_rad_daily_equal * globalrad_equal
glob_disagg[glob_disagg < 1e-2] = 0.
return glob_disagg
|
java
|
public static MetricRequest fromProtobuf(TopologyMaster.MetricRequest request) {
String componentName = request.getComponentName();
Map<String, Set<String>> componentNameInstanceId = new HashMap<>();
if (request.getInstanceIdCount() == 0) {
// empty list means all instances
// 'null' means all instances
componentNameInstanceId.put(componentName, null);
} else {
Set<String> instances = new HashSet<>();
// only one component
componentNameInstanceId.put(componentName, instances);
// if there are instances specified
instances.addAll(request.getInstanceIdList());
}
Set<String> metricNames = new HashSet<>();
if (request.getMetricCount() > 0) {
metricNames.addAll(request.getMetricList());
} // empty list means no metrics
// default: the whole time horizon
long startTime = 0;
long endTime = Long.MAX_VALUE;
if (request.hasInterval()) { // endTime = now
endTime = System.currentTimeMillis();
long interval = request.getInterval(); // in seconds
if (interval <= 0) { // means all
startTime = 0;
} else { // means [-interval, now]
startTime = endTime - interval * 1000;
}
} else {
startTime = request.getExplicitInterval().getStart() * 1000;
endTime = request.getExplicitInterval().getEnd() * 1000;
}
// default: aggregate all metrics
MetricGranularity aggregationGranularity = AGGREGATE_ALL_METRICS;
if (request.hasMinutely() && request.getMinutely()) {
aggregationGranularity = AGGREGATE_BY_BUCKET;
}
return new MetricRequest(componentNameInstanceId, metricNames,
startTime, endTime, aggregationGranularity);
}
|
java
|
public Observable<BackupLongTermRetentionVaultInner> getAsync(String resourceGroupName, String serverName) {
return getWithServiceResponseAsync(resourceGroupName, serverName).map(new Func1<ServiceResponse<BackupLongTermRetentionVaultInner>, BackupLongTermRetentionVaultInner>() {
@Override
public BackupLongTermRetentionVaultInner call(ServiceResponse<BackupLongTermRetentionVaultInner> response) {
return response.body();
}
});
}
|
python
|
def GetDisplayNameForPathSpec(
cls, path_spec, mount_path=None, text_prepend=None):
"""Retrieves the display name of a path specification.
Args:
path_spec (dfvfs.PathSpec): path specification.
mount_path (Optional[str]): path where the file system that is used
by the path specification is mounted, such as "/mnt/image". The
mount path will be stripped from the absolute path defined by
the path specification.
text_prepend (Optional[str]): text to prepend.
Returns:
str: human readable version of the path specification or None.
"""
if not path_spec:
return None
relative_path = cls.GetRelativePathForPathSpec(
path_spec, mount_path=mount_path)
if not relative_path:
return path_spec.type_indicator
if text_prepend:
relative_path = '{0:s}{1:s}'.format(text_prepend, relative_path)
parent_path_spec = path_spec.parent
if parent_path_spec and path_spec.type_indicator in (
dfvfs_definitions.TYPE_INDICATOR_BZIP2,
dfvfs_definitions.TYPE_INDICATOR_GZIP):
parent_path_spec = parent_path_spec.parent
if parent_path_spec and parent_path_spec.type_indicator == (
dfvfs_definitions.TYPE_INDICATOR_VSHADOW):
store_index = getattr(path_spec.parent, 'store_index', None)
if store_index is not None:
return 'VSS{0:d}:{1:s}:{2:s}'.format(
store_index + 1, path_spec.type_indicator, relative_path)
return '{0:s}:{1:s}'.format(path_spec.type_indicator, relative_path)
|
java
|
public DateFormat getDateTimeFormat(int dateStyle, int timeStyle, ULocale loc) {
return formatHelper(this, loc, dateStyle, timeStyle);
}
|
java
|
public final AbstractItem removeIfMatches(final Filter filter, PersistentTransaction transaction) throws ProtocolException, TransactionException, SevereMessageStoreException
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(this, tc, "removeIfMatches", new Object[] { filter, transaction });
AbstractItem foundItem = cmdRemoveIfMatches(filter, transaction);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(this, tc, "removeIfMatches", foundItem);
return foundItem;
}
|
java
|
public LocalDate getInternalLocalDate(ColumnInformation columnInfo, TimeZone timeZone)
throws SQLException {
if (lastValueWasNull()) {
return null;
}
if (length == 0) {
lastValueNull |= BIT_LAST_FIELD_NULL;
return null;
}
String raw = new String(buf, pos, length, StandardCharsets.UTF_8);
switch (columnInfo.getColumnType().getSqlType()) {
case Types.DATE:
case Types.VARCHAR:
case Types.LONGVARCHAR:
case Types.CHAR:
if (raw.startsWith("0000-00-00")) {
return null;
}
try {
return LocalDate
.parse(raw, DateTimeFormatter.ISO_LOCAL_DATE.withZone(timeZone.toZoneId()));
} catch (DateTimeParseException dateParserEx) {
throw new SQLException(
raw + " cannot be parse as LocalDate (format is \"yyyy-MM-dd\" for data type \""
+ columnInfo.getColumnType() + "\")");
}
case Types.TIMESTAMP:
ZonedDateTime zonedDateTime = getInternalZonedDateTime(columnInfo, LocalDate.class,
timeZone);
return zonedDateTime == null ? null
: zonedDateTime.withZoneSameInstant(ZoneId.systemDefault()).toLocalDate();
default:
throw new SQLException(
"Cannot read LocalDate using a " + columnInfo.getColumnType().getJavaTypeName()
+ " field");
}
}
|
python
|
def get_cover_image(self, output_file_path=None, scope='profile/public'):
"""
Retrieve the Mxit user's cover image
No user authentication required
"""
data = _get(
token=self.oauth.get_user_token(scope),
uri='/user/cover'
)
if output_file_path:
with open(output_file_path, 'wb') as f:
f.write(data)
else:
return data
|
python
|
def walk(self, listener):
"""Walk the parse tree, using the given listener. The listener
should be a
stix2patterns.grammars.STIXPatternListener.STIXPatternListener (or
subclass) instance."""
antlr4.ParseTreeWalker.DEFAULT.walk(listener, self.__parse_tree)
|
python
|
def parse_fragment(lexer: Lexer) -> Union[FragmentSpreadNode, InlineFragmentNode]:
"""Corresponds to both FragmentSpread and InlineFragment in the spec.
FragmentSpread: ... FragmentName Directives?
InlineFragment: ... TypeCondition? Directives? SelectionSet
"""
start = lexer.token
expect_token(lexer, TokenKind.SPREAD)
has_type_condition = expect_optional_keyword(lexer, "on")
if not has_type_condition and peek(lexer, TokenKind.NAME):
return FragmentSpreadNode(
name=parse_fragment_name(lexer),
directives=parse_directives(lexer, False),
loc=loc(lexer, start),
)
return InlineFragmentNode(
type_condition=parse_named_type(lexer) if has_type_condition else None,
directives=parse_directives(lexer, False),
selection_set=parse_selection_set(lexer),
loc=loc(lexer, start),
)
|
java
|
public CmsUser readOwner(CmsDbContext dbc, CmsProject project) throws CmsException {
return readUser(dbc, project.getOwnerId());
}
|
java
|
public void ip_mitigationProfiles_ipMitigationProfile_PUT(String ip, String ipMitigationProfile, OvhMitigationProfile body) throws IOException {
String qPath = "/ip/{ip}/mitigationProfiles/{ipMitigationProfile}";
StringBuilder sb = path(qPath, ip, ipMitigationProfile);
exec(qPath, "PUT", sb.toString(), body);
}
|
java
|
public Selector getSelector(String selector)
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
tc.entry(cclass,
"getSelector",
"selector: " + selector);
// Start by parsing the selector expression (in-line for the moment)
selOperands = new ArrayList();
// We'll Construct a selector tree
Selector selectorTree = null;
try
{
parseSelector(selector);
Selector[] selectorArray = new Selector[selOperands.size()];
selOperands.toArray(selectorArray);
// Iterate over the nodes that were passed into us
for(int i = selectorArray.length - 1 ; i>=0; i--)
{
Selector parsedSelector = (Selector)selectorArray[i];
if(selectorTree == null)
selectorTree = parsedSelector;
else
selectorTree = matching.createExtensionOperator(Selector.AND, parsedSelector, selectorTree);
}
}
catch(InvalidXPathSyntaxException iex)
{
// No FFDC Code Needed.
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
tc.debug(this,cclass, "getSelector", iex);
// Attempt to parse the entire selector expression. If that fails, then
// we really can despair of handling this expression.
selectorTree = parseWholeSelector(selector);
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
tc.exit(this,cclass, "getSelector", selectorTree);
return selectorTree;
}
|
python
|
def res_1to1(pst,logger=None,filename=None,plot_hexbin=False,histogram=False,**kwargs):
""" make 1-to-1 plots and also observed vs residual by observation group
Parameters
----------
pst : pyemu.Pst
logger : Logger
if None, a generic one is created. Default is None
filename : str
PDF filename to save figures to. If None, figures are returned. Default is None
kwargs : dict
optional keyword args to pass to plotting functions
TODO: color symbols by weight
"""
if logger is None:
    logger = Logger('Default_Logger.log', echo=False)
logger.log("plot res_1to1")
if "ensemble" in kwargs:
try:
res=pst_utils.res_from_en(pst,kwargs['ensemble'])
except Exception as e:
logger.lraise("res_1to1: error loading ensemble file: {0}".format( str(e)))
else:
try:
res = pst.res
except Exception:
    logger.lraise("res_1to1: pst.res is None, couldn't find residuals file")
obs = pst.observation_data
if "grouper" in kwargs:
raise NotImplementedError()
else:
grouper = obs.groupby(obs.obgnme).groups
fig = plt.figure(figsize=figsize)
if "fig_title" in kwargs:
plt.figtext(0.5,0.5,kwargs["fig_title"])
else:
plt.figtext(0.5, 0.5, "pyemu.Pst.plot(kind='1to1')\nfrom pest control file '{0}'\n at {1}"
.format(pst.filename, str(datetime.now())), ha="center")
#if plot_hexbin:
# pdfname = pst.filename.replace(".pst", ".1to1.hexbin.pdf")
#else:
# pdfname = pst.filename.replace(".pst", ".1to1.pdf")
figs = []
ax_count = 0
for g, names in grouper.items():
logger.log("plotting 1to1 for {0}".format(g))
obs_g = obs.loc[names, :]
obs_g.loc[:, "sim"] = res.loc[names, "modelled"]
logger.statement("using control file obsvals to calculate residuals")
obs_g.loc[:,'res'] = obs_g.sim - obs_g.obsval
if "include_zero" not in kwargs or kwargs["include_zero"] is True:
obs_g = obs_g.loc[obs_g.weight > 0, :]
if obs_g.shape[0] == 0:
logger.statement("no non-zero obs for group '{0}'".format(g))
logger.log("plotting 1to1 for {0}".format(g))
continue
if ax_count % (nr * nc) == 0:
if ax_count > 0:
plt.tight_layout()
#pdf.savefig()
#plt.close(fig)
figs.append(fig)
fig = plt.figure(figsize=figsize)
axes = get_page_axes()
ax_count = 0
ax = axes[ax_count]
#if obs_g.shape[0] == 1:
# ax.scatter(list(obs_g.sim),list(obs_g.obsval),marker='.',s=30,color='b')
#else:
mx = max(obs_g.obsval.max(), obs_g.sim.max())
mn = min(obs_g.obsval.min(), obs_g.sim.min())
#if obs_g.shape[0] == 1:
mx *= 1.1
mn *= 0.9
ax.axis('square')
if plot_hexbin:
ax.hexbin(obs_g.obsval.values, obs_g.sim.values, mincnt=1, gridsize=(75, 75),
extent=(mn, mx, mn, mx), bins='log', edgecolors=None)
# plt.colorbar(ax=ax)
else:
ax.scatter([obs_g.obsval], [obs_g.sim], marker='.', s=10, color='b')
ax.plot([mn,mx],[mn,mx],'k--',lw=1.0)
xlim = (mn,mx)
ax.set_xlim(mn,mx)
ax.set_ylim(mn,mx)
ax.grid()
ax.set_xlabel("observed",labelpad=0.1)
ax.set_ylabel("simulated",labelpad=0.1)
ax.set_title("{0}) group:{1}, {2} observations".
format(abet[ax_count], g, obs_g.shape[0]), loc="left")
ax_count += 1
if histogram==False:
ax = axes[ax_count]
ax.scatter(obs_g.obsval, obs_g.res, marker='.', s=10, color='b')
ylim = ax.get_ylim()
mx = max(np.abs(ylim[0]), np.abs(ylim[1]))
if obs_g.shape[0] == 1:
mx *= 1.1
ax.set_ylim(-mx, mx)
#show a zero residuals line
ax.plot(xlim, [0,0], 'k--', lw=1.0)
meanres= obs_g.res.mean()
# show mean residuals line
ax.plot(xlim,[meanres,meanres], 'r-', lw=1.0)
ax.set_xlim(xlim)
ax.set_ylabel("residual",labelpad=0.1)
ax.set_xlabel("observed",labelpad=0.1)
ax.set_title("{0}) group:{1}, {2} observations".
format(abet[ax_count], g, obs_g.shape[0]), loc="left")
ax.grid()
ax_count += 1
else:
ax = axes[ax_count]
ax.hist(obs_g.res, 50, color='b')
meanres= obs_g.res.mean()
ax.axvline(meanres, color='r', lw=1)
b,t = ax.get_ylim()
ax.text(meanres + meanres/10,
t - t/10,
'Mean: {:.2f}'.format(meanres))
ax_count += 1
logger.log("plotting 1to1 for {0}".format(g))
for a in range(ax_count, nr * nc):
axes[a].set_axis_off()
axes[a].set_yticks([])
axes[a].set_xticks([])
plt.tight_layout()
#pdf.savefig()
#plt.close(fig)
figs.append(fig)
if filename is not None:
with PdfPages(filename) as pdf:
for fig in figs:
pdf.savefig(fig)
plt.close(fig)
logger.log("plot res_1to1")
else:
logger.log("plot res_1to1")
return figs
|
python
|
def comparebed(args):
"""
%prog comparebed AP.chr.bed infer.bed
Compare the scaffold links indicated in two bed files.
"""
p = OptionParser(comparebed.__doc__)
opts, args = p.parse_args(args)
if len(args) != 2:
sys.exit(not p.print_help())
abed, bbed = args
abed = Bed(abed)
bbed = Bed(bbed)
query_links(abed, bbed)
query_links(bbed, abed)
|
java
|
public static <T> Iterator<T> concat(final Iterator<? extends T>... iterators) {
return new ImmutableIterator<T>() {
int current = 0;
@Override
public boolean hasNext() {
advance();
return current < iterators.length;
}
@Override
public T next() {
advance();
try {
return iterators[current].next();
} catch (ArrayIndexOutOfBoundsException e) {
throw new NoSuchElementException();
}
}
private void advance() {
while (current < iterators.length && !iterators[current].hasNext()) {
current++;
}
}
};
}
|
python
|
def RemoveEventHandler(self, wb):
""" Removes an event handler. """
from UcsBase import WriteUcsWarning
if wb in self._wbs:
self._remove_watch_block(wb)
else:
WriteUcsWarning("Event handler not found")
|
java
|
public static JInternalFrame newInternalFrame(final String title, final boolean resizable,
final boolean closable, final boolean maximizable, final boolean iconifiable)
{
final JInternalFrame internalFrame = new JInternalFrame(title, resizable, closable,
maximizable, iconifiable);
return internalFrame;
}
|
java
|
public void marshall(DeleteUserRequest deleteUserRequest, ProtocolMarshaller protocolMarshaller) {
if (deleteUserRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(deleteUserRequest.getUserName(), USERNAME_BINDING);
protocolMarshaller.marshall(deleteUserRequest.getAwsAccountId(), AWSACCOUNTID_BINDING);
protocolMarshaller.marshall(deleteUserRequest.getNamespace(), NAMESPACE_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
}
|
python
|
def call_hook(message,
attachment=None,
color='good',
short=False,
identifier=None,
channel=None,
username=None,
icon_emoji=None):
'''
Send message to Slack incoming webhook.
:param message: The topic of message.
:param attachment: The message to send to the Slack WebHook.
:param color: The color of border of left side
:param short: An optional flag indicating whether the value is short
enough to be displayed side-by-side with other values.
:param identifier: The identifier of WebHook.
:param channel: The channel to use instead of the WebHook default.
:param username: Username to use instead of WebHook default.
:param icon_emoji: Icon to use instead of WebHook default.
:return: Boolean if message was sent successfully.
CLI Example:
.. code-block:: bash
salt '*' slack.call_hook message='Hello, from SaltStack'
'''
base_url = 'https://hooks.slack.com/services/'
if not identifier:
identifier = _get_hook_id()
url = _urljoin(base_url, identifier)
if not message:
log.error('message is required option')
if attachment:
payload = {
'attachments': [
{
'fallback': message,
'color': color,
'pretext': message,
'fields': [
{
"value": attachment,
"short": short,
}
]
}
]
}
else:
payload = {
'text': message,
}
if channel:
payload['channel'] = channel
if username:
payload['username'] = username
if icon_emoji:
payload['icon_emoji'] = icon_emoji
data = _urlencode(
{
'payload': salt.utils.json.dumps(payload)
}
)
result = salt.utils.http.query(url, method='POST', data=data, status=True)
if result['status'] <= 201:
return True
else:
return {
'res': False,
'message': result.get('body', result['status'])
}
|
python
|
def _get_xy_scaling_parameters(self):
"""Get the X/Y coordinate limits for the full resulting image"""
return self.mx, self.bx, self.my, self.by
|
python
|
def set_active_current(self, settings):
'''
Sets the amperage of each motor for when it is activated by driver.
Values are initialized from the `robot_config.high_current` values,
and can then be changed through this method by other parts of the API.
For example, `Pipette` setting the active-current of its pipette,
depending on what model pipette it is, and what action it is performing
settings
    Dict with axes as keys (e.g. 'X', 'Y', 'Z', 'A', 'B', or 'C')
    and a floating point number for current (generally between 0.1 and 2)
'''
self._active_current_settings['now'].update(settings)
# if an axis specified in the `settings` is currently active,
# reset its current to the new active-current value
active_axes_to_update = {
axis: amperage
for axis, amperage in self._active_current_settings['now'].items()
if self._active_axes.get(axis) is True
if self.current[axis] != amperage
}
if active_axes_to_update:
self._save_current(active_axes_to_update, axes_active=True)
|
java
|
public List<Point3D_F64> getLandmark3D( int version ) {
int N = QrCode.totalModules(version);
set3D( 0,0,N,point3D.get(0));
set3D( 0,7,N,point3D.get(1));
set3D( 7,7,N,point3D.get(2));
set3D( 7,0,N,point3D.get(3));
set3D( 0,N-7,N,point3D.get(4));
set3D( 0,N,N,point3D.get(5));
set3D( 7,N,N,point3D.get(6));
set3D( 7,N-7,N,point3D.get(7));
set3D( N-7,0,N,point3D.get(8));
set3D( N-7,7,N,point3D.get(9));
set3D( N,7,N,point3D.get(10));
set3D( N,0,N,point3D.get(11));
return point3D;
}
|
java
|
public static void setXwpfDocument(String path) throws IOException {
    // try-with-resources ensures the stream is closed even if parsing fails
    try (InputStream is = new FileInputStream(path)) {
        xwpfDocument = new XWPFDocument(is);
    }
}
|
python
|
def parse_file(cls, filename, encoding='utf-8', required=True, resolve=True, unresolved_value=DEFAULT_SUBSTITUTION):
"""Parse file
:param filename: filename
:type filename: basestring
:param encoding: file encoding
:type encoding: basestring
    :param required: If true, raises an exception if the file cannot be loaded
:type required: boolean
:param resolve: if true, resolve substitutions
:type resolve: boolean
    :param unresolved_value: value assigned to unresolved substitutions.
    If overridden with a default value, all unresolved substitutions are replaced by that value.
    If set to pyhocon.STR_SUBSTITUTION, the value is replaced by its substitution expression (e.g., ${x})
    :type unresolved_value: boolean
:return: Config object
    :rtype: Config
"""
try:
with codecs.open(filename, 'r', encoding=encoding) as fd:
content = fd.read()
return cls.parse_string(content, os.path.dirname(filename), resolve, unresolved_value)
except IOError as e:
if required:
raise e
        logger.warning('Cannot include file %s. File does not exist or cannot be read.', filename)
return []
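# Usage sketch, assuming this is pyhocon's ConfigFactory.parse_file and an
# 'application.conf' file exists alongside the script:
from pyhocon import ConfigFactory
conf = ConfigFactory.parse_file('application.conf')
db_host = conf.get_string('database.host', 'localhost')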
|
python
|
def local_list(timestamps=True):
"""
Return a list of the locally available dictionnaries. Each element is a
tuple of the dictionnary name and its last modification date as a
timestamp.
"""
init_storage()
lst = []
for d in glob(os.path.join(DICTS_PATH, '*.txt')):
        name = os.path.splitext(os.path.basename(d))[0]  # portable across path separators
lst.append((name, os.path.getmtime(d)) if timestamps else name)
return lst
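# Usage sketch: print each dictionary with a readable modification date.
import datetime
for name, mtime in local_list():
    print(name, datetime.datetime.fromtimestamp(mtime).isoformat())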
|
python
|
def is_valid_scalar(self, node: ValueNode) -> None:
"""Check whether this is a valid scalar.
Any value literal may be a valid representation of a Scalar, depending on that
scalar type.
"""
# Report any error at the full type expected by the location.
location_type = self.context.get_input_type()
if not location_type:
return
type_ = get_named_type(location_type)
if not is_scalar_type(type_):
self.report_error(
GraphQLError(
bad_value_message(
location_type,
print_ast(node),
enum_type_suggestion(type_, node),
),
node,
)
)
return
# Scalars determine if a literal value is valid via `parse_literal()` which may
# throw or return an invalid value to indicate failure.
type_ = cast(GraphQLScalarType, type_)
try:
parse_result = type_.parse_literal(node)
if is_invalid(parse_result):
self.report_error(
GraphQLError(
bad_value_message(location_type, print_ast(node)), node
)
)
except Exception as error:
# Ensure a reference to the original error is maintained.
self.report_error(
GraphQLError(
bad_value_message(location_type, print_ast(node), str(error)),
node,
original_error=error,
)
)
|
python
|
def rev_comp(dna):
    """
    Reverse complement a DNA sequence in place.

    Parameters
    ----------
    dna : bytearray
        dna sequence to be reverse complemented
    """
    i2 = len(dna) - 1
    l = len(dna) // 2
    for i in range(0, l):
        b = DNA_COMP_DICT[dna[i]]
        dna[i] = DNA_COMP_DICT[dna[i2]]
        dna[i2] = b
        i2 -= 1
    # an odd-length sequence has a middle base the swap loop never touches;
    # it keeps its position but still needs complementing
    if len(dna) % 2:
        dna[l] = DNA_COMP_DICT[dna[l]]
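# Usage sketch; DNA_COMP_DICT is assumed to map complementary byte values
# (e.g. ord('A') <-> ord('T'), ord('C') <-> ord('G')):
seq = bytearray(b'ACCGT')
rev_comp(seq)
assert seq == bytearray(b'ACGGT')  # reverse complement, computed in place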
|
python
|
def write_changelog(debug=False):
"""Write a changelog based on the git changelog."""
changelog = _iter_log_oneline(debug)
if changelog:
changelog = _iter_changelog(changelog)
if not changelog:
return
if debug:
print('Writing ChangeLog')
new_changelog = os.path.join(os.path.curdir, 'ChangeLog')
# If there's already a ChangeLog and it's not writable, just use it
if (os.path.exists(new_changelog)
and not os.access(new_changelog, os.W_OK)):
return
with io.open(new_changelog, 'w', encoding='utf-8') as changelog_file:
for release, content in changelog:
changelog_file.write(content)
|
java
|
public void setDescription(CmsObject cms, String description) {
if (CmsStringUtil.isEmptyOrWhitespaceOnly(description)) {
m_description = "";
if (m_element.element(PARAM_DESCRIPTION) != null) {
m_element.remove(m_element.element(PARAM_DESCRIPTION));
}
} else {
m_description = description;
description = CmsEncoder.escapeWBlanks(description, CmsEncoder.ENCODING_UTF_8);
}
setParameterValue(cms, PARAM_DESCRIPTION, description);
}
|
java
|
public static byte[] longToBytesNoLeadZeroes(long val) {
    // todo: improve performance by stripping bytes in a loop until (val >>> 8) == 0
if (val == 0)
return EMPTY_BYTE_ARRAY;
byte[] data = ByteBuffer.allocate(8).putLong(val).array();
return stripLeadingZeroes(data);
}
|
python
|
def get_usage(self, loadbalancer=None, start=None, end=None):
"""
Return the load balancer usage records for this account. If 'loadbalancer'
is None, records for all load balancers are returned. You may optionally
include a start datetime or an end datetime, or both, which will limit
the records to those on or after the start time, and those before or on the
end time. These times should be Python datetime.datetime objects, Python
datetime.date objects, or strings in the format: "YYYY-MM-DD HH:MM:SS" or
"YYYY-MM-DD".
"""
return self._manager.get_usage(loadbalancer=loadbalancer, start=start,
end=end)
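# Usage sketch; 'clb' stands in for whatever object exposes this method and
# 'my_lb' for a load balancer instance:
import datetime
records = clb.get_usage(loadbalancer=my_lb,
                        start=datetime.date(2015, 1, 1),
                        end=datetime.date(2015, 1, 31))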
|
java
|
public static Key JOIN_PROMPT(String value, Voice voice) {
Map<String, String> map = new HashMap<String, String>();
map.put("value", value);
if (voice != null) {
map.put("voice", voice.toString());
}
return createKey("joinPrompt", map);
}
|
java
|
public void senSegment(String text, List<String> senList) {
    senList.clear();
    String resultStr = senSegment(text);
    StringTokenizer senTknr = new StringTokenizer(resultStr, "\n");
    while (senTknr.hasMoreTokens()) {
        senList.add(senTknr.nextToken());
    }
}
|
java
|
public static <T> BufferedIterator<T> buffered(Iterator<T> self) {
if (self instanceof BufferedIterator) {
return (BufferedIterator<T>) self;
} else {
return new IteratorBufferedIterator<T>(self);
}
}
|
java
|
@Override
public Number getNumber(int index) {
check(index);
return CBLConverter.asNumber(fleeceValueToObject(index));
}
|
python
|
def error_handler(_, event):
# type: (Any, Any) -> int
""" Specifies the program's supplied error handler. """
evt = event.contents
ERROR.details = {
"type": evt.type,
"serial": evt.serial,
"error_code": evt.error_code,
"request_code": evt.request_code,
"minor_code": evt.minor_code,
}
return 0
|
python
|
def main(argv=None):
"""
September 18, 2014: if an arg is passed, we visualize it
Otherwise a simple shell gets opened.
"""
print("Ontospy " + ontospy.VERSION)
ontospy.get_or_create_home_repo()
if argv:
print("Argument passing not implemented yet")
if False:
onto = Model(argv[0])
for x in onto.get_classes():
print(x)
onto.buildPythonClasses()
s = Sketch()
else:
intro = """Good morning. Ready to Turtle away. Type docs() for help."""
# idea: every time provide a different ontology maxim!
def docs():
print("\n".join([x.strip() for x in Sketch.__doc__.splitlines()]))
default_sketch = Sketch()
def add(text=""):
default_sketch.add(text)
def show(aformat=None):
if aformat:
default_sketch.show(aformat)
else:
default_sketch.show()
def bind(prefixTuple):
default_sketch.bind(prefixTuple)
def clear():
default_sketch.clear()
def omnigraffle():
default_sketch.omnigraffle()
try:
# note: this requires IPython 0.11 or above
import IPython
IPython.embed(banner1=intro)
        except Exception:  # fall back when IPython is missing or too old
import code
code.interact(banner=intro, local=dict(globals(), **locals()))
# finally
sys.exit(0)
|
java
|
public final static String getDefaultType() {
String cpvtype =
AccessController.doPrivileged(new PrivilegedAction<String>() {
public String run() {
return Security.getProperty(CPV_TYPE);
}
});
return (cpvtype == null) ? "PKIX" : cpvtype;
}
|
java
|
@Nonnull
public final XMLWriterSettings setIncorrectCharacterHandling (@Nonnull final EXMLIncorrectCharacterHandling eIncorrectCharacterHandling)
{
m_eIncorrectCharacterHandling = ValueEnforcer.notNull (eIncorrectCharacterHandling, "IncorrectCharacterHandling");
return this;
}
|
java
|
private void type(String type) throws IOException {
if (this.packagePrefix == null) {
throw new IllegalStateException();
}
Matcher m = TYPE_PATTERN.matcher(type);
int pos = 0;
while (true) {
boolean found = m.find(pos);
// copy non-matching characters like "<"
int typeStart = found ? m.start() : type.length();
out.write(type, pos, typeStart - pos);
if (!found) {
break;
}
// copy a single class name, shortening it if possible
String name = m.group(0);
String imported;
if ((imported = importedTypes.get(name)) != null) {
out.write(imported);
} else if (name.startsWith(packagePrefix)
&& name.indexOf('.', packagePrefix.length()) == -1) {
out.write(name.substring(packagePrefix.length()));
} else if (name.startsWith("java.lang.")) {
out.write(name.substring("java.lang.".length()));
} else {
out.write(name);
}
pos = m.end();
}
}
|
python
|
def call(func, max_attempts=None, exceptions=Exception, wait=0.0,
cleanup_hook=None, pre_retry_hook=None):
""" :param func (callable):
The function to retry. No arguments are passed to this function.
If your function requires arguments, consider defining a separate
function or use functools.partial / a lambda function.
:param max_attempts:
Any integer number to limit the maximum number of attempts.
Set to None for unlimited retries.
:param exceptions:
A tuple of exceptions that should result in a retry. Catches
everything derived from 'Exception' by default.
:param wait:
This can be an integer / float to specify the waittime in seconds
before the next attempt. You can also pass a function which accepts
a single argument 'attempt'.
:param cleanup_hook:
Can be set to a callable and will be called after an exception is
raised from calling `func`.
No arguments are passed to this function.
If your function requires arguments, consider defining a separate
function or use functools.partial / a lambda function.
:param pre_retry_hook:
        Can be set to any callable that will be called before `func`
is called.
No arguments are passed to this function.
If your function requires arguments, consider defining a separate
function or use functools.partial / a lambda function.
If `wait` is set, `pre_retry_hook` will be called before the
waittime.
Exceptions that are raised when calling this hook are not caught.
:returns:
The result of calling the given `func`.
:raises:
Any exception which is
- not in the given `exceptions`
- raised in `pre_retry_hook` or in `cleanup_hook`
- raised in the last attempt at calling `func`
"""
# we check the callables in advance to prevent raising exceptions
# after making the first attempt
_assert_callable(func, allow_none=False)
_assert_callable(cleanup_hook, allow_none=True)
_assert_callable(pre_retry_hook, allow_none=True)
if not (max_attempts is None or max_attempts >= 1):
raise ValueError('max_attempts must be None or an integer >= 1')
# if the user sets the waittime to a fixed value (int or float) we create
# a function which always returns this fixed value. This way we avoid
# having to make this decision in the retry loop.
wait_func = wait if type(wait) not in [int, float] else lambda _: wait
_assert_callable(wait_func, allow_none=False)
def log_failed_attempt(attempt, error):
if max_attempts is None:
nr_display = '{0}'.format(attempt)
else:
nr_display = '{0} / {1}'.format(attempt, max_attempts)
logger.debug('Attempt {nr} at calling {func} failed ({msg})'
.format(nr=nr_display, func=func, msg=error))
for attempt, f in enumerate(_repeat(func, max_attempts), start=1):
try:
return f()
except exceptions as e:
log_failed_attempt(attempt=attempt, error=e)
if cleanup_hook is not None:
cleanup_hook()
if attempt == max_attempts:
raise
if wait:
waittime = wait_func(attempt)
time.sleep(waittime)
if pre_retry_hook is not None:
pre_retry_hook()
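# Usage sketch: retry a flaky download up to 5 attempts with linear backoff.
# 'download' and its URL are hypothetical stand-ins for the real callable.
import functools
result = call(functools.partial(download, 'https://example.com/data'),
              max_attempts=5,
              exceptions=(IOError, OSError),
              wait=lambda attempt: 0.5 * attempt)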
|
python
|
def ConvCnstrMOD(*args, **kwargs):
"""A wrapper function that dynamically defines a class derived from
one of the implementations of the Convolutional Constrained MOD
problems, and returns an object instantiated with the provided
parameters. The wrapper is designed to allow the appropriate
object to be created by calling this function using the same
syntax as would be used if it were a class. The specific
implementation is selected by use of an additional keyword
argument 'method'. Valid values are:
- ``'ism'`` :
Use the implementation defined in :class:`.ConvCnstrMOD_IterSM`. This
method works well for a small number of training images, but is very
slow for larger training sets.
- ``'cg'`` :
Use the implementation defined in :class:`.ConvCnstrMOD_CG`. This
method is slower than ``'ism'`` for small training sets, but has better
run time scaling as the training set grows.
- ``'cns'`` :
Use the implementation defined in :class:`.ConvCnstrMOD_Consensus`.
This method is a good choice for large training sets.
- ``'fista'`` :
Use the implementation defined in :class:`.fista.ccmod.ConvCnstrMOD`.
This method is the best choice for large training sets.
The default value is ``'fista'``.
"""
# Extract method selection argument or set default
method = kwargs.pop('method', 'fista')
# Assign base class depending on method selection argument
base = ccmod_class_label_lookup(method)
# Nested class with dynamically determined inheritance
class ConvCnstrMOD(base):
def __init__(self, *args, **kwargs):
super(ConvCnstrMOD, self).__init__(*args, **kwargs)
# Allow pickling of objects of type ConvCnstrMOD
_fix_dynamic_class_lookup(ConvCnstrMOD, method)
# Return object of the nested class type
return ConvCnstrMOD(*args, **kwargs)
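# Usage sketch following the SPORCO calling convention; the array shapes and
# filter size below are illustrative assumptions:
import numpy as np
S = np.random.randn(64, 64, 10)          # ten 64x64 training images
Z = np.random.randn(64, 64, 1, 10, 32)   # coefficient maps for 32 filters
d = ConvCnstrMOD(Z, S, dsz=(8, 8, 32), method='cns')
D = d.solve()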
|
java
|
public static JSONObject getWidgetConfigurationAsJSON(String widgetConfiguration) {
JSONObject result = new JSONObject();
if (CmsStringUtil.isEmptyOrWhitespaceOnly(widgetConfiguration)) {
return result;
}
Map<String, String> confEntries = CmsStringUtil.splitAsMap(
widgetConfiguration,
CONF_PARAM_SEPARATOR,
CONF_KEYVALUE_SEPARATOR);
for (Map.Entry<String, String> entry : confEntries.entrySet()) {
try {
result.put(entry.getKey(), entry.getValue());
} catch (JSONException e) {
// should never happen
LOG.error(
Messages.get().container(Messages.ERR_XMLCONTENT_UNKNOWN_ELEM_PATH_SCHEMA_1, widgetConfiguration),
e);
}
}
return result;
}
|
java
|
public Object sqlToJava(Object source)
{
    try
    {
        ByteArrayInputStream stringIn =
            new ByteArrayInputStream(((String) source).getBytes());
        // try-with-resources closes the whole decode chain even on failure
        try (ObjectInputStream objIn = new ObjectInputStream(
                new GZIPInputStream(
                    new Base64.InputStream(stringIn, Base64.DECODE, false))))
        {
            return objIn.readObject();
        }
    }
    catch (Throwable t)
    {
        throw new ConversionException(t);
    }
}
|
java
|
public static <T> List<T> toposort(List<T> inputs,
T root, final Deps<T> deps, boolean isFullCut) {
// Get inputs as a set.
final HashSet<T> inputSet = new HashSet<T>(inputs);
if (inputSet.size() != inputs.size()) {
throw new IllegalStateException("Multiple copies of module in inputs list: " + inputs);
}
return toposort(inputSet, root, deps, isFullCut);
}
|