language | func_code_string
---|---
python
|
def _init_modules_stub(self, **_):
    """Initializes the modules stub based off of your current yaml files
    Implements solution from
    http://stackoverflow.com/questions/28166558/invalidmoduleerror-when-using-testbed-to-unit-test-google-app-engine
    """
    from google.appengine.api import request_info
    # edit all_versions per modules & versions thereof needing tests
    all_versions = {}  # {'default': [1], 'andsome': [2], 'others': [1]}
    def_versions = {}  # {m: all_versions[m][0] for m in all_versions}
    m2h = {}  # {m: {def_versions[m]: 'localhost:8080'} for m in def_versions}
    for module in self.configuration.modules:
        module_name = module._module_name or 'default'
        module_version = module._version or '1'
        all_versions[module_name] = [module_version]
        def_versions[module_name] = module_version
        m2h[module_name] = {module_version: 'localhost:8080'}
    request_info._local_dispatcher = request_info._LocalFakeDispatcher(
        module_names=list(all_versions),
        module_name_to_versions=all_versions,
        module_name_to_default_versions=def_versions,
        module_name_to_version_to_hostname=m2h)
    self.testbed.init_modules_stub()
|
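A minimal sketch of how this helper might be wired into a test case. The test class is hypothetical; it assumes the mixin also provides `self.configuration` parsed from your yaml files, which the snippet above reads:

import unittest
from google.appengine.ext import testbed


class ModulesTestCase(unittest.TestCase):
    def setUp(self):
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        # hypothetical: self.configuration is loaded from app.yaml / module
        # yamls elsewhere in the mixin before this call
        self._init_modules_stub()

    def tearDown(self):
        self.testbed.deactivate()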
java
|
public static vlan_channel_binding[] get(nitro_service service, Long id) throws Exception {
    vlan_channel_binding obj = new vlan_channel_binding();
    obj.set_id(id);
    vlan_channel_binding[] response = (vlan_channel_binding[]) obj.get_resources(service);
    return response;
}
|
python
|
async def traverse_settings(dev, module, settings, depth=0):
    """Print all available settings."""
    for setting in settings:
        if setting.is_directory:
            print("%s%s (%s)" % (depth * " ", setting.title, module))
            # Recurse without returning, so settings that come after the
            # first directory in this list are still visited.
            await traverse_settings(dev, module, setting.settings, depth + 2)
        else:
            try:
                print_settings([await setting.get_value(dev)], depth=depth)
            except SongpalException as ex:
                err("Unable to read setting %s: %s" % (setting, ex))
                continue
|
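A hedged usage sketch. The device-connection helper and the `get_settings()` coroutine are assumptions about the surrounding songpal-style API, not confirmed by the snippet:

import asyncio

async def main():
    # hypothetical: obtain a connected device and its top-level settings
    dev = await connect_device("http://192.168.1.50:10000/sony")  # hypothetical helper
    settings = await dev.get_settings()  # assumed API
    await traverse_settings(dev, "system", settings)

asyncio.run(main())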
java
|
public void setRolePolicyList(java.util.Collection<PolicyDetail> rolePolicyList) {
    if (rolePolicyList == null) {
        this.rolePolicyList = null;
        return;
    }
    this.rolePolicyList = new com.amazonaws.internal.SdkInternalList<PolicyDetail>(rolePolicyList);
}
|
java
|
public static Number numberFromSql(Connection connection, String sql, Object... args) throws SQLException
{
    return OrmReader.numberFromSql(connection, sql, args);
}
|
python
|
def start_span(self, request, headers, peer_host, peer_port):
    """
    Start a new server-side span. If the span has already been started
    by `start_basic_span`, this method only adds baggage from the headers.
    :param request: inbound tchannel.tornado.request.Request
    :param headers: dictionary containing parsed application headers
    :param peer_host: remote peer's host, used for the peer host tag
    :param peer_port: remote peer's port, used for the peer port tag
    :return: the server-side span
    """
    parent_context = None
    # noinspection PyBroadException
    try:
        if headers and hasattr(headers, 'iteritems'):
            tracing_headers = {
                k[len(TRACING_KEY_PREFIX):]: v
                for k, v in headers.iteritems()
                if k.startswith(TRACING_KEY_PREFIX)
            }
            parent_context = self.tracer.extract(
                format=opentracing.Format.TEXT_MAP,
                carrier=tracing_headers
            )
        if self.span and parent_context:
            # we already started a span from Tracing fields,
            # so only copy baggage from the headers.
            for k, v in parent_context.baggage.iteritems():
                self.span.set_baggage_item(k, v)
    except:
        log.exception('Cannot extract tracing span from headers')
    if self.span is None:
        self.span = self.tracer.start_span(
            operation_name=request.endpoint,
            child_of=parent_context,
            tags={tags.SPAN_KIND: tags.SPAN_KIND_RPC_SERVER},
        )
    if 'cn' in request.headers:
        self.span.set_tag(tags.PEER_SERVICE, request.headers['cn'])
    if peer_host:
        self.span.set_tag(tags.PEER_HOST_IPV4, peer_host)
    if peer_port:
        self.span.set_tag(tags.PEER_PORT, peer_port)
    if 'as' in request.headers:
        self.span.set_tag('as', request.headers['as'])
    return self.span
|
java
|
protected final void completedReceived(AIStreamKey key, boolean reissueGet)
{
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.entry(this, tc, "completedReceived", new Object[] {key, Boolean.valueOf(reissueGet)});
    completedReceivedNoPrefetch(key, reissueGet);
    try
    {
        if (readAhead)
            tryPrefetching();
    }
    catch (SIResourceException e)
    {
        FFDCFilter.processException(
            e,
            "com.ibm.ws.sib.processor.impl.RemoteQPConsumerKey.completedReceived",
            "1:568:1.47.1.26",
            this);
        SibTr.exception(tc, e);
        // no need to throw this exception, since it's only a failure in prefetching
    }
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.exit(this, tc, "completedReceived");
}
|
java
|
public static View create(String name, String map, String reduce) {
    return new DefaultView(name, map, reduce);
}
|
java
|
protected void updateText()
{
    this.text.setLength(0);
    this.text.append(password.toString().replaceAll("(?s).", String.valueOf(passwordChar)));
    guiText.setText(text.toString());
}
|
python
|
def get(self):
    """Returns existing value, or None if deadline has expired."""
    if self.timer() > self.deadline:
        self.value = None
    return self.value
|
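To make the timer/deadline pattern concrete, here is a minimal self-contained sketch of an object this method could live on. The class name and constructor are hypothetical; only `get` mirrors the snippet:

import time


class ExpiringValue:
    """Hypothetical holder illustrating the timer/deadline pattern above."""

    def __init__(self, value, ttl_seconds, timer=time.monotonic):
        self.value = value
        self.timer = timer
        self.deadline = timer() + ttl_seconds

    def get(self):
        """Returns existing value, or None if deadline has expired."""
        if self.timer() > self.deadline:
            self.value = None
        return self.value


token = ExpiringValue("abc123", ttl_seconds=0.01)
assert token.get() == "abc123"
time.sleep(0.02)
assert token.get() is None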
java
|
protected Throwable composeErrors(Throwable... errors) {
    if (errors == null) {
        return new NullPointerException("errors");
    } else if (errors.length == 1) {
        return errors[0];
    } else {
        return new ComposedException(errors);
    }
}
|
python
|
def sample_discrete_from_log(p_log, axis=0, dtype=np.int32):
    'samples log probability array along specified axis'
    cumvals = np.exp(p_log - np.expand_dims(p_log.max(axis), axis)).cumsum(axis)  # cumlogaddexp
    thesize = np.array(p_log.shape)
    thesize[axis] = 1
    # `random` is numpy.random.random; index with a tuple (a list of slices is
    # rejected by modern numpy) and compare ints with != rather than identity
    randvals = random(size=thesize) * \
        np.reshape(cumvals[tuple(slice(None) if i != axis else -1
                                 for i in range(p_log.ndim))], thesize)
    return np.sum(randvals > cumvals, axis=axis, dtype=dtype)
|
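A quick usage check, assuming `from numpy.random import random` is in scope as the function expects: draw many samples per column from unnormalized log-probabilities and verify the empirical frequencies.

import numpy as np
from numpy.random import random

p_log = np.log(np.array([[0.2, 0.9],
                         [0.8, 0.1]]))
# one draw per column; index 1 should dominate column 0 but not column 1
draws = np.array([sample_discrete_from_log(p_log, axis=0) for _ in range(1000)])
print(draws.mean(axis=0))  # roughly [0.8, 0.1]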
python
|
def build_table(self):
    """Build table."""
    headers = ['Title', 'Seeders', 'Leechers', 'Age', 'Size']
    titles = []
    seeders = []
    leechers = []
    ages = []
    sizes = []
    if self.page == 'torrent_project':
        titles = [list(span.find('a').stripped_strings)[0]
                  for span in self.elements[0]]
        seeders = [span.get_text() for span in self.elements[1]]
        leechers = [span.get_text() for span in self.elements[2]]
        ages = [span.get_text() for span in self.elements[3]]
        sizes = [span.get_text() for span in self.elements[4]]
        # Torrents
        self.hrefs = [self.domain +
                      span.find('a')['href']
                      for span in self.elements[0]]
    elif self.page == 'the_pirate_bay':
        for elem in self.elements[0]:
            title = elem.find('a', {'class': 'detLink'}).get_text()
            titles.append(title)
            font_text = elem.find(
                'font', {'class': 'detDesc'}).get_text()
            dammit = UnicodeDammit(font_text)
            age, size = dammit.unicode_markup.split(',')[:-1]
            ages.append(age)
            sizes.append(size)
            # Torrent
            href = self.domain + \
                elem.find('a', title=re.compile('magnet'))['href']
            self.hrefs.append(str(href))
        seeders = [elem.get_text() for elem in self.elements[1]]
        leechers = [elem.get_text() for elem in self.elements[2]]
    elif self.page == '1337x':
        titles = [elem.get_text() for elem in self.elements[0]]
        seeders = [elem.get_text() for elem in self.elements[1]]
        leechers = [elem.get_text() for elem in self.elements[2]]
        ages = [elem.get_text() for elem in self.elements[3]]
        sizes = [elem.get_text('|').split('|')[0]
                 for elem in self.elements[4]]
        # Torrent
        self.hrefs = [self.domain +
                      elem.find(href=re.compile('torrent'))['href']
                      for elem in self.elements[0]]
    elif self.page == 'eztv':
        titles = [elem.get_text() for elem in self.elements[0]]
        seeders = [elem.get_text() for elem in self.elements[4]]
        leechers = ['-' for elem in self.elements[4]]
        ages = [elem.get_text() for elem in self.elements[3]]
        sizes = [elem.get_text() for elem in self.elements[2]]
        # Magnets
        self.hrefs = [elem.find(href=re.compile('magnet'))['href']
                      for elem in self.elements[1]]
    elif self.page == 'limetorrents':
        titles = [elem.get_text() for elem in self.elements[0]]
        seeders = [elem.get_text() for elem in self.elements[3]]
        leechers = [elem.get_text() for elem in self.elements[4]]
        ages = [elem.get_text() for elem in self.elements[1]]
        sizes = [elem.get_text() for elem in self.elements[2]]
        # Magnets
        self.hrefs = [elem.find('a', href=re.compile('torrent'))['href']
                      for elem in self.elements[0]]
    elif self.page == 'isohunt':
        titles = [elem.get_text() for elem in self.elements[0]]
        seeders = [elem.get_text() for elem in self.elements[5]]
        leechers = ['-' for elem in self.elements[5]]
        ages = [elem.get_text() for elem in self.elements[3]]
        sizes = [elem.get_text() for elem in self.elements[4]]
        # Torrents
        self.hrefs = [self.domain +
                      elem.find(href=re.compile('torrent_details'))['href']
                      for elem in self.elements[0]]
    else:
        print('Error page')
    self.table = [[Colors.BOLD +
                   UnicodeDammit(titles[i][:75].strip(), ["utf-8"]).unicode_markup +
                   Colors.ENDC
                   if (i + 1) % 2 == 0
                   else UnicodeDammit(
                       titles[i][:75].strip()).unicode_markup,
                   Colors.SEEDER + seeders[i].strip() + Colors.ENDC
                   if (i + 1) % 2 == 0
                   else Colors.LGREEN + seeders[i].strip() + Colors.ENDC,
                   Colors.LEECHER + leechers[i].strip() + Colors.ENDC
                   if (i + 1) % 2 == 0
                   else Colors.LRED + leechers[i].strip() + Colors.ENDC,
                   Colors.LIGHTBLUE + ages[i].strip() + Colors.ENDC
                   if (i + 1) % 2 == 0
                   else Colors.BLUE + ages[i].strip() + Colors.ENDC,
                   Colors.PINK + sizes[i].strip() + Colors.ENDC
                   if (i + 1) % 2 == 0
                   else Colors.PURPLE + sizes[i].strip() + Colors.ENDC]
                  for i in range(len(self.hrefs))]
    print(tabulate(self.table,
                   headers=headers,
                   tablefmt='psql',
                   numalign='right',
                   stralign='left',
                   showindex=True))
|
java
|
@Nullable
public Bitmap updateBitmap(String id, @Nullable Bitmap bitmap) {
    return lottieDrawable.updateBitmap(id, bitmap);
}
|
python
|
def download(self,
             task,
             default_ext,
             timeout=5,
             max_retry=3,
             overwrite=False,
             **kwargs):
    """Download the image and save it to the corresponding path.
    Args:
        task (dict): The task dict got from ``task_queue``.
        default_ext (str): The default file extension, used when one cannot
            be inferred from the task.
        timeout (int): Timeout of making requests for downloading images.
        max_retry (int): the max retry times if the request fails.
        overwrite (bool): If False, skip files that already exist in storage.
        **kwargs: reserved arguments for overriding.
    """
    file_url = task['file_url']
    task['success'] = False
    task['filename'] = None
    retry = max_retry
    if not overwrite:
        with self.lock:
            self.fetched_num += 1
            filename = self.get_filename(task, default_ext)
            if self.storage.exists(filename):
                self.logger.info('skip downloading file %s', filename)
                return
            self.fetched_num -= 1
    while retry > 0 and not self.signal.get('reach_max_num'):
        try:
            response = self.session.get(file_url, timeout=timeout)
        except Exception as e:
            self.logger.error('Exception caught when downloading file %s, '
                              'error: %s, remaining retry times: %d',
                              file_url, e, retry - 1)
        else:
            if self.reach_max_num():
                self.signal.set(reach_max_num=True)
                break
            elif response.status_code != 200:
                self.logger.error('Response status code %d, file %s',
                                  response.status_code, file_url)
                break
            elif not self.keep_file(task, response, **kwargs):
                break
            with self.lock:
                self.fetched_num += 1
                filename = self.get_filename(task, default_ext)
            self.logger.info('image #%s\t%s', self.fetched_num, file_url)
            self.storage.write(filename, response.content)
            task['success'] = True
            task['filename'] = filename
            break
        finally:
            retry -= 1
|
python
|
def filter_by_rand(self, p:float, seed:int=None):
    "Keep random sample of `items` with probability `p` and an optional `seed`."
    if seed is not None: np.random.seed(seed)
    return self.filter_by_func(lambda o: rand_bool(p))
|
python
|
def get_traceback(f):
    """
    Multiprocessing doesn't forward exception traceback information. This does.
    From: http://pragmaticpython.com/2017/02/19/
    """
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except Exception as ex:
            ret = '#' * 60
            ret += "\nException caught:"
            ret += "\n" + '-' * 60
            ret += "\n" + traceback.format_exc()
            ret += "\n" + '-' * 60
            ret += "\n" + "#" * 60
            # write the report to stderr (the original Python 2 form
            # `print sys.stderr, ret` printed the file object instead)
            print(ret, file=sys.stderr)
            sys.stderr.flush()
            raise ex
    return wrapper
|
python
|
def settings_to_cmd_args(settings_dict):
    """
    Copied from django 1.8 MySQL backend DatabaseClient - where the runshell
    commandline creation has been extracted and made callable like so.
    """
    args = ['mysql']
    db = settings_dict['OPTIONS'].get('db', settings_dict['NAME'])
    user = settings_dict['OPTIONS'].get('user', settings_dict['USER'])
    passwd = settings_dict['OPTIONS'].get('passwd', settings_dict['PASSWORD'])
    host = settings_dict['OPTIONS'].get('host', settings_dict['HOST'])
    port = settings_dict['OPTIONS'].get('port', settings_dict['PORT'])
    cert = settings_dict['OPTIONS'].get('ssl', {}).get('ca')
    defaults_file = settings_dict['OPTIONS'].get('read_default_file')
    # Seems to be no good way to set sql_mode with CLI.
    if defaults_file:
        args += ["--defaults-file=%s" % defaults_file]
    if user:
        args += ["--user=%s" % user]
    if passwd:
        args += ["--password=%s" % passwd]
    if host:
        if '/' in host:
            args += ["--socket=%s" % host]
        else:
            args += ["--host=%s" % host]
    if port:
        args += ["--port=%s" % port]
    if cert:
        args += ["--ssl-ca=%s" % cert]
    if db:
        args += [db]
    return args
|
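For example, given a Django-style settings dict with no OPTIONS overrides, the function builds the `mysql` invocation below:

settings = {
    'NAME': 'mydb',
    'USER': 'alice',
    'PASSWORD': 's3cret',
    'HOST': 'db.example.com',
    'PORT': '3306',
    'OPTIONS': {},
}
print(settings_to_cmd_args(settings))
# ['mysql', '--user=alice', '--password=s3cret',
#  '--host=db.example.com', '--port=3306', 'mydb']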
python
|
def applicationinsights_mgmt_plane_client(cli_ctx, _, subscription=None):
    """Initialize Application Insights mgmt client for use with CLI."""
    from .vendored_sdks.mgmt_applicationinsights import ApplicationInsightsManagementClient
    from azure.cli.core._profile import Profile
    profile = Profile(cli_ctx=cli_ctx)
    # Use subscription from resource_id where possible, otherwise use login.
    if subscription:
        cred, _, _ = profile.get_login_credentials(subscription_id=subscription)
        return ApplicationInsightsManagementClient(
            cred,
            subscription
        )
    cred, sub_id, _ = profile.get_login_credentials()
    return ApplicationInsightsManagementClient(
        cred,
        sub_id
    )
|
java
|
public Mono<String> getUaaUrl() {
    this.uaaUrl = this.webClient.get().uri(this.cloudControllerUrl + "/info")
            .retrieve().bodyToMono(Map.class)
            .map((response) -> (String) response.get("token_endpoint")).cache()
            .onErrorMap((ex) -> new CloudFoundryAuthorizationException(
                    Reason.SERVICE_UNAVAILABLE,
                    "Unable to fetch token keys from UAA."));
    return this.uaaUrl;
}
|
python
|
def Cache(fn):
    """ Function cache decorator """
    def fnCache(*args, **kwargs):
        """ Cache function """
        key = (args and tuple(args) or None,
               kwargs and frozenset(kwargs.items()) or None)
        if key not in fn.__cached__:
            fn.__cached__[key] = cache = fn(*args, **kwargs)
        else:
            cache = fn.__cached__[key]
        return cache

    def ResetCache():
        """ Reset cache """
        fn.__cached__ = {}

    setattr(fn, "__cached__", {})
    setattr(fn, "__resetcache__", ResetCache)
    fnCache.__name__ = fn.__name__
    fnCache.__doc__ = fn.__doc__
    fnCache.__dict__.update(fn.__dict__)
    return fnCache
|
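A small usage check for the decorator above. Note the cache key is built from the positional and keyword arguments, so they must be hashable, and `__resetcache__` is reachable on the wrapper because `fnCache.__dict__.update(fn.__dict__)` copies it over:

import time

@Cache
def slow_square(x):
    time.sleep(0.1)
    return x * x

assert slow_square(4) == 16   # computed
assert slow_square(4) == 16   # served from fn.__cached__
slow_square.__resetcache__()  # clears the cache dict on fn
assert slow_square(4) == 16   # recomputed after the reset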
java
|
private static boolean javaTimeTypes(ExpressionTree tree, VisitorState state) {
    if (packageStartsWith("java.time").matches(tree, state)) {
        return false;
    }
    Symbol symbol = ASTHelpers.getSymbol(tree);
    if (symbol instanceof MethodSymbol) {
        MethodSymbol methodSymbol = (MethodSymbol) symbol;
        if (methodSymbol.owner.packge().getQualifiedName().toString().startsWith("java.time")
                && methodSymbol.getModifiers().contains(Modifier.PUBLIC)) {
            if (ALLOWED_JAVA_TIME_METHODS.matches(tree, state)) {
                return false;
            }
            return true;
        }
    }
    return false;
}
|
java
|
@Override
public void register( YogaEntityConfiguration<?>... entityConfigurations )
{
    for ( YogaEntityConfiguration<?> entityConfiguration : entityConfigurations )
    {
        Class<?> type = entityConfiguration.getEntityClass();
        if ( type == null )
        {
            throw new IllegalArgumentException("Entity configuration must define an entity class to configure: "
                    + entityConfiguration.getClass().getName());
        }
        _registry.put( type, entityConfiguration );
    }
}
|
java
|
protected Map<String, String> buildRoot(HttpServletRequest request,
                                        XMLOutputter doc) {
    final String path = request.getPathInfo() != null
        ? request.getPathInfo() : "/";
    final String exclude = request.getParameter("exclude") != null
        ? request.getParameter("exclude") : "\\..*\\.crc";
    final String filter = request.getParameter("filter") != null
        ? request.getParameter("filter") : ".*";
    final boolean recur = request.getParameter("recursive") != null
        && "yes".equals(request.getParameter("recursive"));
    Map<String, String> root = new HashMap<String, String>();
    root.put("path", path);
    root.put("recursive", recur ? "yes" : "no");
    root.put("filter", filter);
    root.put("exclude", exclude);
    root.put("time", df.get().format(new Date()));
    root.put("version", VersionInfo.getVersion());
    return root;
}
|
python
|
def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
    """Extracts events from a Windows Registry key.
    Args:
        parser_mediator (ParserMediator): mediates interactions between parsers
            and other components, such as storage and dfvfs.
        registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
    names_key = registry_key.GetSubkeyByName('Names')
    if not names_key:
        parser_mediator.ProduceExtractionWarning('missing subkey: Names.')
        return
    last_written_time_per_username = {
        registry_value.name: registry_value.last_written_time
        for registry_value in names_key.GetSubkeys()}
    for subkey in registry_key.GetSubkeys():
        if subkey.name == 'Names':
            continue
        try:
            f_value = self._ParseFValue(subkey)
        except errors.ParseError as exception:
            parser_mediator.ProduceExtractionWarning(
                'unable to parse F value with error: {0!s}'.format(exception))
            continue
        registry_value = subkey.GetValueByName('V')
        if not registry_value:
            parser_mediator.ProduceExtractionWarning(
                'missing Registry value: "V" in subkey: {0:s}.'.format(
                    subkey.name))
            continue
        v_value_map = self._GetDataTypeMap('v_value')
        try:
            v_value = self._ReadStructureFromByteStream(
                registry_value.data, 0, v_value_map)
        except (ValueError, errors.ParseError) as exception:
            parser_mediator.ProduceExtractionWarning(
                'unable to parse V value with error: {0!s}'.format(exception))
            continue
        username = self._ParseVValueString(
            parser_mediator, registry_value.data, v_value[1])
        fullname = self._ParseVValueString(
            parser_mediator, registry_value.data, v_value[2])
        comments = self._ParseVValueString(
            parser_mediator, registry_value.data, v_value[3])
        last_written_time = last_written_time_per_username.get(username, None)
        # TODO: check if subkey.name == f_value.rid
        if last_written_time:
            values_dict = {
                'account_rid': f_value.rid,
                'login_count': f_value.number_of_logons}
            if username:
                values_dict['username'] = username
            if fullname:
                values_dict['full_name'] = fullname
            if comments:
                values_dict['comments'] = comments
            event_data = windows_events.WindowsRegistryEventData()
            event_data.key_path = registry_key.path
            event_data.regvalue = values_dict
            event_data.source_append = self._SOURCE_APPEND
            event = time_events.DateTimeValuesEvent(
                last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)
            parser_mediator.ProduceEventWithEventData(event, event_data)
        event_data = SAMUsersWindowsRegistryEventData()
        event_data.account_rid = f_value.rid
        event_data.comments = comments
        event_data.fullname = fullname
        event_data.key_path = registry_key.path
        event_data.login_count = f_value.number_of_logons
        event_data.username = username
        if f_value.last_login_time != 0:
            date_time = dfdatetime_filetime.Filetime(
                timestamp=f_value.last_login_time)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_LAST_LOGIN)
            parser_mediator.ProduceEventWithEventData(event, event_data)
        if f_value.last_password_set_time != 0:
            date_time = dfdatetime_filetime.Filetime(
                timestamp=f_value.last_password_set_time)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_LAST_PASSWORD_RESET)
            parser_mediator.ProduceEventWithEventData(event, event_data)
|
python
|
def perform_get_or_create(self, request, *args, **kwargs):
    """Perform "get_or_create" - return existing object if found."""
    serializer = self.get_serializer(data=request.data)
    serializer.is_valid(raise_exception=True)
    process = serializer.validated_data.get('process')
    process_input = request.data.get('input', {})
    fill_with_defaults(process_input, process.input_schema)
    checksum = get_data_checksum(process_input, process.slug, process.version)
    data_qs = Data.objects.filter(
        checksum=checksum,
        process__persistence__in=[Process.PERSISTENCE_CACHED, Process.PERSISTENCE_TEMP],
    )
    data_qs = get_objects_for_user(request.user, 'view_data', data_qs)
    if data_qs.exists():
        data = data_qs.order_by('created').last()
        serializer = self.get_serializer(data)
        return Response(serializer.data)
|
python
|
def process(argv, scope, interface=None):
    """
    Processes any commands within the scope that match the inputted arguments.
    If a subcommand is found, then it is run, and the system exits with the
    return value from the command.
    :param argv | [<str>, ..]
           scope | <dict>
    :return (<dict> options, <tuple> arguments)
    """
    cmd = command(argv, scope)
    if cmd:
        sys.exit(cmd.run(argv))
    name = PROGRAM_NAME
    if interface:
        name = interface.name()
    _parser = parser(scope, '{0} [options] [<subcommand>] [<arg>]'.format(name))
    options, args = _parser.parse_args(argv)
    return options.__dict__, args
|
python
|
def purge_all(user=None, fast=False):
    """
    Remove all calculations of the given user
    """
    user = user or getpass.getuser()
    if os.path.exists(datadir):
        if fast:
            shutil.rmtree(datadir)
            print('Removed %s' % datadir)
        else:
            for fname in os.listdir(datadir):
                mo = re.match(r'calc_(\d+)\.hdf5', fname)
                if mo is not None:
                    calc_id = int(mo.group(1))
                    purge_one(calc_id, user)
|
python
|
def read_h5ad(filename, backed: Optional[str] = None, chunk_size: int = 6000):
    """Read ``.h5ad``-formatted hdf5 file.
    Parameters
    ----------
    filename
        File name of data file.
    backed : {``None``, ``'r'``, ``'r+'``}
        If ``'r'``, load :class:`~anndata.AnnData` in ``backed`` mode instead
        of fully loading it into memory (`memory` mode). If you want to modify
        backed attributes of the AnnData object, you need to choose ``'r+'``.
    chunk_size
        Used only when loading sparse dataset that is stored as dense.
        Loading iterates through chunks of the dataset of this row size
        until it reads the whole dataset.
        Higher size means higher memory consumption and higher loading speed.
    """
    if isinstance(backed, bool):
        # We pass `None`s through to h5py.File, and its default is "a"
        # (="r+", but create the file if it doesn't exist)
        backed = 'r+' if backed else None
        warnings.warn(
            "In a future version, read_h5ad will no longer explicitly support "
            "boolean arguments. Specify the read mode, or leave `backed=None`.",
            DeprecationWarning,
        )
    if backed:
        # open in backed-mode
        return AnnData(filename=filename, filemode=backed)
    else:
        # load everything into memory; reuse the already-read constructor
        # args instead of reading the file a second time
        constructor_args = _read_args_from_h5ad(filename=filename, chunk_size=chunk_size)
        X = constructor_args[0]
        dtype = None
        if X is not None:
            dtype = X.dtype.name  # maintain dtype, since 0.7
        return AnnData(*constructor_args, dtype=dtype)
|
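A usage sketch built from the signature above (the file name is hypothetical):

adata = read_h5ad('pbmc3k.h5ad')                    # fully loaded into memory
adata_backed = read_h5ad('pbmc3k.h5ad', backed='r')  # lazy, read-only access
# pass backed='r+' instead if backed attributes need to be modified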
python
|
def _fit_diagV_noGP(
        self, XTY, XTDY, XTFY, YTY_diag, YTDY_diag, YTFY_diag,
        XTX, XTDX, XTFX, X, Y, X_base, X_res, D, F, run_TRs,
        current_vec_U_chlsk_l,
        current_a1, current_logSNR2,
        idx_param_fitU, idx_param_fitV,
        l_idx, n_C, n_T, n_V, n_l, n_run, n_X0, rank):
    """ (optional) second step of fitting, full model but without
    GP prior on log(SNR). This step is only done if GP prior
    is requested.
    """
    init_iter = self.init_iter
    logger.info('second fitting without GP prior'
                ' for {} times'.format(init_iter))
    # Initial parameters
    param0_fitU = np.empty(
        np.sum(np.size(v) for v in idx_param_fitU.values()))
    param0_fitV = np.empty(np.size(idx_param_fitV['log_SNR2']))
    # We cannot use the same logic as the line above because
    # idx_param_fitV also includes entries for GP parameters.
    param0_fitU[idx_param_fitU['Cholesky']] = \
        current_vec_U_chlsk_l.copy()
    param0_fitU[idx_param_fitU['a1']] = current_a1.copy()
    param0_fitV[idx_param_fitV['log_SNR2']] = \
        current_logSNR2[:-1].copy()
    L = np.zeros((n_C, rank))
    tol = self.tol * 5
    for it in range(0, init_iter):
        X0TX0, X0TDX0, X0TFX0, XTX0, XTDX0, XTFX0, \
            X0TY, X0TDY, X0TFY, X0, X_base, n_X0, _ \
            = self._prepare_data_XYX0(
                X, Y, X_base, X_res, D, F, run_TRs, no_DC=True)
        # fit U, the covariance matrix, together with AR(1) param
        param0_fitU[idx_param_fitU['Cholesky']] = \
            current_vec_U_chlsk_l \
            + self.random_state_.randn(n_l) \
            * np.linalg.norm(current_vec_U_chlsk_l) \
            / n_l**0.5 * np.exp(-it / init_iter * self.anneal_speed - 1)
        param0_fitU[idx_param_fitU['a1']] = current_a1
        res_fitU = scipy.optimize.minimize(
            self._loglike_AR1_diagV_fitU, param0_fitU,
            args=(XTX, XTDX, XTFX, YTY_diag, YTDY_diag, YTFY_diag,
                  XTY, XTDY, XTFY, X0TX0, X0TDX0, X0TFX0,
                  XTX0, XTDX0, XTFX0, X0TY, X0TDY, X0TFY,
                  current_logSNR2, l_idx, n_C,
                  n_T, n_V, n_run, n_X0, idx_param_fitU, rank),
            method=self.optimizer, jac=True, tol=tol,
            options=self.minimize_options)
        current_vec_U_chlsk_l = \
            res_fitU.x[idx_param_fitU['Cholesky']]
        current_a1 = res_fitU.x[idx_param_fitU['a1']]
        norm_fitUchange = np.linalg.norm(res_fitU.x - param0_fitU)
        logger.debug('norm of parameter change after fitting U: '
                     '{}'.format(norm_fitUchange))
        param0_fitU = res_fitU.x.copy()
        # fit V, reflected in the log(SNR^2) of each voxel
        rho1 = np.arctan(current_a1) * 2 / np.pi
        L[l_idx] = current_vec_U_chlsk_l
        X0TAX0, XTAX0, X0TAY, X0TAX0_i, \
            XTAcorrX, XTAcorrY, YTAcorrY, \
            LTXTAcorrY, XTAcorrXL, LTXTAcorrXL = \
            self._precompute_ar1_quad_forms(XTY, XTDY, XTFY,
                                            YTY_diag, YTDY_diag, YTFY_diag,
                                            XTX, XTDX, XTFX,
                                            X0TX0, X0TDX0, X0TFX0,
                                            XTX0, XTDX0, XTFX0,
                                            X0TY, X0TDY, X0TFY,
                                            L, rho1, n_V, n_X0)
        res_fitV = scipy.optimize.minimize(
            self._loglike_AR1_diagV_fitV, param0_fitV,
            args=(X0TAX0, XTAX0, X0TAY,
                  X0TAX0_i, XTAcorrX, XTAcorrY, YTAcorrY,
                  LTXTAcorrY, XTAcorrXL, LTXTAcorrXL,
                  current_vec_U_chlsk_l,
                  current_a1, l_idx, n_C, n_T, n_V, n_run,
                  n_X0, idx_param_fitV, rank,
                  False, False),
            method=self.optimizer, jac=True, tol=tol,
            options=self.minimize_options)
        current_logSNR2[0:n_V - 1] = res_fitV.x
        current_logSNR2[-1] = - np.sum(current_logSNR2[0:n_V - 1])
        norm_fitVchange = np.linalg.norm(res_fitV.x - param0_fitV)
        logger.debug('norm of parameter change after fitting V: '
                     '{}'.format(norm_fitVchange))
        logger.debug('E[log(SNR2)^2]: {}'.format(
            np.mean(current_logSNR2**2)))
        # The lines below are for debugging purpose.
        # If any voxel's log(SNR^2) gets to non-finite number,
        # something might be wrong -- could be that the data has
        # nothing to do with the design matrix.
        if np.any(np.logical_not(np.isfinite(current_logSNR2))):
            logger.warning('Initial fitting: iteration {}'.format(it))
            logger.warning('current log(SNR^2): '
                           '{}'.format(current_logSNR2))
            logger.warning('log(sigma^2) has non-finite number')
        param0_fitV = res_fitV.x.copy()
        # Re-estimating X_res from residuals
        current_SNR2 = np.exp(current_logSNR2)
        if self.auto_nuisance:
            LL, LAMBDA_i, LAMBDA, YTAcorrXL_LAMBDA, current_sigma2 \
                = self._calc_LL(rho1, LTXTAcorrXL, LTXTAcorrY, YTAcorrY,
                                X0TAX0, current_SNR2,
                                n_V, n_T, n_run, rank, n_X0)
            betas = current_SNR2 * np.dot(L, YTAcorrXL_LAMBDA.T)
            beta0s = np.einsum(
                'ijk,ki->ji', X0TAX0_i,
                (X0TAY - np.einsum('ikj,ki->ji', XTAX0, betas)))
            residuals = Y - np.dot(X, betas) - np.dot(
                X_base, beta0s[:np.shape(X_base)[1], :])
            X_res = self.nureg_method(
                self.n_nureg_).fit_transform(
                    self.preprocess_residual(residuals))
        if norm_fitVchange / np.sqrt(param0_fitV.size) < tol \
                and norm_fitUchange / np.sqrt(param0_fitU.size) \
                < tol:
            break
    return current_vec_U_chlsk_l, current_a1, current_logSNR2, X_res
|
python
|
def string_to_config(s):
    """s is a comma-separated list of stores."""
    from .machines import Configuration
    s = lexer(s)
    x = parse_multiple(s, parse_store)
    parse_end(s)
    return Configuration(x)
|
python
|
def ping(self):
    """Ping a device."""
    msg = StandardSend(self._address, COMMAND_PING_0X0F_0X00)
    self._send_msg(msg)
|
python
|
def has_user(self, user, role=None, timestamp=False):
    """
    Checks whether user has role in entity.
    `timestamp` can have following values:
    - False - check whether user has role in entity at the moment.
    - None - check whether user has permanent role in entity.
    - Datetime object - check whether user will have role in entity at specific timestamp.
    """
    permissions = self.permissions.filter(user=user, is_active=True)
    if role is not None:
        permissions = permissions.filter(role=role)
    if timestamp is None:
        permissions = permissions.filter(expiration_time=None)
    elif timestamp:
        permissions = permissions.filter(Q(expiration_time=None) | Q(expiration_time__gte=timestamp))
    return permissions.exists()
|
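The three `timestamp` modes from the docstring, shown as hypothetical call sites (`entity` and `user` are assumed to be existing model instances):

from datetime import datetime, timedelta

entity.has_user(user)                   # role active right now
entity.has_user(user, timestamp=None)   # permanent role (no expiration)
entity.has_user(user, role='admin',
                timestamp=datetime.utcnow() + timedelta(days=30))  # still valid in 30 days?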
python
|
def add_rup_params(self, rupture):
    """
    Add .REQUIRES_RUPTURE_PARAMETERS to the rupture
    """
    for param in self.REQUIRES_RUPTURE_PARAMETERS:
        if param == 'mag':
            value = rupture.mag
        elif param == 'strike':
            value = rupture.surface.get_strike()
        elif param == 'dip':
            value = rupture.surface.get_dip()
        elif param == 'rake':
            value = rupture.rake
        elif param == 'ztor':
            value = rupture.surface.get_top_edge_depth()
        elif param == 'hypo_lon':
            value = rupture.hypocenter.longitude
        elif param == 'hypo_lat':
            value = rupture.hypocenter.latitude
        elif param == 'hypo_depth':
            value = rupture.hypocenter.depth
        elif param == 'width':
            value = rupture.surface.get_width()
        else:
            raise ValueError('%s requires unknown rupture parameter %r' %
                             (type(self).__name__, param))
        setattr(rupture, param, value)
|
python
|
def configure_createfor(self, ns, definition):
    """
    Register a create-for relation endpoint.
    The definition's func should be a create function, which must:
    - accept kwargs for the new instance creation parameters
    - return the created instance
    :param ns: the namespace
    :param definition: the endpoint definition
    """
    @self.add_route(ns.relation_path, Operation.CreateFor, ns)
    @request(definition.request_schema)
    @response(definition.response_schema)
    @wraps(definition.func)
    def create(**path_data):
        request_data = load_request_data(definition.request_schema)
        response_data = require_response_data(definition.func(**merge_data(path_data, request_data)))
        headers = encode_id_header(response_data)
        definition.header_func(headers, response_data)
        response_format = self.negotiate_response_content(definition.response_formats)
        return dump_response_data(
            definition.response_schema,
            response_data,
            Operation.CreateFor.value.default_code,
            headers=headers,
            response_format=response_format,
        )

    create.__doc__ = "Create a new {} relative to a {}".format(pluralize(ns.object_name), ns.subject_name)
|
python
|
def merge_files(service, skip_on_user_group_error=False):
    """
    Given a prefix, find all templates below; merge with parameters; write to "dest"
    Args:
        service: "<service>", "all", or "ssh"
        skip_on_user_group_error: True or False
    For S3, full path becomes:
        s3://ellation-cx-global-configs/<service>/templates/<filename>
        s3://ellation-cx-global-configs/<service>/parameters/<filename>.parameters.<yaml|yml|json>
    For filesystem, full path becomes:
        /vagrant/configs/<service>/templates/<filename>
        /vagrant/configs/<service>/parameters/<filename>.parameters.<yaml|yml|json>
    """
    if WHERE == "ec2":
        config_reader = EFInstanceinitConfigReader("s3", service, log_info, RESOURCES["s3"])
        resolver = EFTemplateResolver()
    elif WHERE == "virtualbox-kvm":
        config_path = "{}/{}".format(VIRTUALBOX_CONFIG_ROOT, service)
        config_reader = EFInstanceinitConfigReader("file", config_path, log_info)
        environment = EFConfig.VAGRANT_ENV
        resolver = EFTemplateResolver(env=environment, profile=get_account_alias(environment),
                                      region=EFConfig.DEFAULT_REGION, service=service)
    while config_reader.next():
        log_info("checking: {}".format(config_reader.current_key))
        # if 'dest' for the current object contains an 'environments' list, check it
        dest = config_reader.dest
        if "environments" in dest:
            if not resolver.resolved["ENV_SHORT"] in dest["environments"]:
                log_info("Environment: {} not enabled for {}".format(
                    resolver.resolved["ENV_SHORT"], config_reader.current_key)
                )
                continue
        # If 'dest' for the current object contains a user_group that hasn't been created in the
        # environment yet and the flag is set to True to skip, log the error and move on to the
        # next config file without blowing up.
        if skip_on_user_group_error:
            user, group = get_user_group(dest)
            try:
                getpwnam(user).pw_uid
            except KeyError:
                log_info("File specifies user {} that doesn't exist in environment. Skipping config file.".format(user))
                continue
            try:
                getgrnam(group).gr_gid
            except KeyError:
                log_info("File specifies group {} that doesn't exist in environment. Skipping config file.".format(group))
                continue
        # Process the template_body - apply context + parameters
        log_info("Resolving template")
        resolver.load(config_reader.template, config_reader.parameters)
        rendered_body = resolver.render()
        if not resolver.resolved_ok():
            critical("Couldn't resolve all symbols; template has leftover {{ or }}: {}".format(resolver.unresolved_symbols()))
        # Write the rendered file
        dir_path = normpath(dirname(dest["path"]))
        # Resolved OK. try to write the template
        log_info("make directories: {} {}".format(dir_path, dest["dir_perm"]))
        try:
            makedirs(dir_path, int(dest["dir_perm"], 8))
        except OSError as error:
            if error.errno != 17:  # 17 == EEXIST; an already-existing directory is fine
                critical("Error making directories {}".format(repr(error)))
        log_info("open: " + dest["path"] + ",w+")
        try:
            outfile = open(dest["path"], 'w+')
            log_info("write")
            outfile.write(rendered_body)
            log_info("close")
            outfile.close()
            log_info("chmod file to: " + dest["file_perm"])
            chmod(dest["path"], int(dest["file_perm"], 8))
            user, group = get_user_group(dest)
            uid = getpwnam(user).pw_uid
            gid = getgrnam(group).gr_gid
            log_info("chown last directory in path to: " + dest["user_group"])
            chown(dir_path, uid, gid)
            log_info("chown file to: " + dest["user_group"])
            chown(dest["path"], uid, gid)
        except Exception as error:
            critical("Error writing file: " + dest["path"] + ": " + repr(error))
|
java
|
public static int distance (int a, int b, int N)
{
    return (a > b) ? Math.min(a-b, b+N-a) : Math.min(b-a, a+N-b);
}
|
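This computes the shortest distance between two positions on a ring of size N, taking whichever direction is shorter. A quick Python re-expression of the same formula, purely to sanity-check it (not part of the original source):

def ring_distance(a, b, n):
    # shortest of the direct path and the wrap-around path
    d = abs(a - b)
    return min(d, n - d)

assert ring_distance(9, 1, 10) == 2   # wrapping around is shorter
assert ring_distance(3, 5, 10) == 2   # direct path is shorter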
java
|
@Nullable
@ReturnsMutableCopy
public static Object [] getAsObjectArray (@Nullable final Collection <?> aCollection)
{
    if (CollectionHelper.isEmpty (aCollection))
        return null;
    final Object [] ret = new Object [aCollection.size ()];
    return aCollection.toArray (ret);
}
|
python
|
def widen(self, other):
    """ Widen current range. """
    if self.low < other.low:
        low = -float("inf")
    else:
        low = self.low
    if self.high > other.high:
        high = float("inf")
    else:
        high = self.high
    return Interval(low, high)
|
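Widening is the standard device from abstract interpretation for forcing fixed-point termination: any bound that is still moving outward jumps straight to infinity instead of creeping there one step at a time. An example, assuming the `Interval(low, high)` constructor the method itself returns:

old = Interval(0, 10)
new = Interval(-5, 10)   # the lower bound kept growing between iterations
w = new.widen(old)       # -5 < 0, so low is widened to -inf; high is stable
print(w.low, w.high)     # -inf 10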
java
|
private Object[] findInBundle(Spec specToOpen,
                              Spec specToFind,
                              String variant,
                              int direction) {
    // assert(specToOpen.isLocale());
    ResourceBundle res = specToOpen.getBundle();
    if (res == null) {
        // This means that the bundle's locale does not match
        // the current level of iteration for the spec.
        return null;
    }
    for (int pass=0; pass<2; ++pass) {
        StringBuilder tag = new StringBuilder();
        // First try either TransliteratorTo_xxx or
        // TransliterateFrom_xxx, then try the bidirectional
        // Transliterate_xxx. This precedence order is arbitrary
        // but must be consistent and documented.
        if (pass == 0) {
            tag.append(direction == Transliterator.FORWARD ?
                       "TransliterateTo" : "TransliterateFrom");
        } else {
            tag.append("Transliterate");
        }
        tag.append(specToFind.get().toUpperCase(Locale.ENGLISH));
        try {
            // The Transliterate*_xxx resource is an array of
            // strings of the format { <v0>, <r0>, ... }. Each
            // <vi> is a variant name, and each <ri> is a rule.
            String[] subres = res.getStringArray(tag.toString());
            // assert(subres != null);
            // assert(subres.length % 2 == 0);
            int i = 0;
            if (variant.length() != 0) {
                for (i=0; i<subres.length; i+= 2) {
                    if (subres[i].equalsIgnoreCase(variant)) {
                        break;
                    }
                }
            }
            if (i < subres.length) {
                // We have a match, or there is no variant and i == 0.
                // We have succeeded in loading a string from the
                // locale resources. Return the rule string which
                // will itself become the registry entry.
                // The direction is always forward for the
                // TransliterateTo_xxx and TransliterateFrom_xxx
                // items; those are unidirectional forward rules.
                // For the bidirectional Transliterate_xxx items,
                // the direction is the value passed in to this
                // function.
                int dir = (pass == 0) ? Transliterator.FORWARD : direction;
                return new Object[] { new LocaleEntry(subres[i+1], dir) };
            }
        } catch (MissingResourceException e) {
            ///CLOVER:OFF
            if (DEBUG) System.out.println("missing resource: " + e);
            ///CLOVER:ON
        }
    }
    // If we get here we had a missing resource exception or we
    // failed to find a desired variant.
    return null;
}
|
python
|
def resolve_requirements(self, interpreter, req_libs):
    """Requirements resolution for PEX files.
    :param interpreter: Resolve against this :class:`PythonInterpreter`.
    :param req_libs: A list of :class:`PythonRequirementLibrary` targets to resolve.
    :returns: a PEX containing target requirements and any specified python dist targets.
    """
    with self.invalidated(req_libs) as invalidation_check:
        # If there are no relevant targets, we still go through the motions of resolving
        # an empty set of requirements, to prevent downstream tasks from having to check
        # for this special case.
        if invalidation_check.all_vts:
            target_set_id = VersionedTargetSet.from_versioned_targets(
                invalidation_check.all_vts).cache_key.hash
        else:
            target_set_id = 'no_targets'
        # We need to ensure that we are resolving for only the current platform if we are
        # including local python dist targets that have native extensions.
        targets_by_platform = pex_build_util.targets_by_platform(self.context.targets(), self._python_setup)
        if self._python_native_code_settings.check_build_for_current_platform_only(targets_by_platform):
            platforms = ['current']
        else:
            platforms = list(sorted(targets_by_platform.keys()))
        path = os.path.realpath(os.path.join(self.workdir, str(interpreter.identity), target_set_id))
        # Note that we check for the existence of the directory, instead of for invalid_vts,
        # to cover the empty case.
        if not os.path.isdir(path):
            with safe_concurrent_creation(path) as safe_path:
                pex_builder = PexBuilderWrapper.Factory.create(
                    builder=PEXBuilder(path=safe_path, interpreter=interpreter, copy=True),
                    log=self.context.log)
                pex_builder.add_requirement_libs_from(req_libs, platforms=platforms)
                pex_builder.freeze()
        return PEX(path, interpreter=interpreter)
|
java
|
public ArrayList<Integer> getAllIndices() {
    ArrayList<Integer> res = new ArrayList<Integer>(this.realTSindex.size());
    res.addAll(this.realTSindex.keySet());
    Collections.sort(res);
    return res;
}
|
java
|
public void write(Writer out)
    throws XmlModelWriterException, IOException
{
    try
    {
        DOMSource domSource = new DOMSource(_doc);
        StreamResult streamResult = new StreamResult(out);
        TransformerFactory tf = TransformerFactory.newInstance();
        tf.setAttribute("indent-number", new Integer(INDENT_LEN));
        Transformer serializer = tf.newTransformer();
        if (_publicID != null) {
            serializer.setOutputProperty(OutputKeys.DOCTYPE_PUBLIC, _publicID);
        }
        if (_systemID != null) {
            serializer.setOutputProperty(OutputKeys.DOCTYPE_SYSTEM, _systemID);
        }
        serializer.setOutputProperty(OutputKeys.INDENT, "yes");
        serializer.transform(domSource, streamResult);
    }
    catch (TransformerException e)
    {
        throw new XmlModelWriterException(e);
    }
}
|
python
|
def qteKeyPress(self, msgObj):
    """
    Record the key presses reported by the key handler.
    """
    # Unpack the data structure.
    (srcObj, keysequence, macroName) = msgObj.data
    # Append the last QKeyEvent object to the so far recorded
    # sequence. Note that both ``keysequence`` and
    # ``self.recorded_keysequence`` are ``QtmacsKeysequence``
    # instances.
    last_key = keysequence.toQKeyEventList()[-1]
    self.recorded_keysequence.appendQKeyEvent(last_key)
|
java
|
protected void newBondData() {
    bondCounter = 0;
    bondid = new ArrayList<String>();
    bondARef1 = new ArrayList<String>();
    bondARef2 = new ArrayList<String>();
    order = new ArrayList<String>();
    bondStereo = new ArrayList<String>();
    bondCustomProperty = new Hashtable<String, Map<String, String>>();
    bondDictRefs = new ArrayList<String>();
    bondElid = new ArrayList<String>();
    bondAromaticity = new ArrayList<Boolean>();
}
|
python
|
def teardown(self):
    """
    Teardown the link.
    Removes endpoints from properties I{links} collection.
    @return: self
    @rtype: L{Link}
    """
    pA, pB = self.endpoints
    if pA in pB.links:
        pB.links.remove(pA)
    if pB in pA.links:
        pA.links.remove(pB)
    return self
|
java
|
public Path resolveConflictsIfNecessary(Path ciphertextPath, String dirId) throws IOException {
    String ciphertextFileName = ciphertextPath.getFileName().toString();
    String basename = StringUtils.removeEnd(ciphertextFileName, LONG_NAME_FILE_EXT);
    Matcher m = CIPHERTEXT_FILENAME_PATTERN.matcher(basename);
    if (!m.matches() && m.find(0)) {
        // no full match, but still contains base32 -> partial match
        return resolveConflict(ciphertextPath, m.group(0), dirId);
    } else {
        // full match or no match at all -> nothing to resolve
        return ciphertextPath;
    }
}
|
java
|
@NonNull
public RequestCreator transform(@NonNull List<? extends Transformation> transformations) {
    data.transform(transformations);
    return this;
}
|
java
|
public final void addTriangle(double x1, double y1, double x2, double y2, double x3, double y3) {
    addEdge(x1, y1, x2, y2);
    addEdge(x2, y2, x3, y3);
    addEdge(x1, y1, x3, y3);
}
|
python
|
def xpath(self, selector: str, *, clean: bool = False, first: bool = False, _encoding: str = None) -> _XPath:
    """Given an XPath selector, returns a list of
    :class:`Element <Element>` objects or a single one.
    :param selector: XPath Selector to use.
    :param clean: Whether or not to sanitize the found HTML of ``<script>`` and ``<style>`` tags.
    :param first: Whether or not to return just the first result.
    :param _encoding: The encoding format.
    If a sub-selector is specified (e.g. ``//a/@href``), a simple
    list of results is returned.
    See W3School's `XPath Examples
    <https://www.w3schools.com/xml/xpath_examples.asp>`_
    for more details.
    If ``first`` is ``True``, only returns the first
    :class:`Element <Element>` found.
    """
    selected = self.lxml.xpath(selector)
    elements = [
        Element(element=selection, url=self.url, default_encoding=_encoding or self.encoding)
        if not isinstance(selection, etree._ElementUnicodeResult) else str(selection)
        for selection in selected
    ]
    # Sanitize the found HTML.
    if clean:
        elements_copy = elements.copy()
        elements = []
        for element in elements_copy:
            element.raw_html = lxml_html_tostring(cleaner.clean_html(element.lxml))
            elements.append(element)
    return _get_first_or_list(elements, first)
|
python
|
def raw_command(netfn, command, bridge_request=None, data=(), retry=True, delay_xmit=None, **kwargs):
    '''
    Send raw ipmi command
    This allows arbitrary IPMI bytes to be issued. This is commonly used
    for certain vendor specific commands.
    :param netfn: Net function number
    :param command: Command value
    :param bridge_request: The target slave address and channel number for
        the bridge request.
    :param data: Command data as a tuple or list
    :param kwargs:
        - api_host=127.0.0.1
        - api_user=admin
        - api_pass=example
        - api_port=623
        - api_kg=None
    :returns: dict -- The response from IPMI device
    CLI Examples:
    .. code-block:: bash
        salt-call ipmi.raw_command netfn=0x06 command=0x46 data=[0x02]
        # this will return the name of the user with id 2 in bytes
    '''
    with _IpmiSession(**kwargs) as s:
        r = s.raw_command(netfn=int(netfn),
                          command=int(command),
                          bridge_request=bridge_request,
                          data=data,
                          retry=retry,
                          delay_xmit=delay_xmit)
        return r
|
python
|
def get_edit_url(self):
    """Get model object edit url"""
    return reverse('trionyx:model-edit', kwargs={
        'app': self.get_app_label(),
        'model': self.get_model_name(),
        'pk': self.object.id
    })
|
java
|
private void listJobs() throws IOException {
    JobStatus[] jobs = jobsToComplete();
    if (jobs == null)
        jobs = new JobStatus[0];
    System.out.printf("%d jobs currently running\n", jobs.length);
    displayJobList(jobs);
}
|
python
|
def _create_if_necessary(directory):
    """Create the specified directory, if necessary.
    :param str directory: The directory to use.
    :rtype: bool
    :returns: True if no errors occurred and the directory was created or
        existed beforehand, False otherwise.
    """
    if not os.path.isabs(directory):
        log.debug("Got non-absolute path: %s" % directory)
        directory = os.path.abspath(directory)
    if not os.path.isdir(directory):
        log.info("Creating directory: %s" % directory)
        try:
            os.makedirs(directory, 0x1C0)  # 0x1C0 == 0o700: owner rwx only
        except OSError as ose:
            log.error(ose, exc_info=1)
            return False
        else:
            log.debug("Created directory.")
    return True
|
java
|
public Map<String, String> getUriVariablesForSearchWithPost(BullhornEntityInfo entityInfo, Set<String> fieldSet, SearchParams params) {
    Map<String, String> uriVariables = params.getParameterMap();
    this.addCommonUriVariables(fieldSet, entityInfo, uriVariables);
    return uriVariables;
}
|
python
|
def _submit(ctx, parent_id, name, url, func, *args, **kwargs):
    """Submit a function to a cluster
    Parameters
    ----------
    parent_id : str
        The ID of the group that the job is a part of.
    name : str
        The name of the job
    url : str
        The handler that can take the results (e.g., /beta_diversity/)
    func : function
        The function to execute. Any returns from this function will be
        serialized and deposited into Redis using the uuid for a key. This
        function should raise if the method fails.
    args : tuple or None
        Any args for ``func``
    kwargs : dict or None
        Any kwargs for ``func``
    Returns
    -------
    tuple, (str, str, AsyncResult)
        The job ID, parent ID and the IPython's AsyncResult object of the job
    """
    parent_info = r_client.get(parent_id)
    if parent_info is None:
        parent_info = create_info('unnamed', 'group', id=parent_id)
        parent_id = parent_info['id']
        r_client.set(parent_id, json.dumps(parent_info))
    parent_pubsub_key = parent_id + ':pubsub'
    job_info = create_info(name, 'job', url=url, parent=parent_id,
                           context=ctx.name, store=True)
    job_info['status'] = 'Queued'
    job_id = job_info['id']
    with r_client.pipeline() as pipe:
        pipe.set(job_id, json.dumps(job_info))
        pipe.publish(parent_pubsub_key, json.dumps({'add': [job_id]}))
        pipe.execute()
    ar = ctx.bv.apply_async(_redis_wrap, job_info, func, *args, **kwargs)
    return job_id, parent_id, ar
|
java
|
public <T> Key<T> manualRefToKey(final Class<T> type, final Object id) {
    return id == null ? null : new Key<T>(type, getCollectionName(type), id);
}
|
java
|
public static base_response add(nitro_service client, location resource) throws Exception {
    location addresource = new location();
    addresource.ipfrom = resource.ipfrom;
    addresource.ipto = resource.ipto;
    addresource.preferredlocation = resource.preferredlocation;
    addresource.longitude = resource.longitude;
    addresource.latitude = resource.latitude;
    return addresource.add_resource(client);
}
|
java
|
public static void generateWhereCondition(final MethodSpec.Builder methodBuilder, final SQLiteModelMethod method, boolean sqlWhereParamsAlreadyDefined) {
    final JQL jql = method.jql;
    final JQLChecker jqlChecker = JQLChecker.getInstance();
    if (jql.isWhereConditions()) {
        // parameters extracted from query
        final One<String> whereStatement = new One<>();
        final One<Boolean> alreadyFoundWhereStatement = new One<>(false);
        // put in whereStatement value of where statement.
        jqlChecker.replaceVariableStatements(method, method.jql.value, new JQLReplaceVariableStatementListenerImpl() {
            @Override
            public String onWhere(String statement) {
                if (alreadyFoundWhereStatement.value0 == false) {
                    whereStatement.value0 = statement;
                    alreadyFoundWhereStatement.value0 = true;
                    return "";
                } else {
                    // DO NOTHING
                    return null;
                }
            }
        });
        final StringBuilder dynamicCodeBlock = new StringBuilder();
        methodBuilder.addCode("\n// manage WHERE arguments -- BEGIN\n");
        String sqlWhere = jqlChecker.replaceFromVariableStatement(method, whereStatement.value0, new JQLReplacerListenerImpl(method) {
            @Override
            public String onColumnName(String columnName) {
                SQLProperty tempProperty = currentEntity.get(columnName);
                AssertKripton.assertTrueOrUnknownPropertyInJQLException(tempProperty != null, method, columnName);
                return tempProperty.columnName;
            }

            @Override
            public String onDynamicSQL(JQLDynamicStatementType dynamicStatement) {
                method.jql.dynamicReplace.put(dynamicStatement, "#{" + dynamicStatement + "}");
                return "";
            }

            @Override
            public String onBindParameter(String bindParameterName, boolean inStatement) {
                if (!inStatement) {
                    return "?";
                } else {
                    methodBuilder.addComment("need to use $T operations", SpreadUtils.class);
                    dynamicCodeBlock.append((dynamicCodeBlock.length() > 0 ? "," : "") + String.format("SpreadUtils.generateQuestion(%s)", method.findParameterNameByAlias(bindParameterName)));
                    return "%s";
                }
            }
        });
        methodBuilder.addCode("\n// manage WHERE statement\n");
        String value = sqlWhere;
        String valueToReplace = jql.dynamicReplace.get(JQLDynamicStatementType.DYNAMIC_WHERE);
        if (method.jql.operationType == JQLType.SELECT) {
            String prefix = "";
            String suffix = "";
            // we have to include WHERE keywords
            if (jql.isDynamicSpreadConditions()) {
                prefix = "String.format(";
                suffix = String.format(",%s)", dynamicCodeBlock.toString());
            }
            if (jql.isStaticWhereConditions() && !jql.isDynamicWhereConditions()) {
                // case static statement and NO dynamic
                methodBuilder.addStatement("String _sqlWhereStatement=" + prefix + "$S" + suffix, value);
            } else if (jql.isStaticWhereConditions() && jql.isDynamicWhereConditions()) {
                methodBuilder.addStatement("String _sqlWhereStatement=" + prefix + "$S" + suffix + "+$T.ifNotEmptyAppend($L,\" $L \")", value.replace(valueToReplace, ""), StringUtils.class,
                        "_sqlDynamicWhere", method.dynamicWherePrepend);
            } else if (!jql.isStaticWhereConditions() && jql.isDynamicWhereConditions()) {
                // in this case no spread is managed
                methodBuilder.addStatement("String _sqlWhereStatement=$T.ifNotEmptyAppend($L, \" $L \")", StringUtils.class, "_sqlDynamicWhere", JQLKeywords.WHERE_KEYWORD);
            }
        } else {
            // we DON'T have to include WHERE keywords
            value = value.replace(" " + JQLKeywords.WHERE_KEYWORD, "");
            if (jql.isStaticWhereConditions() && !jql.isDynamicWhereConditions()) {
                // case static statement and NO dynamic
                methodBuilder.addStatement("String _sqlWhereStatement=$S", value);
            } else if (jql.isStaticWhereConditions() && jql.isDynamicWhereConditions()) {
                methodBuilder.addStatement("String _sqlWhereStatement=$S+$T.ifNotEmptyAppend($L,\" $L \")", value.replace(valueToReplace, ""), StringUtils.class, "_sqlDynamicWhere",
                        method.dynamicWherePrepend);
            } else if (!jql.isStaticWhereConditions() && jql.isDynamicWhereConditions()) {
                methodBuilder.addStatement("String _sqlWhereStatement=$T.ifNotEmptyAppend($L, \" \")", StringUtils.class, "_sqlDynamicWhere");
            }
        }
        methodBuilder.addStatement("_sqlBuilder.append($L)", "_sqlWhereStatement");
        methodBuilder.addCode("\n// manage WHERE arguments -- END\n");
    } else {
        // in every situation we need it
        methodBuilder.addStatement("String _sqlWhereStatement=\"\"");
    }
    // manage where arguments
    if (method.hasDynamicWhereConditions() && method.hasDynamicWhereArgs()) {
        // ASSERT: only with dynamic where conditions
        methodBuilder.beginControlFlow("if ($T.hasText(_sqlDynamicWhere) && _sqlDynamicWhereArgs!=null)", StringUtils.class);
        if (method.hasDynamicWhereConditions()) {
            methodBuilder.beginControlFlow("for (String _arg: _sqlDynamicWhereArgs)");
            // methodBuilder.addStatement("_sqlWhereParams.add(_arg)");
            methodBuilder.addStatement("_contentValues.addWhereArgs(_arg)");
            methodBuilder.endControlFlow();
        }
        methodBuilder.endControlFlow();
    }
}
|
python
|
def comments(recid):
    """Display comments."""
    from invenio_access.local_config import VIEWRESTRCOLL
    from invenio_access.mailcookie import \
        mail_cookie_create_authorize_action
    from .api import check_user_can_view_comments
    auth_code, auth_msg = check_user_can_view_comments(current_user, recid)
    if auth_code and current_user.is_guest:
        cookie = mail_cookie_create_authorize_action(VIEWRESTRCOLL, {
            'collection': g.collection})
        url_args = {'action': cookie, 'ln': g.ln, 'referer': request.referrer}
        flash(_("Authorization failure"), 'error')
        return redirect(url_for('webaccount.login', **url_args))
    elif auth_code:
        flash(auth_msg, 'error')
        abort(401)
    # FIXME check restricted discussion
    comments = CmtRECORDCOMMENT.query.filter(db.and_(
        CmtRECORDCOMMENT.id_bibrec == recid,
        CmtRECORDCOMMENT.in_reply_to_id_cmtRECORDCOMMENT == 0,
        CmtRECORDCOMMENT.star_score == 0
    )).order_by(CmtRECORDCOMMENT.date_creation).all()
    return render_template('comments/comments.html', comments=comments,
                           option='comments')
|
java
|
private void resetIdentifierType()
{
    Expression expression = getExpression();
    if( expression == null )
    {
        return;
    }
    Identifier identifier = getIdentifier();
    if( identifier == null )
    {
        return;
    }
    ISymbol symbol = identifier.getSymbol();
    if( symbol.getType() == GosuParserTypes.NULL_TYPE() )
    {
        symbol.setType( expression.getType() );
    }
}
|
java
|
public static void runExample(
    AdWordsServicesInterface adWordsServices, AdWordsSession session, long serviceLinkId)
    throws RemoteException {
    // Get the CustomerService.
    CustomerServiceInterface customerService =
        adWordsServices.get(session, CustomerServiceInterface.class);
    // Create the operation to set the status to ACTIVE.
    ServiceLinkOperation op = new ServiceLinkOperation();
    op.setOperator(Operator.SET);
    ServiceLink serviceLink = new ServiceLink();
    serviceLink.setServiceLinkId(serviceLinkId);
    serviceLink.setServiceType(ServiceType.MERCHANT_CENTER);
    serviceLink.setLinkStatus(ServiceLinkLinkStatus.ACTIVE);
    op.setOperand(serviceLink);
    // Update the service link.
    ServiceLink[] mutatedServiceLinks =
        customerService.mutateServiceLinks(new ServiceLinkOperation[] {op});
    // Display the results.
    for (ServiceLink mutatedServiceLink : mutatedServiceLinks) {
        System.out.printf(
            "Service link with service link ID %d, type '%s' updated to status: %s.%n",
            mutatedServiceLink.getServiceLinkId(),
            mutatedServiceLink.getServiceType(),
            mutatedServiceLink.getLinkStatus());
    }
}
|
python
|
def cli(execute, region, aws_access_key_id, aws_secret_access_key,
        s3_staging_dir, athenaclirc, profile, database):
    '''An Athena terminal client with auto-completion and syntax highlighting.
    \b
    Examples:
      - athenacli
      - athenacli my_database
    '''
    if (athenaclirc == ATHENACLIRC) and (not os.path.exists(os.path.expanduser(ATHENACLIRC))):
        err_msg = '''
        Welcome to athenacli!
        It seems this is your first time to run athenacli,
        we generated a default config file for you
            %s
        Please change it accordingly, and run athenacli again.
        ''' % ATHENACLIRC
        print(err_msg)
        write_default_config(DEFAULT_CONFIG_FILE, ATHENACLIRC)
        sys.exit(1)
    if profile != 'default':
        os.environ['AWS_PROFILE'] = profile
    athenacli = AthenaCli(
        region=region,
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        s3_staging_dir=s3_staging_dir,
        athenaclirc=athenaclirc,
        profile=profile,
        database=database
    )
    # --execute argument
    if execute:
        if os.path.exists(execute):
            with open(execute) as f:
                query = f.read()
        else:
            query = execute
        try:
            athenacli.formatter.format_name = 'csv'
            athenacli.run_query(query)
            exit(0)
        except Exception as e:
            click.secho(str(e), err=True, fg='red')
            exit(1)
    athenacli.run_cli()
|
java
|
public static double getDouble(String pStr) {
    if (isEmpty(pStr)) {
        return 0.0;
    }
    double value = 0.0;
    // the input carries an implied two decimal places, e.g. "1234" -> 12.34
    pStr = pStr.substring(0, pStr.length() - 2) + "." + pStr.substring(pStr.length() - 2);
    try {
        value = Double.parseDouble(pStr);
    } catch (NumberFormatException ex) {
        // swallow the parse failure and fall through to the 0.0 default
    }
    return value;
}
|
python
|
def get_namespace(self, uri):
    """Return a :class:`.Namespace` corresponding to the given ``uri``.
    If the given ``uri`` is a relative URI (i.e. it does not
    contain a leading slash ``/``), the ``uri`` is adjusted to
    be relative to the ``uri`` of the namespace itself. This
    method is therefore mostly useful off of the built-in
    ``local`` namespace, described in :ref:`namespace_local`.
    In most cases, a template wouldn't need this function, and
    should instead use the ``<%namespace>`` tag to load
    namespaces. However, since all ``<%namespace>`` tags are
    evaluated before the body of a template ever runs,
    this method can be used to locate namespaces using
    expressions that were generated within the body code of
    the template, or to conditionally use a particular
    namespace.
    """
    key = (self, uri)
    if key in self.context.namespaces:
        return self.context.namespaces[key]
    else:
        ns = TemplateNamespace(uri, self.context._copy(),
                               templateuri=uri,
                               calling_uri=self._templateuri)
        self.context.namespaces[key] = ns
        return ns
|
python
|
def _parse_response(xml):
    """
    Attempt to parse the SOAP response and return a python object
    Raise a WSManException if a Fault is found
    """
    try:
        soap_response = xmltodict.parse(xml, process_namespaces=True, namespaces=Service.Namespaces)
    except Exception:
        logging.debug('unable to parse the xml response: %s', xml)
        raise WSManException("the remote host returned an invalid soap response")
    # the delete response has an empty body
    body = soap_response['soap:Envelope']['soap:Body']
    if body is not None and 'soap:Fault' in body:
        raise WSManOperationException(body['soap:Fault']['soap:Reason']['soap:Text']['#text'])
    return body
|
python
|
def _lock(self):
    '''
    Locks, or returns False if already locked
    '''
    if not self._is_locked():
        with open(self._lck, 'w') as fh:
            if self._devel: self.logger.debug("Locking")
            fh.write(str(os.getpid()))
        return True
    else:
        return False
|
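The snippet calls `self._is_locked()`, which is not shown. A plausible counterpart, offered purely as a hypothetical sketch (the real implementation might also check whether the recorded pid is still alive):

import os

def _is_locked(self):
    # hypothetical: consider the lock held while the pid file exists
    return os.path.exists(self._lck)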
java
|
public static Crouton make(Activity activity, View customView, ViewGroup viewGroup) {
    return new Crouton(activity, customView, viewGroup);
}
|
python
|
def get_formatted_output(self, format_type=FormatType.JSON):
"""
:param format_type: The format need to output.
:return: formatted output.
:raise ConversionError: data can't be converted to the specified format_type.
"""
if format_type not in [FormatType.JSON, FormatType.YAML]:
raise ArgumentInvalid('"format_type" should be one of FormatType.JSON / YAML!')
elif format_type is FormatType.JSON:
return self._format_json()
elif format_type is FormatType.YAML:
return self._format_yaml()
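# Usage sketch (hypothetical instance):
#   text = report.get_formatted_output(FormatType.YAML)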
|
java
|
public static byte[] insert (byte[] values, byte value, int index)
{
byte[] nvalues = new byte[values.length+1];
if (index > 0) {
System.arraycopy(values, 0, nvalues, 0, index);
}
nvalues[index] = value;
if (index < values.length) {
System.arraycopy(values, index, nvalues, index+1, values.length-index);
}
return nvalues;
}
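// Usage sketch:
//   byte[] grown = insert(new byte[] { 1, 2, 4 }, (byte) 3, 2);
//   // grown is { 1, 2, 3, 4 }; the original array is left untouched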
|
python
|
def hours_estimate(self, branch='master', grouping_window=0.5, single_commit_hours=0.5, limit=None, days=None, committer=True, by=None, ignore_globs=None, include_globs=None):
"""
inspired by: https://github.com/kimmobrunfeldt/git-hours/blob/8aaeee237cb9d9028e7a2592a25ad8468b1f45e4/index.js#L114-L143
        Iterates through the commit history of repo to estimate the time commitment of each author or committer over
        the course of time indicated by limit/extensions/days/etc.
        :param branch: the branch to return commits for
        :param limit: (optional, default=None) a maximum number of commits to return, None for no limit
        :param grouping_window: (optional, default=0.5 hours) the threshold for how close two commits need to be to consider them part of one coding session
        :param single_commit_hours: (optional, default 0.5 hours) the time range to associate with one single commit
        :param days: (optional, default=None) number of days to return, if limit is None
        :param committer: (optional, default=True) whether to use committer vs. author
        :param by: (optional, default=None) group the result by 'committer'/'author' or by 'repository'
        :param ignore_globs: (optional, default=None) a list of globs to ignore, default none excludes nothing
        :param include_globs: (optional, default=None) a list of globs to include, default of None includes everything.
:return: DataFrame
"""
if limit is not None:
limit = int(limit / len(self.repo_dirs))
if committer:
com = 'committer'
else:
com = 'author'
df = pd.DataFrame(columns=[com, 'hours', 'repository'])
for repo in self.repos:
try:
ch = repo.hours_estimate(
branch,
grouping_window=grouping_window,
single_commit_hours=single_commit_hours,
limit=limit,
days=days,
committer=committer,
ignore_globs=ignore_globs,
include_globs=include_globs
)
ch['repository'] = repo.repo_name
df = df.append(ch)
except GitCommandError:
print('Warning! Repo: %s seems to not have the branch: %s' % (repo, branch))
        df = df.reset_index(drop=True)
if by == 'committer' or by == 'author':
df = df.groupby(com).agg({'hours': sum})
df = df.reset_index()
elif by == 'repository':
df = df.groupby('repository').agg({'hours': sum})
df = df.reset_index()
return df
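# Usage sketch (hypothetical multi-repo project object exposing this method):
#   df = project.hours_estimate(branch='master', by='committer')
#   print(df.sort_values('hours', ascending=False).head())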
|
java
|
@SuppressWarnings("unchecked")
public Duration<U> plus(TimeSpan<? extends U> timespan) {
Duration<U> result = merge(this, timespan);
if (result == null) {
long[] sums = new long[4];
sums[0] = 0;
sums[1] = 0;
sums[2] = 0;
sums[3] = 0;
if (summarize(this, sums) && summarize(timespan, sums)) {
long months = sums[0];
long days = sums[1];
long secs = sums[2];
long nanos = sums[3];
long daytime;
if (nanos != 0) {
daytime = nanos;
} else if (secs != 0) {
daytime = secs;
} else {
daytime = days;
}
if (!hasMixedSigns(months, daytime)) {
boolean neg = ((months < 0) || (daytime < 0));
if (neg) {
months = MathUtils.safeNegate(months);
days = MathUtils.safeNegate(days);
secs = MathUtils.safeNegate(secs);
nanos = MathUtils.safeNegate(nanos);
}
long years = months / 12;
months = months % 12;
long nanosecs = 0;
if (nanos != 0) {
nanosecs = nanos % MRD;
secs = nanos / MRD;
}
long hours = secs / 3600;
secs = secs % 3600;
long minutes = secs / 60;
secs = secs % 60;
Map<IsoUnit, Long> map = new HashMap<>();
map.put(YEARS, years);
map.put(MONTHS, months);
map.put(DAYS, days);
map.put(HOURS, hours);
map.put(MINUTES, minutes);
map.put(SECONDS, secs);
map.put(NANOS, nanosecs);
return (Duration<U>) Duration.create(map, neg);
}
}
throw new IllegalStateException(
"Mixed signs in result time span not allowed: "
+ this
+ " PLUS "
+ timespan);
}
return result;
}
|
java
|
private boolean drawPoint(BoundingBox boundingBox,
ProjectionTransform transform, FeatureTileGraphics graphics,
Point point, FeatureStyle featureStyle) {
boolean drawn = false;
Point projectedPoint = transform.transform(point);
float x = TileBoundingBoxUtils.getXPixel(tileWidth, boundingBox,
projectedPoint.getX());
float y = TileBoundingBoxUtils.getYPixel(tileHeight, boundingBox,
projectedPoint.getY());
if (featureStyle != null && featureStyle.hasIcon()) {
IconRow iconRow = featureStyle.getIcon();
BufferedImage icon = getIcon(iconRow);
int width = icon.getWidth();
int height = icon.getHeight();
if (x >= 0 - width && x <= tileWidth + width && y >= 0 - height
&& y <= tileHeight + height) {
float anchorU = (float) iconRow.getAnchorUOrDefault();
float anchorV = (float) iconRow.getAnchorVOrDefault();
int iconX = Math.round(x - (anchorU * width));
int iconY = Math.round(y - (anchorV * height));
Graphics2D iconGraphics = graphics.getIconGraphics();
iconGraphics.drawImage(icon, iconX, iconY, null);
drawn = true;
}
} else if (pointIcon != null) {
int width = Math.round(this.scale * pointIcon.getWidth());
int height = Math.round(this.scale * pointIcon.getHeight());
if (x >= 0 - width && x <= tileWidth + width && y >= 0 - height
&& y <= tileHeight + height) {
int iconX = Math.round(x - this.scale * pointIcon.getXOffset());
int iconY = Math.round(y - this.scale * pointIcon.getYOffset());
Graphics2D iconGraphics = graphics.getIconGraphics();
iconGraphics.drawImage(pointIcon.getIcon(), iconX, iconY,
width, height, null);
drawn = true;
}
} else {
Float radius = null;
if (featureStyle != null) {
StyleRow styleRow = featureStyle.getStyle();
if (styleRow != null) {
radius = this.scale
* (float) (styleRow.getWidthOrDefault() / 2.0f);
}
}
if (radius == null) {
radius = this.scale * pointRadius;
}
if (x >= 0 - radius && x <= tileWidth + radius && y >= 0 - radius
&& y <= tileHeight + radius) {
Graphics2D pointGraphics = graphics.getPointGraphics();
Paint pointPaint = getPointPaint(featureStyle);
pointGraphics.setColor(pointPaint.getColor());
int circleX = Math.round(x - radius);
int circleY = Math.round(y - radius);
int diameter = Math.round(radius * 2);
pointGraphics.fillOval(circleX, circleY, diameter, diameter);
drawn = true;
}
}
return drawn;
}
|
python
|
def print_alignment(mapping, instance1, instance2):
"""
    Return the alignment based on a node mapping as a printable string.
    Args:
        mapping: current node mapping list
        instance1: nodes of AMR 1
        instance2: nodes of AMR 2
    Returns:
        space-separated string pairing each AMR 1 node with its match
"""
result = []
for instance1_item, m in zip(instance1, mapping):
r = instance1_item[1] + "(" + instance1_item[2] + ")"
if m == -1:
r += "-Null"
else:
instance2_item = instance2[m]
r += "-" + instance2_item[1] + "(" + instance2_item[2] + ")"
result.append(r)
return " ".join(result)
|
java
|
public void fatal( String messagePattern, Object arg1, Object arg2 )
{
if( m_delegate.isFatalEnabled() )
{
String msgStr = MessageFormatter.format( messagePattern, arg1, arg2 );
m_delegate.fatal( msgStr, null );
}
}
|
python
|
def strip_prompt(self, a_string):
"""Strip the trailing router prompt from the output."""
expect_string = r"^(OK|ERROR|Command not recognized\.)$"
response_list = a_string.split(self.RESPONSE_RETURN)
last_line = response_list[-1]
if re.search(expect_string, last_line):
return self.RESPONSE_RETURN.join(response_list[:-1])
else:
return a_string
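# Usage sketch: for output whose final line is a bare status such as
# "OK", "ERROR" or "Command not recognized.", that line is stripped;
# any other trailing line leaves the output unchanged.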
|
java
|
private void renameColumn(CmsSetupDb dbCon, String tablename, String oldname, String newname) throws SQLException {
System.out.println(new Exception().getStackTrace()[0].toString());
if (dbCon.hasTableOrColumn(tablename, oldname)) {
String query = readQuery(QUERY_RENAME_COLUMN);
Map<String, String> replacer = new HashMap<String, String>();
replacer.put(REPLACEMENT_TABLENAME, tablename);
replacer.put(REPLACEMENT_COLUMN, oldname);
replacer.put(REPLACEMENT_NEW_COLUMN, newname);
dbCon.updateSqlStatement(query, replacer, null);
} else {
System.out.println("column " + oldname + " in table " + tablename + " not found exists");
}
}
|
java
|
@Nonnull
public ETriState isValidPostalCode (@Nullable final Locale aCountry, @Nullable final String sPostalCode)
{
final IPostalCodeCountry aPostalCountry = getPostalCountryOfCountry (aCountry);
if (aPostalCountry == null)
return ETriState.UNDEFINED;
return ETriState.valueOf (aPostalCountry.isValidPostalCode (sPostalCode));
}
|
python
|
def createFieldDescription(self):
"""
Provides a field description dict for swarm description.
:return: (dict)
"""
return {
"fieldName": self.getName(),
"fieldType": self._dataType,
"minValue": self._min,
"maxValue": self._max
}
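# Usage sketch (hypothetical encoder instance): the dict is meant to be
# dropped into a swarm description's list of fields, e.g.:
#   {"fieldName": "consumption", "fieldType": "float",
#    "minValue": 0.0, "maxValue": 100.0}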
|
java
|
@Override
public CreatePlatformEndpointResult createPlatformEndpoint(CreatePlatformEndpointRequest request) {
request = beforeClientExecution(request);
return executeCreatePlatformEndpoint(request);
}
|
python
|
def extractFromURL(url,
cache=False,
cacheDir='_cache',
verbose=False,
encoding=None,
filters=None,
userAgent=None,
timeout=5,
blur=5,
ignore_robotstxt=False,
only_mime_types=None,
raw=False):
"""
Extracts text from a URL.
Parameters:
url := string
Remote URL or local filename where HTML will be read.
cache := bool
True=store and retrieve url from cache
False=always retrieve url from the web
cacheDir := str
Directory where cached url contents will be stored.
verbose := bool
True=print logging messages
False=print no output
encoding := string
The encoding of the page contents.
If none given, it will attempt to guess the encoding.
See http://docs.python.org/howto/unicode.html for further info
on Python Unicode and encoding support.
filters := string
Comma-delimited list of filters to apply before parsing.
only_mime_types := list of strings
A list of mime-types to limit parsing to.
If the mime-type of the raw-content retrieved does not match
one of these, a value of None will be returned.
"""
blur = int(blur)
try:
import chardet
except ImportError as e:
raise ImportError(("%s\nYou need to install chardet.\n" + \
"e.g. sudo pip install chardet") % e)
if only_mime_types and isinstance(only_mime_types, six.text_type):
only_mime_types = only_mime_types.split(',')
# Load url from cache if enabled.
if cache:
if not os.path.isdir(cacheDir):
            cache_perms = 0o750  # '-rwxr-x---'
os.makedirs(cacheDir, cache_perms)
cache_key = generate_key(url)
cached_content = cache_get(cacheDir, cache_key)
if cached_content:
return cached_content
if not ignore_robotstxt:
if not check_robotstxt(url, cache, cacheDir, userAgent=userAgent):
if verbose: print("Request denied by robots.txt")
return ''
# Otherwise download the url.
if verbose: print('Reading %s...' % url)
html = fetch(
url,
timeout=timeout,
userAgent=userAgent,
only_mime_types=only_mime_types)
if not html:
return ''
# If no encoding guess given, then attempt to determine
# encoding automatically.
if not encoding:
        if isinstance(html, six.text_type):
html = html.encode('utf8', 'replace')
encoding_opinion = chardet.detect(html)
encoding = encoding_opinion['encoding']
if verbose: print('Using encoding %s.' % encoding)
# Save raw contents to cache if enabled.
if verbose: print('Read %i characters.' % len(html))
if cache:
raw_key = generate_key(url, "%s.raw")
cache_set(cacheDir, raw_key, html)
# Apply filters.
if filters:
filter_names = map(str.strip, filters.split(','))
for filter_name in filter_names:
fltr = get_filter(filter_name)
html = fltr(html)
# Clean up HTML.
html = tidyHTML(html)
if verbose: print('Extracted %i characters.' % len(html))
# Convert to Unicode.
if not html:
return ''
    if isinstance(html, bytes):
        html = html.decode(encoding, errors='replace')
if raw:
return html
# Extract text from HTML.
res = extractFromHTML(html, blur=blur)
    assert isinstance(res, six.text_type)
# Save extracted text to cache if enabled.
res = res.encode(encoding, 'ignore')
if cache:
cache_set(cacheDir, cache_key, res)
return res
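# Usage sketch (hypothetical call): fetch a page, cache the result and
# extract its text:
#   text = extractFromURL('http://example.com/', cache=True, verbose=True)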
|
python
|
def record_sets_list_by_dns_zone(zone_name, resource_group, top=None, recordsetnamesuffix=None, **kwargs):
'''
.. versionadded:: Fluorine
Lists all record sets in a DNS zone.
:param zone_name: The name of the DNS zone (without a terminating dot).
:param resource_group: The name of the resource group.
:param top: The maximum number of record sets to return. If not specified,
returns up to 100 record sets.
:param recordsetnamesuffix: The suffix label of the record set name that has
to be used to filter the record set enumerations.
CLI Example:
.. code-block:: bash
salt-call azurearm_dns.record_sets_list_by_dns_zone myzone testgroup
'''
result = {}
dnsconn = __utils__['azurearm.get_client']('dns', **kwargs)
try:
record_sets = __utils__['azurearm.paged_object_to_list'](
dnsconn.record_sets.list_by_dns_zone(
zone_name=zone_name,
resource_group_name=resource_group,
top=top,
recordsetnamesuffix=recordsetnamesuffix
)
)
for record_set in record_sets:
result[record_set['name']] = record_set
except CloudError as exc:
__utils__['azurearm.log_cloud_error']('dns', str(exc), **kwargs)
result = {'error': str(exc)}
return result
|
java
|
private void merge(FieldNumberTree other) {
for (Key key : other.children.keySet()) {
FieldNumberTree value = other.children.get(key);
if (!this.children.containsKey(key)) {
this.children.put(key, value);
} else {
this.children.get(key).merge(value);
}
}
}
|
java
|
private String getNextLineOrVirtualBlankLineAtEndOfStream(String nextLine) {
if ((nextLine == null) && !isEndOfStream) {
isEndOfStream = true;
return "";
}
return nextLine;
}
|
python
|
def get_state_tuple(state, state_m=None):
""" Generates a tuple that holds the state as yaml-strings and its meta data in a dictionary.
The tuple consists of:
[0] json_str for state,
[1] dict of child_state tuples,
[2] dict of model_meta-data of self and elements
[3] path of state in state machine
[4] script_text
[5] file system path
[6] semantic data
# states-meta - [state-, transitions-, data_flows-, outcomes-, inputs-, outputs-, scopes, states-meta]
:param rafcon.core.states.state.State state: The state that should be stored
:return: state_tuple tuple
"""
state_str = json.dumps(state, cls=JSONObjectEncoder,
indent=4, check_circular=False, sort_keys=True)
state_tuples_dict = {}
if isinstance(state, ContainerState):
# print(state.states, "\n")
for child_state_id, child_state in state.states.items():
# print("child_state: %s" % child_state_id, child_state, "\n")
state_tuples_dict[child_state_id] = get_state_tuple(child_state)
state_meta_dict = {} if state_m is None else get_state_element_meta(state_m)
script_content = state.script.script if isinstance(state, ExecutionState) else None
state_tuple = (state_str, state_tuples_dict, state_meta_dict, state.get_path(), script_content,
state.file_system_path, copy.deepcopy(state.semantic_data))
return state_tuple
|
java
|
public MethodDescriptor[] getMethodDescriptors() {
BeanInfo info = getBeanInfo();
if (info == null) {
return null;
}
MethodDescriptor[] mds = info.getMethodDescriptors();
getTeaToolsUtils().sortMethodDescriptors(mds);
return mds;
}
|
java
|
private boolean isToBeCastedAnyType(LightweightTypeReference actualType, XExpression obj, ITreeAppendable appendable) {
if (actualType instanceof AnyTypeReference) {
if (getReferenceName(obj, appendable) != null)
return true;
else if (obj instanceof XBlockExpression) {
XBlockExpression blockExpression = (XBlockExpression) obj;
EList<XExpression> expressions = blockExpression.getExpressions();
if (expressions.isEmpty())
return false;
if (expressions.size() > 1)
return true;
XExpression last = expressions.get(0);
return isToBeCastedAnyType(actualType, last, appendable);
}
}
return false;
}
|
python
|
def smooth_mesh(mesh, n_iter=4, lam=0.6307, mu=-0.6347,
weights=None, bconstr=True,
volume_corr=False):
"""
FE mesh smoothing.
Based on:
[1] Steven K. Boyd, Ralph Muller, Smooth surface meshing for automated
finite element model generation from 3D image data, Journal of
Biomechanics, Volume 39, Issue 7, 2006, Pages 1287-1295,
ISSN 0021-9290, 10.1016/j.jbiomech.2005.03.006.
(http://www.sciencedirect.com/science/article/pii/S0021929005001442)
Parameters
----------
mesh : mesh
FE mesh.
n_iter : integer, optional
Number of iteration steps.
lam : float, optional
Smoothing factor, see [1].
mu : float, optional
Unshrinking factor, see [1].
weights : array, optional
Edge weights, see [1].
bconstr: logical, optional
Boundary constraints, if True only surface smoothing performed.
volume_corr: logical, optional
Correct volume after smoothing process.
Returns
-------
coors : array
Coordinates of mesh nodes.
"""
def laplacian(coors, weights):
n_nod = coors.shape[0]
displ = (weights - sps.identity(n_nod)) * coors
return displ
def taubin(coors0, weights, lam, mu, n_iter):
coors = coors0.copy()
for ii in range(n_iter):
displ = laplacian(coors, weights)
if nm.mod(ii, 2) == 0:
coors += lam * displ
else:
coors += mu * displ
return coors
def dets_fast(a):
m = a.shape[0]
n = a.shape[1]
lapack_routine = lapack_lite.dgetrf
pivots = nm.zeros((m, n), intc)
flags = nm.arange(1, n + 1).reshape(1, -1)
        for i in range(m):
tmp = a[i]
lapack_routine(n, n, tmp, n, pivots[i], 0)
sign = 1. - 2. * (nm.add.reduce(pivots != flags, axis=1) % 2)
idx = nm.arange(n)
d = a[:, idx, idx]
absd = nm.absolute(d)
sign *= nm.multiply.reduce(d / absd, axis=1)
nm.log(absd, absd)
logdet = nm.add.reduce(absd, axis=-1)
return sign * nm.exp(logdet)
def get_volume(el, nd):
dim = nd.shape[1]
nnd = el.shape[1]
etype = '%d_%d' % (dim, nnd)
if etype == '2_4' or etype == '3_8':
el = elems_q2t(el)
nel = el.shape[0]
mul = 1.0 / factorial(dim)
if dim == 3:
mul *= -1.0
mtx = nm.ones((nel, dim + 1, dim + 1), dtype=nm.double)
mtx[:,:,:-1] = nd[el,:]
        vols = mul * dets_fast(mtx.copy())  # dets_fast() modifies its argument in place, so pass a copy
vol = vols.sum()
bc = nm.dot(vols, mtx.sum(1)[:,:-1] / nnd)
bc /= vol
return vol, bc
n_nod = mesh.n_nod
if weights is None:
# initiate all vertices as inner - hierarchy = 2
node_group = nm.ones((n_nod,), dtype=nm.int8) * 2
sndi, edges = get_snodes_uedges(mesh.conns[0], mesh.descs[0])
# boundary vertices - set hierarchy = 4
if bconstr:
node_group[sndi] = 4
# generate costs matrix
end1 = edges[:,0]
end2 = edges[:,1]
idxs = nm.where(node_group[end2] >= node_group[end1])
rows1 = end1[idxs]
cols1 = end2[idxs]
idxs = nm.where(node_group[end1] >= node_group[end2])
rows2 = end2[idxs]
cols2 = end1[idxs]
crows = nm.concatenate((rows1, rows2))
ccols = nm.concatenate((cols1, cols2))
costs = sps.coo_matrix((nm.ones_like(crows), (crows, ccols)),
shape=(n_nod, n_nod),
dtype=nm.double)
# generate weights matrix
idxs = range(n_nod)
aux = sps.coo_matrix((1.0 / nm.asarray(costs.sum(1)).squeeze(),
(idxs, idxs)),
shape=(n_nod, n_nod),
dtype=nm.double)
#aux.setdiag(1.0 / costs.sum(1))
weights = (aux.tocsc() * costs.tocsc()).tocsr()
coors = taubin(mesh.coors, weights, lam, mu, n_iter)
if volume_corr:
volume0, bc = get_volume(mesh.conns[0], mesh.coors)
volume, _ = get_volume(mesh.conns[0], coors)
scale = volume0 / volume
coors = (coors - bc) * scale + bc
return coors
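# Usage sketch (hypothetical mesh object matching the attributes used above):
#   mesh.coors[:] = smooth_mesh(mesh, n_iter=10, volume_corr=True)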
|
python
|
def tracker():
"""start a tracker to register running models"""
application = mmi.tracker.app()
application.listen(22222)
logger.info('serving at port 22222')
tornado.ioloop.IOLoop.instance().start()
|
python
|
def create_tag(tag_name, kind='z'):
'''
Create tag record by tag_name
'''
cur_recs = TabTag.select().where(
(TabTag.name == tag_name) &
(TabTag.kind == kind)
)
if cur_recs.count():
uid = cur_recs.get().uid
# TabTag.delete().where(
# (TabTag.name == tag_name) &
# (TabTag.kind == kind)
# ).execute()
else:
uid = tools.get_uu4d_v2() # Label with the ID of v2.
while TabTag.select().where(TabTag.uid == uid).count() > 0:
uid = tools.get_uu4d_v2()
TabTag.create(
uid=uid,
slug=uid,
name=tag_name,
order=1,
count=0,
            kind=kind,
tmpl=9,
pid='zzzz',
)
return uid
|
python
|
def get_log_hierarchy_session(self, proxy):
"""Gets the ``OsidSession`` associated with the log hierarchy service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.logging.LogHierarchySession) - a
``LogHierarchySession`` for logs
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_log_hierarchy()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_log_hierarchy()`` is ``true``.*
"""
if not self.supports_log_hierarchy():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.LogHierarchySession(proxy=proxy, runtime=self._runtime)
|
java
|
public WebSocket flush()
{
synchronized (mStateManager)
{
WebSocketState state = mStateManager.getState();
if (state != OPEN && state != CLOSING)
{
return this;
}
}
// Get the reference to the instance of WritingThread.
WritingThread wt = mWritingThread;
// If and only if an instance of WritingThread is available.
if (wt != null)
{
// Request flush.
wt.queueFlush();
}
return this;
}
|
python
|
def vrelg(v1, v2, ndim):
"""
Return the relative difference between two vectors of general dimension.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vrelg_c.html
:param v1: First vector
:type v1: Array of floats
:param v2: Second vector
:type v2: Array of floats
:param ndim: Dimension of v1 and v2.
:type ndim: int
:return: the relative difference between v1 and v2.
:rtype: float
"""
v1 = stypes.toDoubleVector(v1)
v2 = stypes.toDoubleVector(v2)
ndim = ctypes.c_int(ndim)
return libspice.vrelg_c(v1, v2, ndim)
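# Usage sketch: per the linked CSPICE docs this computes
# ||v1 - v2|| / max(||v1||, ||v2||), so identical vectors give 0.0:
#   vrelg([1.0, 2.0, 3.0], [1.0, 2.0, 3.0], 3)  # -> 0.0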
|
java
|
public GetStatUvResponse getStatUv(GetStatUvRequest request) {
InternalRequest internalRequest = this.createRequest(request, HttpMethodName.GET, STAT, "uv");
if (request.getStartTime() != null) {
internalRequest.addParameter("startTime", DateUtils.formatAlternateIso8601Date(request.getStartTime()));
}
if (request.getEndTime() != null) {
internalRequest.addParameter("endTime", DateUtils.formatAlternateIso8601Date(request.getEndTime()));
}
if (request.getDomain() != null) {
internalRequest.addParameter("domain", request.getDomain());
}
if (request.getPeriod() != null) {
internalRequest.addParameter("period", String.valueOf(request.getPeriod()));
}
return this.invokeHttpClient(internalRequest, GetStatUvResponse.class);
}
|
java
|
protected ClientResponse deleteResponse(final WebResource webResource, final boolean checkResponse) {
final ClientResponse response = webResource.accept(MediaType.APPLICATION_JSON).delete(ClientResponse.class);
if(checkResponse) {
this.checkResponse(response);
}
return response;
}
|
python
|
def get_plugins(modules, classobj):
"""Find all class objects in all modules.
@param modules: the modules to search
    @type modules: iterator of modules
@return: found classes
    @rtype: iterator of class objects
"""
for module in modules:
for plugin in get_module_plugins(module, classobj):
yield plugin
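# Usage sketch (hypothetical plugin framework):
#   for plugin_cls in get_plugins([my_module], BasePlugin):
#       register(plugin_cls)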
|
java
|
private void setupJDK() throws AbortException, IOException, InterruptedException {
String jdkInstallationName = step.getJdk();
if (StringUtils.isEmpty(jdkInstallationName)) {
console.println("[withMaven] using JDK installation provided by the build agent");
return;
}
if (withContainer) {
// see #detectWithContainer()
LOGGER.log(Level.FINE, "Ignoring JDK installation parameter: {0}", jdkInstallationName);
console.println("WARNING: \"withMaven(){...}\" step running within a container," +
" tool installations are not available see https://issues.jenkins-ci.org/browse/JENKINS-36159. " +
"You have specified a JDK installation \"" + jdkInstallationName + "\", which will be ignored.");
return;
}
console.println("[withMaven] using JDK installation " + jdkInstallationName);
JDK jdk = Jenkins.getInstance().getJDK(jdkInstallationName);
if (jdk == null) {
throw new AbortException("Could not find the JDK installation: " + jdkInstallationName + ". Make sure it is configured on the Global Tool Configuration page");
}
Node node = getComputer().getNode();
if (node == null) {
throw new AbortException("Could not obtain the Node for the computer: " + getComputer().getName());
}
jdk = jdk.forNode(node, listener).forEnvironment(env);
jdk.buildEnvVars(envOverride);
}
|
python
|
def read_field_h5(xdmf_file, fieldname, snapshot, header=None):
"""Extract field data from hdf5 files.
Args:
xdmf_file (:class:`pathlib.Path`): path of the xdmf file.
fieldname (str): name of field to extract.
snapshot (int): snapshot number.
header (dict): geometry information.
Returns:
(dict, numpy.array): geometry information and field data. None
is returned if data is unavailable.
"""
if header is None:
header, xdmf_root = read_geom_h5(xdmf_file, snapshot)
else:
xdmf_root = xmlET.parse(str(xdmf_file)).getroot()
npc = header['nts'] // header['ncs'] # number of grid point per node
flds = np.zeros(_flds_shape(fieldname, header))
data_found = False
for elt_subdomain in xdmf_root[0][0][snapshot].findall('Grid'):
ibk = int(elt_subdomain.get('Name').startswith('meshYang'))
for data_attr in elt_subdomain.findall('Attribute'):
if data_attr.get('Name') != fieldname:
continue
icore, fld = _get_field(xdmf_file, data_attr.find('DataItem'))
# for some reason, the field is transposed
fld = fld.T
shp = fld.shape
if shp[-1] == 1 and header['nts'][0] == 1: # YZ
fld = fld.reshape((shp[0], 1, shp[1], shp[2]))
if header['rcmb'] < 0:
fld = fld[(2, 0, 1), ...]
elif shp[-1] == 1: # XZ
fld = fld.reshape((shp[0], shp[1], 1, shp[2]))
if header['rcmb'] < 0:
fld = fld[(0, 2, 1), ...]
elif header['nts'][1] == 1: # cart XZ
fld = fld.reshape((1, shp[0], 1, shp[1]))
ifs = [icore // np.prod(header['ncs'][:i]) % header['ncs'][i] *
npc[i] for i in range(3)]
if header['zp']: # remove top row
fld = fld[:, :, :, :-1]
flds[:,
ifs[0]:ifs[0] + npc[0] + header['xp'],
ifs[1]:ifs[1] + npc[1] + header['yp'],
ifs[2]:ifs[2] + npc[2],
ibk] = fld
data_found = True
flds = _post_read_flds(flds, header)
return (header, flds) if data_found else None
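# Usage sketch (hypothetical paths and field name):
#   res = read_field_h5(Path('run.xdmf'), 'Temperature', 5)
#   if res is not None:
#       header, temp = res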
|
java
|
public static base_responses update(nitro_service client, vpnvserver resources[]) throws Exception {
base_responses result = null;
if (resources != null && resources.length > 0) {
vpnvserver updateresources[] = new vpnvserver[resources.length];
for (int i=0;i<resources.length;i++){
updateresources[i] = new vpnvserver();
updateresources[i].name = resources[i].name;
updateresources[i].ipv46 = resources[i].ipv46;
updateresources[i].authentication = resources[i].authentication;
updateresources[i].doublehop = resources[i].doublehop;
updateresources[i].icaonly = resources[i].icaonly;
updateresources[i].maxaaausers = resources[i].maxaaausers;
updateresources[i].downstateflush = resources[i].downstateflush;
updateresources[i].listenpolicy = resources[i].listenpolicy;
updateresources[i].listenpriority = resources[i].listenpriority;
updateresources[i].tcpprofilename = resources[i].tcpprofilename;
updateresources[i].httpprofilename = resources[i].httpprofilename;
updateresources[i].comment = resources[i].comment;
updateresources[i].appflowlog = resources[i].appflowlog;
updateresources[i].icmpvsrresponse = resources[i].icmpvsrresponse;
updateresources[i].netprofile = resources[i].netprofile;
updateresources[i].cginfrahomepageredirect = resources[i].cginfrahomepageredirect;
updateresources[i].maxloginattempts = resources[i].maxloginattempts;
updateresources[i].failedlogintimeout = resources[i].failedlogintimeout;
}
result = update_bulk_request(client, updateresources);
}
return result;
}
|
java
|
@Override
public final void setSymbolTable(SymbolTable symbols)
throws IOException
{
if (symbols == null || _Private_Utils.symtabIsSharedNotSystem(symbols)) {
throw new IllegalArgumentException("symbol table must be local or system to be set, or reset");
}
if (getDepth() > 0) {
throw new IllegalStateException("the symbol table cannot be set, or reset, while a container is open");
}
_symbol_table = symbols;
}
|