language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
---|---|
java | protected void addToList(List<IEObjectDescription> descriptions, List<IEObjectDescription> result) {
result.addAll(descriptions);
} |
def close_alert(name=None, api_key=None, reason="Conditions are met.",
                action_type="Close"):
    '''
    Close an alert in OpsGenie; a thin wrapper around create_alert.

    Required Parameters:

    name
        Used as the alert's alias. To close an alert, the same name must be
        supplied in both the create and close states.

    Optional Parameters:

    api_key
        The API Key copied while adding the integration in OpsGenie.

    reason
        Used as the alert's default message in OpsGenie.

    action_type
        OpsGenie supports the default values Create/Close for action_type.
        This field can also carry one of OpsGenie's custom actions, e.g.
        adding notes or acknowledging alerts.
    '''
    if name is None:
        raise salt.exceptions.SaltInvocationError(
            'Name cannot be None.')
    return create_alert(name, api_key, reason, action_type)
java | @POST
@Produces({"script/groovy"})
@Path("src/{repository}/{workspace}/{path:.*}")
public Response getScript(@PathParam("repository") String repository, @PathParam("workspace") String workspace,
@PathParam("path") String path)
{
Session ses = null;
try
{
ses =
sessionProviderService.getSessionProvider(null).getSession(workspace,
repositoryService.getRepository(repository));
Node scriptFile = (Node)ses.getItem("/" + path);
return Response.status(Response.Status.OK).entity(
scriptFile.getNode("jcr:content").getProperty("jcr:data").getStream()).type("script/groovy").build();
}
catch (PathNotFoundException e)
{
String msg = "Path " + path + " does not exists";
LOG.error(msg);
return Response.status(Response.Status.NOT_FOUND).entity(msg).entity(MediaType.TEXT_PLAIN).build();
}
catch (Exception e)
{
LOG.error(e.getMessage(), e);
return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(e.getMessage())
.type(MediaType.TEXT_PLAIN).build();
}
finally
{
if (ses != null)
{
ses.logout();
}
}
} |
def cast_callback(value):
    """Normalize a driver-supplied timestamp string into a ``datetime``.

    Postgresql / MySQL drivers may emit ISO-style 'YYYY-MM-DDTHH:MM:SS'
    values for TIMESTAMP columns; the 'T' separator is replaced by a space
    and any fractional-second suffix is dropped before parsing.
    """
    normalized = value.replace('T', ' ') if 'T' in value else value
    whole_seconds = normalized.split('.')[0]
    return datetime.strptime(whole_seconds, '%Y-%m-%d %H:%M:%S')
python | def _lats(self):
"""Return the latitudes (in degrees) of the gridded data."""
lats = _np.linspace(90.0, -90.0 + 180.0 / self.nlat, num=self.nlat)
return lats |
java | @Override
public String extractSignedToken(String token) {
String[] chunks = token.split("-", 3);
if (chunks.length != 3) {
// Invalid format
return null;
}
String signature = chunks[0];
String nonce = chunks[1];
String raw = chunks[2];
if (constantTimeEquals(signature, sign(nonce + "-" + raw))) {
return raw;
} else {
return null;
}
} |
java | public boolean wasKeyTyped(int keyCode) {
if (this.isKeyDown(keyCode) && !this.checked.contains(keyCode)) {
this.checked.add(keyCode);
return true;
}
return false;
} |
java | public boolean isRegistered( String clientID , String subscriptionName )
{
String key = clientID+"-"+subscriptionName;
return subscriptions.containsKey(key);
} |
def _wr_txt_nts(self, fout_txt, desc2nts, objgowr, verbose):
    """Write grouped and sorted GO IDs to GOs."""
    banner = '# ----------------------------------------------------------------\n'
    with open(fout_txt, 'w') as prt:
        self._prt_ver_n_key(prt, verbose)
        prt.write('\n\n')
        prt.write(banner)
        prt.write('# - Sections and GO IDs\n')
        prt.write(banner)
        prtfmt = self._get_prtfmt(objgowr, verbose)
        summary_dct = objgowr.prt_txt_desc2nts(prt, desc2nts, prtfmt)
        # Report what was written; the detailed line needs the summary counts.
        if summary_dct:
            print("  {N:>5} GO IDs WROTE: {FOUT} ({S} sections)".format(
                N=desc2nts['num_items'], FOUT=fout_txt, S=desc2nts['num_sections']))
        else:
            print("  WROTE: {TXT}".format(TXT=fout_txt))
def urlize_twitter(text):
    """
    Replace #hashtag and @username references in a tweet with HTML links.
    """
    linked = TwitterText(text).autolink.auto_link()
    realtime = linked.replace('twitter.com/search?q=', 'twitter.com/search/realtime/')
    return mark_safe(realtime)
def fit(arr, dist='norm'):
    """Fit an array to a univariate distribution along the time dimension.
    Parameters
    ----------
    arr : xarray.DataArray
        Time series to be fitted along the time dimension.
    dist : str
        Name of the univariate distribution, such as beta, expon, genextreme, gamma, gumbel_r, lognorm, norm
        (see scipy.stats).
    Returns
    -------
    xarray.DataArray
        An array of distribution parameters fitted using the method of Maximum Likelihood.
    """
    # Get the scipy.stats distribution object for the requested name.
    dc = get_dist(dist)
    # Fit the parameters lazily: dask applies dc.fit along the time axis,
    # producing one extra leading axis of distribution parameters.
    data = dask.array.apply_along_axis(dc.fit, arr.get_axis_num('time'), arr)
    # Count the number of values used for the fit.
    # n = arr.count(dim='time')
    # Create a view to a DataArray with the desired dimensions to copy them over to the parameter array.
    # The time-mean has exactly the output's non-parameter shape and coords.
    mean = arr.mean(dim='time', keep_attrs=True)
    # Create coordinate for the distribution parameters: any shape parameters
    # declared by the distribution, followed by loc and scale.
    coords = dict(mean.coords.items())
    coords['dparams'] = ([] if dc.shapes is None else dc.shapes.split(',')) + ['loc', 'scale']
    # TODO: add time and time_bnds coordinates (Low will work on this)
    # time.attrs['climatology'] = 'climatology_bounds'
    # coords['time'] =
    # coords['climatology_bounds'] =
    out = xr.DataArray(data=data, coords=coords, dims=(u'dparams',) + mean.dims)
    # NOTE(review): out.attrs is assigned from arr.attrs and then mutated below;
    # assumes xarray copies the dict on assignment so arr.attrs is untouched — confirm.
    out.attrs = arr.attrs
    out.attrs['original_name'] = getattr(arr, 'standard_name', '')
    out.attrs['standard_name'] = '{0} distribution parameters'.format(dist)
    out.attrs['long_name'] = '{0} distribution parameters for {1}'.format(dist, getattr(arr, 'standard_name', ''))
    out.attrs['estimator'] = 'Maximum likelihood'
    # Append the fit step to any pre-existing cell_methods entry.
    out.attrs['cell_methods'] = (out.attrs.get('cell_methods', '') + ' time: fit').strip()
    out.attrs['units'] = ''
    msg = '\nData fitted with {0} statistical distribution using a Maximum Likelihood Estimator'
    out.attrs['history'] = out.attrs.get('history', '') + msg.format(dist)
    return out
/**
 * Executes the polled-device command: queries the owning DServer for its
 * polled devices and wraps the result in a CORBA Any.
 *
 * @param device the device the command is invoked on (expected to be a DServer)
 * @param in_any command input (unused by this command)
 * @return the polled device list inserted into an Any
 * @throws DevFailed propagated from the device call
 */
public Any execute(DeviceImpl device, Any in_any) throws DevFailed
{
    Util.out4.println("PolledDeviceCmd.execute(): arrived ");
    // Call the device method and return to caller
    return insert(((DServer)(device)).polled_device());
}
java | public List<String> parse(String[] args) { // merge args & defaults
List<String> extras = new ArrayList<>();
List<String> filteredArgs = filterMonadics(args); // detect and fill mons
for (int i = 0; i < filteredArgs.size(); i++) {
String key = filteredArgs.get(i);
if (key.equalsIgnoreCase("-h") || key.equalsIgnoreCase("-help")) {
System.out.println(usage);
System.exit(0);
}
if (parameters.containsKey(key)) {
Parameter param = parameters.get(key);
param.value = filteredArgs.get(i+1);
switch(param.paramType) {
case INTEGER:
try {
Integer.parseInt(param.value);
} catch (Exception ex) {
System.err.println("Invalid parameter " + param.name + ' ' + param.value);
System.err.println(usage);
System.exit(1);
}
break;
case FLOAT:
try {
Double.parseDouble(param.value);
} catch (Exception ex) {
System.err.println("Invalid parameter " + param.name + ' ' + param.value);
System.err.println(usage);
System.exit(1);
}
break;
default:
// Just to remove unmatched case warning
}
i++;
} else {
extras.add(key);
}
}
for(String key : parameters.keySet()) {
Parameter param = parameters.get(key);
if (param.mandatory && param.value == null) {
System.err.println("Missing mandatory parameter: " + key);
System.err.println(usage);
System.exit(1);
}
}
return extras; // parsed + defaults
} |
java | @Override
public boolean eIsSet(int featureID) {
switch (featureID) {
case AfplibPackage.MSU__RG:
return rg != null && !rg.isEmpty();
}
return super.eIsSet(featureID);
} |
def hset(self, hashkey, attribute, value):
    """Emulate hset: set a hash field, returning 1 when the field is new, else 0."""
    redis_hash = self._get_hash(hashkey, 'HSET', create=True)
    field = self._encode(attribute)
    is_new_field = field not in redis_hash
    redis_hash[field] = self._encode(value)
    return long(1) if is_new_field else long(0)
java | public void setPreferredMacCS(String name) throws SshException {
if (name == null)
return;
if (macCS.contains(name)) {
prefMacCS = name;
setMacPreferredPositionCS(name, 0);
} else {
throw new SshException(name + " is not supported",
SshException.UNSUPPORTED_ALGORITHM);
}
} |
/**
 * Convenience overload: runs the async list call with a default (empty)
 * {@link ListOnPremisesInstancesRequest}, delegating to the two-argument form.
 *
 * @param asyncHandler callback invoked on completion or failure
 * @return future for the list result
 */
@Override
public java.util.concurrent.Future<ListOnPremisesInstancesResult> listOnPremisesInstancesAsync(
        com.amazonaws.handlers.AsyncHandler<ListOnPremisesInstancesRequest, ListOnPremisesInstancesResult> asyncHandler) {
    return listOnPremisesInstancesAsync(new ListOnPremisesInstancesRequest(), asyncHandler);
}
def get_kibiter_version(url):
    """
    Return kibiter major number version

    The url must point to the Elasticsearch used by Kibiter
    """
    # Build the .kibana config search endpoint, avoiding '//' which ES rejects.
    endpoint = url
    if endpoint[-1] != '/':
        endpoint += "/"
    endpoint += '.kibana/config/_search'
    res = requests.get(endpoint)
    res.raise_for_status()
    hits = res.json()['hits']['hits']
    if len(hits) == 0:
        logger.error("Can not get the Kibiter version")
        return None
    # The document _id looks like "5.4.0-SNAPSHOT"; keep only the major number.
    version = hits[0]['_id']
    return version.split(".", 1)[0]
java | public static RenderingContext anonymous(final ApplicationInfo appInfo,
final CalendarProvider provider) {
final UserImpl user2 = new UserImpl();
user2.setUsername("Anonymous");
user2.setFirstname("Al");
user2.setLastname("Anonymous");
user2.clearRoles();
user2.setEmail("[email protected]");
return new RenderingContext(user2, appInfo, provider);
} |
/**
 * Returns a fresh stream of all supported language-code constants,
 * including the {@code LANGUAGE_CODE_XX} placeholder.
 *
 * @return stream of supported language codes
 */
public static Stream<String> getLanguageCodes() {
    return Stream.of(
        LANGUAGE_CODE_EN,
        LANGUAGE_CODE_NL,
        LANGUAGE_CODE_DE,
        LANGUAGE_CODE_ES,
        LANGUAGE_CODE_IT,
        LANGUAGE_CODE_PT,
        LANGUAGE_CODE_FR,
        LANGUAGE_CODE_XX);
}
java | public static boolean matchBitByBitIndex(final int pVal, final int pBitIndex) {
if (pBitIndex < 0 || pBitIndex > MAX_BIT_INTEGER) {
throw new IllegalArgumentException(
"parameter 'pBitIndex' must be between 0 and 31. pBitIndex=" + pBitIndex);
}
return (pVal & 1 << pBitIndex) != 0;
} |
java | @Override
public void setData(final Object data) {
if (isRichTextArea() && data instanceof String) {
super.setData(sanitizeInputText((String) data));
} else {
super.setData(data);
}
} |
def get_target_list(self, scan_id):
    """Return the list of targets registered for the given scan."""
    # Each 'targets' entry is a 3-tuple whose first element is the target.
    return [target for target, _, _ in self.scans_table[scan_id]['targets']]
java | public List<Grant> getGrantsAsList() {
checkState();
if (grantList == null) {
if (grantSet == null) {
grantList = new LinkedList<Grant>();
} else {
grantList = new LinkedList<Grant>(grantSet);
grantSet = null;
}
}
return grantList;
} |
def from_dict(self, document):
    """Create experiment object from JSON document retrieved from database.

    Parameters
    ----------
    document : JSON
        Json document in database

    Returns
    -------
    ExperimentHandle
        Handle for experiment object
    """
    # Timestamps are stored in ISO format with microseconds.
    created_at = datetime.datetime.strptime(document['timestamp'],
                                            '%Y-%m-%dT%H:%M:%S.%f')
    return ExperimentHandle(
        str(document['_id']),
        document['properties'],
        document['subject'],
        document['images'],
        fmri_data_id=document.get('fmri'),  # optional field
        timestamp=created_at,
        is_active=document['active']
    )
java | static HeaderEntry parse(final String elementLine) throws IOException {
if (!elementLine.startsWith("element ")) {
throw new IOException("not an element: '"
+ elementLine + "'");
}
String definition = elementLine.substring("element ".length());
String[] parts = definition.split(" +", 2);
if (parts.length != 2) {
throw new IOException("Expected two parts in element definition: '"
+ elementLine + "'");
}
String name = parts[0];
String countStr = parts[1];
int count;
try {
count = Integer.parseInt(countStr);
}
catch (NumberFormatException e) {
throw new IOException("Invalid element entry. Not an integer: '"
+ countStr + "'.");
}
return new HeaderEntry(name, count);
} |
def devices_data():
    """Return a mapping of device name -> partition info, with usage stats
    added for mounted partitions.
    """
    data = {}
    for partition in psutil.disk_partitions():
        entry = {
            'device': partition.device,
            'mountpoint': partition.mountpoint,
            'fstype': partition.fstype,
            'opts': partition.opts,
        }
        if partition.mountpoint:
            usage = psutil.disk_usage(partition.mountpoint)
            entry['usage'] = {
                'size': mark(usage.total, 'bytes'),
                'used': mark(usage.used, 'bytes'),
                'free': mark(usage.free, 'bytes'),
                'percent': mark(usage.percent, 'percentage')
            }
        data[partition.device] = entry
    return data
def detect_python2(source, pathname):
    """
    Returns a bool indicating whether we think the code is Py2
    """
    # Prepare the lib2to3-based refactoring tools used for detection.
    RTs.setup_detect_python2()
    try:
        tree = RTs._rt_py2_detect.refactor_string(source, pathname)
    except ParseError as e:
        # A bare "= " parse failure can mean print-statement style code;
        # retry with the alternate refactorer before giving up.
        if e.msg != 'bad input' or e.value != '=':
            raise
        tree = RTs._rtp.refactor_string(source, pathname)
    if source != str(tree)[:-1]:  # remove added newline
        # The above fixers made changes, so we conclude it's Python 2 code
        logger.debug('Detected Python 2 code: {0}'.format(pathname))
        # NOTE(review): unconditionally writes debug artifacts to /tmp;
        # consider guarding behind a debug flag.
        with open('/tmp/original_code.py', 'w') as f:
            f.write('### Original code (detected as py2): %s\n%s' %
                    (pathname, source))
        with open('/tmp/py2_detection_code.py', 'w') as f:
            f.write('### Code after running py3 detection (from %s)\n%s' %
                    (pathname, str(tree)[:-1]))
        return True
    else:
        logger.debug('Detected Python 3 code: {0}'.format(pathname))
        with open('/tmp/original_code.py', 'w') as f:
            f.write('### Original code (detected as py3): %s\n%s' %
                    (pathname, source))
        # Remove any stale artifact from a previous py2 run; absence is fine.
        try:
            os.remove('/tmp/futurize_code.py')
        except OSError:
            pass
        return False
/**
 * Generates the "class use" page for the current package: builds the page
 * body (either the usage tables or a "no usage" note), then appends
 * navigation links and the footer, honoring the configured HTML5 tag
 * allowances (MAIN/FOOTER).
 *
 * @throws DocFileIOException if writing the HTML document fails
 */
protected void generatePackageUseFile() throws DocFileIOException {
    HtmlTree body = getPackageUseHeader();
    HtmlTree div = new HtmlTree(HtmlTag.DIV);
    div.addStyle(HtmlStyle.contentContainer);
    if (usingPackageToUsedClasses.isEmpty()) {
        // No class in this package is used anywhere; emit the stock message.
        div.addContent(contents.getContent("doclet.ClassUse_No.usage.of.0", utils.getPackageName(packageElement)));
    } else {
        addPackageUse(div);
    }
    // Wrap content in <main> only when HTML5 semantic tags are allowed.
    if (configuration.allowTag(HtmlTag.MAIN)) {
        mainTree.addContent(div);
        body.addContent(mainTree);
    } else {
        body.addContent(div);
    }
    // Navigation and bottom text go inside <footer> when allowed, else
    // directly on the body.
    HtmlTree tree = (configuration.allowTag(HtmlTag.FOOTER))
            ? HtmlTree.FOOTER()
            : body;
    addNavLinks(false, tree);
    addBottom(tree);
    if (configuration.allowTag(HtmlTag.FOOTER)) {
        body.addContent(tree);
    }
    printHtmlDocument(null, true, body);
}
async def workerTypeStats(self, *args, **kwargs):
    """
    Look up the resource stats for a workerType

    Return an object which has a generic state description. This only contains counts of instances

    This method gives output: ``v1/worker-type-resources.json#``

    This method is ``experimental``
    """
    # Thin async wrapper: delegates to the generated API-call machinery using
    # the static function metadata registered under "workerTypeStats".
    return await self._makeApiCall(self.funcinfo["workerTypeStats"], *args, **kwargs)
java | public String includeJspString(String jspPath,
HttpServletRequest httpRequest, HttpServletResponse httpResponse,
WaybackRequest wbRequest, CaptureSearchResults results,
CaptureSearchResult result, Resource resource)
throws ServletException, IOException {
if (wbRequest.isAjaxRequest()) {
return "";
}
UIResults uiResults = new UIResults(wbRequest,uriConverter,results,
result,resource);
StringHttpServletResponseWrapper wrappedResponse =
new StringHttpServletResponseWrapper(httpResponse);
uiResults.forward(httpRequest, wrappedResponse, jspPath);
return wrappedResponse.getStringResponse();
} |
/**
 * Convenience overload: checks for the property using the class's fully
 * qualified name as the lookup key.
 *
 * @param clazz class whose name keys the property
 * @param language language to resolve the property for
 * @param propertyComponents additional property-key components
 * @return true if the property exists
 */
public static boolean hasProperty(Class<?> clazz, Language language, String... propertyComponents) {
    return hasProperty(clazz.getName(), language, propertyComponents);
}
java | public String directory(Node leftdir, Node rightdir, Filter filter) throws IOException {
List<String> paths;
paths = paths(leftdir, filter);
paths(rightdir, filter, paths);
return directory(leftdir, rightdir, paths);
} |
def _resume_ssl_session(
        server_info: ServerConnectivityInfo,
        ssl_version_to_use: OpenSslVersionEnum,
        ssl_session: Optional[nassl._nassl.SSL_SESSION] = None,
        should_enable_tls_ticket: bool = False
) -> nassl._nassl.SSL_SESSION:
    """Connect to the server and return the session object assigned for that connection.

    If ssl_session is given, try to resume that session.
    """
    connection = server_info.get_preconfigured_ssl_connection(override_ssl_version=ssl_version_to_use)
    if not should_enable_tls_ticket:
        # Per RFC 5077, a server presented with a ticket MUST NOT fall back to
        # session-ID resumption; tickets are disabled when testing session IDs.
        connection.ssl_client.disable_stateless_session_resumption()
    if ssl_session:
        connection.ssl_client.set_session(ssl_session)
    try:
        # Perform the SSL handshake and grab the negotiated session.
        connection.connect()
        negotiated_session = connection.ssl_client.get_session()
    finally:
        connection.close()
    return negotiated_session
java | @Override
public boolean isValid(Object obj, ConstraintValidatorContext constraintContext) {
// Si l'Objet est null
if(obj == null) return acceptNullObject;
// Si l'Objet est un tableau
if(obj.getClass().isArray()) {
// Taille
int size = Array.getLength(obj);
// On retourne la comparaison
return (size >= min && size <= max);
}
// Si l'objet est une collection
if(obj instanceof Collection<?>) {
// Taille
int size = ((Collection<?>) obj).size();
// On retourne la comparaison
return (size >= min && size <= max);
}
// Si l'objet est une map
if(obj instanceof Map<?, ?>) {
// Taille
int size = ((Map<?, ?>) obj).size();
// On retourne la comparaison
return (size >= min && size <= max);
}
// Si l'Objet est une instance de Chaine
if(obj instanceof String) {
// Chaine locale
String localString = ((String) obj);
// Si on doit trimmer
if(trimString) localString = localString.trim();
// Taille
int size = localString.length();
// On retourne la comparaison
return (size >= min && size <= max);
}
// Si c'est autre chose on retourne false
return false;
} |
def write_case_data(self, file):
    """Write the case header sheet (name and base MVA) to the workbook."""
    sheet = self.book.add_sheet("Case")
    rows = (("Name", self.case.name), ("base_mva", self.case.base_mva))
    for row_idx, (label, value) in enumerate(rows):
        sheet.write(row_idx, 0, label)
        sheet.write(row_idx, 1, value)
/**
 * Rethrows the underlying cause of a reflective invocation failure: the
 * cause itself when it is a RuntimeException, otherwise an
 * IllegalArgumentException wrapping it.
 *
 * <p>NOTE(review): despite the declared return type this method never
 * returns normally — it always throws. The return type allows callers to
 * write {@code throw getCause(e);} to satisfy the compiler.
 *
 * @param e the invocation failure to unwrap
 * @return never returns normally; the declared type is for caller ergonomics
 */
private static RuntimeException getCause(InvocationTargetException e) {
    Throwable cause = e.getCause();
    if(cause instanceof RuntimeException)
        throw (RuntimeException) cause;
    else
        throw new IllegalArgumentException(e.getCause());
}
def showMetadata(dat):
    """
    Display the metadata of the specified LiPD in pretty print

    | Example
    | showMetadata(D["Africa-ColdAirCave.Sundqvist.2013"])

    :param dict dat: Metadata
    :return none:
    """
    # Deep-copy first so stripping the values fields never mutates the input.
    stripped = rm_values_fields(copy.deepcopy(dat))
    print(json.dumps(stripped, indent=2))
    return
def __analizar_observaciones(self, ret):
    "Check for and extract observations, if present, from the XML response"
    # Each observation is a {'codigo': ..., 'descripcion': ...} mapping.
    self.Observaciones = [obs["codigoDescripcion"] for obs in ret.get('arrayObservaciones', [])]
    # Human-readable summary: one "code: description" entry per line.
    self.Obs = '\n'.join(["%(codigo)s: %(descripcion)s" % obs for obs in self.Observaciones])
def script():
    """Run the command-line script."""
    parser = argparse.ArgumentParser(description="Print all textual tags of one or more audio files.")
    parser.add_argument("-b", "--batch", help="disable user interaction", action="store_true")
    parser.add_argument("file", nargs="+", help="file(s) to print tags of")
    args = parser.parse_args()
    for filename in args.file:
        # On Python 2 argv entries are bytes; decode with the FS encoding.
        if isinstance(filename, bytes):
            filename = filename.decode(sys.getfilesystemencoding())
        # Banner: a line of asterisks sized to the title line.
        line = "TAGS OF '{0}'".format(os.path.basename(filename))
        print("*" * len(line))
        print(line)
        print("*" * len(line))
        audioFile = taglib.File(filename)
        tags = audioFile.tags
        if len(tags) > 0:
            # Align values by padding keys to the longest key length.
            maxKeyLen = max(len(key) for key in tags.keys())
            for key, values in tags.items():
                for value in values:
                    print(('{0:' + str(maxKeyLen) + '} = {1}').format(key, value))
        if len(audioFile.unsupported) > 0:
            print('Unsupported tag elements: ' + "; ".join(audioFile.unsupported))
            # raw_input was renamed to input in Python 3.
            if sys.version_info[0] == 2:
                inputFunction = raw_input
            else:
                inputFunction = input
            # Offer interactive cleanup unless running in batch mode.
            if not args.batch and inputFunction("remove unsupported properties? [yN] ").lower() in ["y", "yes"]:
                audioFile.removeUnsupportedProperties(audioFile.unsupported)
                audioFile.save()
/**
 * Convenience overload: computes the OAuth signature using this client's
 * stored consumer and token secrets.
 *
 * @param requestMethod HTTP method of the request
 * @param targetUrl URL being signed
 * @param params request parameters included in the signature base
 * @return the computed signature
 */
public String computeSignature(String requestMethod, String targetUrl, Map<String, String> params) {
    return computeSignature(requestMethod, targetUrl, params, consumerSecret, tokenSecret);
}
/**
 * Adapts a standard Thrift {@link TProcessor} into a
 * {@link NiftyProcessorFactory}; every transport receives a processor
 * wrapping the same underlying TProcessor.
 *
 * @param standardThriftProcessor the Thrift processor to adapt
 * @return a factory producing Nifty processors backed by the given processor
 */
public static NiftyProcessorFactory factoryFromTProcessor(final TProcessor standardThriftProcessor)
{
    // Fail fast if the process() signature is not what the adapter expects.
    checkProcessMethodSignature();
    return new NiftyProcessorFactory()
    {
        @Override
        public NiftyProcessor getProcessor(TTransport transport)
        {
            return processorFromTProcessor(standardThriftProcessor);
        }
    };
}
def get(self, blueprint, user=None, user_id=None):
    """ When you have a statement in your code that says
    "if <provider>.authorized:" (for example "if twitter.authorized:"),
    a long string of function calls result in this function being used to
    check the Flask server's cache and database for any records associated
    with the current_user. The `user` and `user_id` parameters are actually
    not set in that case (see base.py:token(), that's what calls this
    function), so the user information is instead loaded from the
    current_user (if that's what you specified when you created the
    blueprint) with blueprint.config.get('user_id').

    :param blueprint: the OAuth blueprint whose token is being looked up
    :param user: optional user object overriding the blueprint's user
    :param user_id: optional user id overriding the blueprint's user id
    :return: the cached or stored OAuth token, or None if absent
    """
    # check cache first; a hit avoids any database work
    cache_key = self.make_cache_key(blueprint=blueprint, user=user, user_id=user_id)
    token = self.cache.get(cache_key)
    if token:
        return token

    # if not cached, make database queries
    query = self.session.query(self.model).filter_by(provider=blueprint.name)
    # Resolve the effective user id / user object: explicit argument wins,
    # then the storage's own setting, then the blueprint's configuration.
    uid = first([user_id, self.user_id, blueprint.config.get("user_id")])
    u = first(
        _get_real_user(ref, self.anon_user)
        for ref in (user, self.user, blueprint.config.get("user"))
    )

    if self.user_required and not u and not uid:
        raise ValueError("Cannot get OAuth token without an associated user")

    # check for user ID
    if hasattr(self.model, "user_id") and uid:
        query = query.filter_by(user_id=uid)
    # check for user (relationship property)
    elif hasattr(self.model, "user") and u:
        query = query.filter_by(user=u)
    # if we have the property, but not value, filter by None
    elif hasattr(self.model, "user_id"):
        query = query.filter_by(user_id=None)
    # run query
    try:
        token = query.one().token
    except NoResultFound:
        token = None

    # cache the result (including None, so misses are also cached)
    self.cache.set(cache_key, token)

    return token
def compile_when_to_only_if(expression):
    '''
    when is a shorthand for writing only_if conditionals. It requires less quoting
    magic. only_if is retained for backwards compatibility.

    The first token of the expression selects the operator; the remaining
    tokens are rewritten into an only_if Python expression string.
    '''
    # when: set $variable
    # when: unset $variable
    # when: failed $json_result
    # when: changed $json_result
    # when: int $x >= $z and $y < 3
    # when: int $x in $alist
    # when: float $x > 2 and $y <= $z
    # when: str $x != $y

    # Python 2 code: both str and unicode inputs are accepted.
    if type(expression) not in [ str, unicode ]:
        raise errors.AnsibleError("invalid usage of when_ operator: %s" % expression)
    tokens = expression.split()
    if len(tokens) < 2:
        raise errors.AnsibleError("invalid usage of when_ operator: %s" % expression)

    # when_set / when_unset: wrap each $variable in is_set()/is_unset().
    if tokens[0] in [ 'set', 'unset' ]:
        tcopy = tokens[1:]
        for (i,t) in enumerate(tokens[1:]):
            if t.find("$") != -1:
                tcopy[i] = "is_%s('''%s''')" % (tokens[0], t)
            else:
                tcopy[i] = t
        return " ".join(tcopy)

    # when_failed / when_changed: wrap each $result in is_failed()/is_changed().
    elif tokens[0] in [ 'failed', 'changed' ]:
        tcopy = tokens[1:]
        for (i,t) in enumerate(tokens[1:]):
            if t.find("$") != -1:
                tcopy[i] = "is_%s(%s)" % (tokens[0], t)
            else:
                tcopy[i] = t
        return " ".join(tcopy)

    # when_integer / when_float / when_string: cast each $variable before
    # the comparison runs.
    elif tokens[0] in [ 'integer', 'float', 'string' ]:
        cast = None
        if tokens[0] == 'integer':
            cast = 'int'
        elif tokens[0] == 'string':
            cast = 'str'
        elif tokens[0] == 'float':
            cast = 'float'
        tcopy = tokens[1:]
        for (i,t) in enumerate(tokens[1:]):
            if t.find("$") != -1:
                # final variable substitution will happen in Runner code
                tcopy[i] = "%s('''%s''')" % (cast, t)
            else:
                tcopy[i] = t
        return " ".join(tcopy)

    # when_boolean: true unless the value is a recognized "false" spelling.
    elif tokens[0] in [ 'bool', 'boolean' ]:
        tcopy = tokens[1:]
        for (i, t) in enumerate(tcopy):
            if t.find("$") != -1:
                tcopy[i] = "(is_set('''%s''') and '''%s'''.lower() not in ('false', 'no', 'n', 'none', '0', ''))" % (t, t)
        return " ".join(tcopy)

    else:
        raise errors.AnsibleError("invalid usage of when_ operator: %s" % expression)
def critic(self, real_pred, input):
    "Create some `fake_pred` with the generator from `input` and compare them to `real_pred` in `self.loss_funcC`."
    # Generator runs without gradients; only the generated output requires
    # grad so the critic loss can backpropagate through the critic alone.
    fake = self.gan_model.generator(input.requires_grad_(False)).requires_grad_(True)
    fake_pred = self.gan_model.critic(fake)
    return self.loss_funcC(real_pred, fake_pred)
def get_eco_map(url):
    """
    Convert the three-column (code, go_ref, eco_curie) file at ``url`` to a
    hashmap keyed by joined primary and secondary keys, for example

        IEA GO_REF:0000002 ECO:0000256
        IEA GO_REF:0000003 ECO:0000501
        IEA Default ECO:0000501

    becomes

        IEA-GO_REF:0000002: ECO:0000256
        IEA-GO_REF:0000003: ECO:0000501
        IEA: ECO:0000501

    :return: dict
    """
    # this would go in a translation table but it is generated dynamically
    # maybe when we move to a make driven system
    eco_map = {}
    response = urllib.request.urlopen(urllib.request.Request(url))
    for raw_line in response:
        line = raw_line.decode('utf-8').rstrip()
        if re.match(r'^#', line):
            continue  # skip comment lines
        code, go_ref, eco_curie = line.split('\t')
        key = code if go_ref == 'Default' else "{}-{}".format(code, go_ref)
        eco_map[key] = eco_curie
    return eco_map
java | public static Map<String, ValueType> rowSignatureFor(final GroupByQuery query)
{
final ImmutableMap.Builder<String, ValueType> types = ImmutableMap.builder();
for (DimensionSpec dimensionSpec : query.getDimensions()) {
types.put(dimensionSpec.getOutputName(), dimensionSpec.getOutputType());
}
for (AggregatorFactory aggregatorFactory : query.getAggregatorSpecs()) {
final String typeName = aggregatorFactory.getTypeName();
final ValueType valueType;
if (typeName != null) {
valueType = GuavaUtils.getEnumIfPresent(ValueType.class, StringUtils.toUpperCase(typeName));
} else {
valueType = null;
}
if (valueType != null) {
types.put(aggregatorFactory.getName(), valueType);
}
}
// Don't include post-aggregators since we don't know what types they are.
return types.build();
} |
def group(self, groupId):
    """
    Gets a group based on its ID.

    :param groupId: identifier of the group to fetch
    :return: a lazily-initialized Group bound to this handler's security
        and proxy settings
    """
    url = "%s/%s" % (self.root, groupId)
    # initalize=False (sic — upstream kwarg spelling) defers the remote load.
    return Group(url=url,
                 securityHandler=self._securityHandler,
                 proxy_url=self._proxy_url,
                 proxy_port=self._proxy_port,
                 initalize=False)
/**
 * Adds a vertex element carrying an edge-id property, executing the factory
 * call under the graph's locking wrapper.
 *
 * @param baseType schema base type of the new vertex
 * @param conceptId concept id stored as the edge-id property
 * @return the created vertex element
 */
public VertexElement addVertexElementWithEdgeIdProperty(Schema.BaseType baseType, ConceptId conceptId) {
    return executeLockingMethod(() -> factory().addVertexElementWithEdgeIdProperty(baseType, conceptId));
}
/**
 * Copies the configured source file or directory to the destination.
 *
 * @return the destination file
 * @throws IORuntimeException if the source is missing, source and
 *         destination are the same file, or a directory would overwrite a file
 */
@Override
public File copy() throws IORuntimeException{
    final File src = this.src;
    final File dest = this.dest;
    // check preconditions: source must exist and differ from the destination
    Assert.notNull(src, "Source File is null !");
    if (false == src.exists()) {
        throw new IORuntimeException("File not exist: " + src);
    }
    Assert.notNull(dest, "Destination File or directiory is null !");
    if (FileUtil.equals(src, dest)) {
        throw new IORuntimeException("Files '{}' and '{}' are equal", src, dest);
    }
    if (src.isDirectory()) {// copy a directory
        if(dest.exists() && false == dest.isDirectory()) {
            // source is a directory but destination is a file: raise an IO error
            throw new IORuntimeException("Src is a directory but dest is a file!");
        }
        // When copying content only, write directly into dest; otherwise
        // create (and copy into) a subdirectory named after the source.
        final File subDest = isCopyContentIfDir ? dest : FileUtil.mkdir(FileUtil.file(dest, src.getName()));
        internalCopyDirContent(src, subDest);
    } else {// copy a single file
        internalCopyFile(src, dest);
    }
    return dest;
}
java | public ResultList<Company> searchCompanies(String query, Integer page) throws MovieDbException {
TmdbParameters parameters = new TmdbParameters();
parameters.add(Param.QUERY, query);
parameters.add(Param.PAGE, page);
URL url = new ApiUrl(apiKey, MethodBase.SEARCH).subMethod(MethodSub.COMPANY).buildUrl(parameters);
WrapperGenericList<Company> wrapper = processWrapper(getTypeReference(Company.class), url, "company");
return wrapper.getResultsList();
} |
def get_alert_community(self, channel=None):
    """Get the current community string for alerts

    Returns the community string that will be in SNMP traps from this
    BMC

    :param channel: The channel to get configuration for, autodetect by
                    default
    :returns: The community string
    """
    channel = self.get_network_channel() if channel is None else channel
    rsp = self.xraw_command(netfn=0xc, command=2, data=(channel, 16, 0, 0))
    # The community string is NUL-terminated within the returned field.
    return rsp['data'][1:].partition('\x00')[0]
/**
 * Asynchronously deletes the management policies of the given storage
 * account, unwrapping the service response to its (Void) body.
 *
 * @param resourceGroupName resource group containing the account
 * @param accountName storage account name
 * @return observable completing when the deletion finishes
 */
public Observable<Void> deleteManagementPoliciesAsync(String resourceGroupName, String accountName) {
    return deleteManagementPoliciesWithServiceResponseAsync(resourceGroupName, accountName).map(new Func1<ServiceResponse<Void>, Void>() {
        @Override
        public Void call(ServiceResponse<Void> response) {
            return response.body();
        }
    });
}
def unzip_file_to_dir(path_to_zip, output_directory):
    """
    Extract a ZIP archive to a directory.

    :param path_to_zip: path to the .zip file to extract
    :param output_directory: directory to extract into (created by
        extractall if it does not exist)
    """
    # Context manager guarantees the archive is closed even if
    # extraction raises.
    with ZipFile(path_to_zip, 'r') as archive:
        archive.extractall(output_directory)
java | @SuppressWarnings("unchecked")
public <T> Map<String, T> find(final Class<T> valueTypeToFind) {
return (Map<String, T>) this.values.entrySet().stream()
.filter(input -> valueTypeToFind.isInstance(input.getValue()))
.collect(
Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
} |
def parse_z(cls, offset):
    """ Parse a %z-style offset string ("+HHMM"/"-HHMM") into a `timedelta` """
    assert len(offset) == 5, 'Invalid offset string format, must be "+HHMM"'
    sign, hours_part, minutes_part = offset[0], offset[:3], offset[3:]
    # The sign applies to both components, e.g. "-0130" -> -1h and -30m.
    return timedelta(hours=int(hours_part), minutes=int(sign + minutes_part))
def reloading_meta_metaclass_factory(BASE_TYPE=type):
    """ hack for pyqt

    Builds a metaclass derived from ``BASE_TYPE`` that attaches the
    module-level ``reload_class`` helper as an ``rrr`` attribute on every
    class it creates.

    :param BASE_TYPE: metaclass to derive from (``type`` by default; pass a
        Qt metaclass when working with PyQt classes)
    :return: the generated metaclass
    """
    class ReloadingMetaclass2(BASE_TYPE):
        def __init__(metaself, name, bases, dct):
            super(ReloadingMetaclass2, metaself).__init__(name, bases, dct)
            #print('Making rrr for %r' % (name,))
            # Expose the reload helper on the class itself.
            metaself.rrr = reload_class
    return ReloadingMetaclass2
/**
 * Injects RoboGuice members (excluding views) before the activity is
 * created, then fires an OnCreateEvent after the superclass has run.
 * The injection must precede super.onCreate so injected fields are
 * available to lifecycle listeners.
 *
 * @param savedInstanceState previously saved state, or null
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    final RoboInjector injector = RoboGuice.getInjector(this);
    eventManager = injector.getInstance(EventManager.class);
    preferenceListener = injector.getInstance(PreferenceListener.class);
    // Views are not ready yet; only non-view members are injected here.
    injector.injectMembersWithoutViews(this);
    super.onCreate(savedInstanceState);
    eventManager.fire(new OnCreateEvent(savedInstanceState));
}
java | public static List<Method> getAllMethod(Class<?> clazz) {
return ALL_METHOD_CACHE.compute(clazz, (key, value) -> {
if (value == null) {
List<Method> methods = new ArrayList<>();
Map<String, Method> methodMap = getAllMethod(key, new HashMap<>());
methodMap.forEach((k, v) -> methods.add(v));
return methods;
} else {
return value;
}
});
} |
def SaveDataToFD(self, raw_data, fd):
    """Merge the raw data with the config file and store it."""
    # Serialize to YAML and write UTF-8 bytes to the file descriptor.
    # NOTE(review): yaml.Dump appears to be a project wrapper (stdlib PyYAML
    # exposes yaml.dump) — confirm against the project's yaml module.
    fd.write(yaml.Dump(raw_data).encode("utf-8"))
def readBatchTupleQuotes(self, symbols, start, end):
    '''
    read batch quotes as tuple to save memory

    :param symbols: iterable of symbols to fetch
    :param start: inclusive start time (int epoch)
    :param end: exclusive end time, or None for no upper bound
    :return: {time: {symbol: tuple_quote}} nested dict
    '''
    if end is None:
        end=sys.maxint  # Python 2: effectively "no upper bound"
    ret={}
    session=self.getReadSession()()
    try:
        # Query in chunks of 100 symbols to keep IN clauses bounded.
        symbolChunks=splitListEqually(symbols, 100)
        for chunk in symbolChunks:
            rows=session.query(Quote.symbol, Quote.time, Quote.close, Quote.volume,
                               Quote.low, Quote.high).filter(and_(Quote.symbol.in_(chunk),
                                                                  Quote.time >= int(start),
                                                                  Quote.time < int(end)))
            for row in rows:
                # Group rows first by time, then by symbol.
                if row.time not in ret:
                    ret[row.time]={}
                ret[row.time][row.symbol]=self.__sqlToTupleQuote(row)
    finally:
        # Return the scoped session to the registry in all cases.
        self.getReadSession().remove()
    return ret
def set_caller(self, caller):
    """
    Sets the caller after instantiation.

    Records the callable's name and defining module, then reloads state
    via ``self.load()``.

    :param caller: the callable whose identity should be recorded
    """
    self.caller = caller.__name__
    self.module = inspect.getmodule(caller).__name__
    self.load()
/**
 * Runs the tag's translation-time evaluator (TTE), if the tag library
 * declares one, and registers the tag for end-of-translation processing.
 * Evaluator failures are surfaced as template errors against the source.
 *
 * @param tlt tag library definition for the tag
 * @param data current translation data
 * @param tag the tag instance being evaluated
 * @throws TemplateException if the evaluator rejects the tag
 */
private final void eval(TagLibTag tlt, Data data, Tag tag) throws TemplateException {
    if (tlt.hasTTE()) {
        try {
            tlt.getEvaluator().execute(data.config, tag, tlt, data.flibs, data);
        }
        catch (EvaluatorException e) {
            throw new TemplateException(data.srcCode, e.getMessage());
        }
        // Queue the tag for end-of-page evaluation processing.
        data.ep.add(tlt, tag, data.flibs, data.srcCode);
    }
}
def encode(string, encoding=None, errors=None):
    """Encode to specified encoding.

    ``encoding`` defaults to the preferred encoding.
    ``errors`` defaults to the preferred error handler.
    """
    chosen_encoding = getpreferredencoding() if encoding is None else encoding
    chosen_errors = getpreferrederrors() if errors is None else errors
    return string.encode(chosen_encoding, chosen_errors)
/**
 * Retrieves a pipeline definition, applying the standard pre-execution
 * handler chain to the request before dispatching.
 *
 * @param request the get-pipeline-definition request
 * @return the pipeline definition result
 */
@Override
public GetPipelineDefinitionResult getPipelineDefinition(GetPipelineDefinitionRequest request) {
    request = beforeClientExecution(request);
    return executeGetPipelineDefinition(request);
}
java | public void setServerEvents(java.util.Collection<ServerEvent> serverEvents) {
if (serverEvents == null) {
this.serverEvents = null;
return;
}
this.serverEvents = new java.util.ArrayList<ServerEvent>(serverEvents);
} |
def eccentricity(self, directed=None, weighted=None):
    '''Maximum distance from each vertex to any other vertex.'''
    distances = self.shortest_path(directed=directed, weighted=weighted)
    return distances.max(axis=0)
/**
 * Removes a previously registered handler. The subscription's token type
 * determines which registry the handler was stored in.
 *
 * @param subscription the subscription returned at registration time
 * @throws RemoteRuntimeException if the token type is not recognized
 */
public void unsubscribe(final Subscription<T> subscription) {
    final T token = subscription.getToken();
    LOG.log(Level.FINER, "RemoteManager: {0} token {1}", new Object[]{this.name, token});
    if (token instanceof Exception) {
        // Exception tokens identify the transport error handler.
        this.transport.registerErrorHandler(null);
    } else if (token instanceof Tuple2) {
        this.tupleToHandlerMap.remove(token);
    } else if (token instanceof Class) {
        this.msgTypeToHandlerMap.remove(token);
    } else {
        throw new RemoteRuntimeException(
            "Unknown subscription type: " + subscription.getClass().getCanonicalName());
    }
}
def login_details(self):
    """
    Gets the login details

    Returns:
        List of login details
    """
    # Lazily construct the client on first use and cache it afterwards.
    details = self.__login_details
    if not details:
        details = LoginDetails(self.__connection)
        self.__login_details = details
    return details
def refresh(self, line=None):
    """Refreshes progress bar."""
    # Non-blocking acquire: if another thread is mid-refresh, skip this
    # update entirely; a later call will pick up the latest line.
    if not self._lock.acquire(False):
        return
    text = self._line if line is None else line
    if sys.stdout.isatty() and text is not None:
        self._writeln(text)
    self._line = text
    self._lock.release()
/**
 * Reads the response header from the stream and returns it as a separate
 * stream, leaving {@code pIS} positioned immediately after the header.
 * The input must support mark/reset for at least {@code BUF_SIZE} bytes.
 *
 * NOTE(review): assumes findEndOfHeader returns the end offset when the
 * terminator is fully inside the buffer, -1 when absent, and a value
 * below -1 when the terminator may straddle the buffer edge -- confirm.
 *
 * @param pIS buffered stream positioned at the start of the response
 * @return a stream containing only the header bytes
 * @throws IOException if reading fails
 */
private static InputStream detatchResponseHeader(BufferedInputStream pIS) throws IOException {
    // Store header in byte array
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    pIS.mark(BUF_SIZE);
    byte[] buffer = new byte[BUF_SIZE];
    int length;
    int headerEnd;
    // Read from iput, store in bytes
    while ((length = pIS.read(buffer)) != -1) {
        // End of header?
        headerEnd = findEndOfHeader(buffer, length);
        if (headerEnd >= 0) {
            // Write rest
            bytes.write(buffer, 0, headerEnd);
            // Go back to last mark
            pIS.reset();
            // Position stream to right after header, and exit loop
            pIS.skip(headerEnd);
            break;
        }
        else if (headerEnd < -1) {
            // Terminator may straddle the buffer boundary: keep back the
            // last 4 bytes so they are re-read on the next pass.
            // Write partial (except matching header bytes)
            bytes.write(buffer, 0, length - 4);
            // Go back to last mark
            pIS.reset();
            // Position stream to right before potential header end
            pIS.skip(length - 4);
        }
        else {
            // Write all
            bytes.write(buffer, 0, length);
        }
        // Can't read more than BUF_SIZE ahead anyway
        pIS.mark(BUF_SIZE);
    }
    return new ByteArrayInputStream(bytes.toByteArray());
}
/**
 * Removes the first list entry wrapping the given steering behavior.
 * Does nothing if the behavior is not present.
 *
 * @param behavior the behavior instance to remove
 */
public void remove (SteeringBehavior<T> behavior) {
    for (int i = 0; i < list.size; i++) {
        // Identity comparison: only the exact instance is removed.
        if(list.get(i).behavior == behavior) {
            list.removeIndex(i);
            return;
        }
    }
}
def parse_datetime(utc_timestamp_ms, utc_offset_ms):
    """
    Create a timezone-aware ``datetime.datetime`` from the given UTC timestamp
    (in milliseconds), if provided. If an offset is given, it is applied to
    the datetime returned.

    Parameters
    ----------
    utc_timestamp_ms
        UTC timestamp in milliseconds.
    utc_offset_ms
        Offset from UTC, in milliseconds, to apply to the time.

    Returns
    -------
    A ``datetime.datetime`` if a timestamp is given, otherwise ``None``.
    """
    # NOTE: a timestamp of exactly 0 (the epoch) is treated as "absent",
    # matching the original truthiness check.
    if not utc_timestamp_ms:
        return None
    # Stdlib fixed-offset timezones replace dateutil.tzutc/tzoffset; the
    # behavior is identical and drops the third-party dependency.
    dt = datetime.datetime.fromtimestamp(utc_timestamp_ms / 1000,
                                         datetime.timezone.utc)
    if utc_offset_ms:
        offset = datetime.timedelta(milliseconds=utc_offset_ms)
        dt = dt.astimezone(datetime.timezone(offset))
    return dt
/**
 * Flushes buffered bits to the underlying stream. Any pending partial
 * byte is first padded out with zero bits and written.
 *
 * @throws IOException if the underlying stream fails; this stream is
 *         then marked as errored so later operations can fail fast
 */
@Override
public void flush() throws IOException
{
    checkState();
    // Pad the current partial byte with 0-bits so it can be emitted.
    uncheckedWriteTillByte(false);
    try
    {
        out.flush();
    }
    catch(IOException e)
    {
        // Remember the failure before propagating it.
        error = true;
        throw e;
    }
}
python | def write_last_and_beds(pf, GenePositions, ContigStarts):
"""
Write LAST file, query and subject BED files.
"""
qbedfile = pf + "tigs.bed"
sbedfile = pf + "chr.bed"
lastfile = "{}tigs.{}chr.last".format(pf, pf)
qbedfw = open(qbedfile, "w")
sbedfw = open(sbedfile, "w")
lastfw = open(lastfile, "w")
GeneContigs = np.searchsorted(ContigStarts, GenePositions) - 1
for i, (c, gstart) in enumerate(zip(GeneContigs, GenePositions)):
gene = "gene{:05d}".format(i)
tig = "tig{:04d}".format(c)
start = ContigStarts[c]
cstart = gstart - start
print("\t".join(str(x) for x in
(tig, cstart, cstart + 1, gene)), file=qbedfw)
print("\t".join(str(x) for x in
("chr1", gstart, gstart + 1, gene)), file=sbedfw)
lastatoms = [gene, gene, 100] + [0] * 8 + [100]
print("\t".join(str(x) for x in lastatoms), file=lastfw)
qbedfw.close()
sbedfw.close()
lastfw.close() |
def run_with_plugins(plugin_list):
    """
    Carry out a test run with the supplied list of plugin instances.
    The plugins are expected to identify the object to run.

    Parameters:
        plugin_list: plugin instances (objects implementing some subset
            of PluginInterface)

    Returns: exit code as an integer. By default (plugins may override)
    this is 0 when the test run succeeded and 1 when it failed.
    """
    composite = core.PluginComposite(plugin_list)
    test_run = core.TestRun(composite.get_object_to_run(), composite)
    test_run.run()
    return composite.get_exit_code()
def is_response(cls, response):
    '''Return whether the document is likely to be CSS.'''
    content_type = response.fields.get('content-type', '').lower()
    if 'css' in content_type:
        return True
    # A stylesheet can be mistakenly served with an HTML content type;
    # in that case sniff the body itself.
    if response.body and 'html' in content_type:
        return cls.is_file(response.body)
/**
 * Blocks until this instance acquires leadership, leaves the STARTED
 * state, or the thread is interrupted.
 *
 * @throws InterruptedException if the waiting thread is interrupted
 * @throws EOFException if the STARTED state is left before leadership
 *         is obtained
 */
public void await() throws InterruptedException, EOFException
{
    synchronized(this)
    {
        // Wait while still running but not yet the leader; notifications
        // are expected from the state/leadership mutators.
        while ( (state.get() == State.STARTED) && !hasLeadership.get() )
        {
            wait();
        }
    }
    if ( state.get() != State.STARTED )
    {
        // Shut down before leadership arrived.
        throw new EOFException();
    }
}
/**
 * Fetches the next page of delta entries, delegating to the internal
 * implementation with no path-prefix filter.
 *
 * @param cursor cursor from the previous call, or {@code null} to start
 *        from the beginning
 * @param includeMediaInfo whether to include media metadata
 * @return the delta page
 * @throws DbxException on API failure
 */
public DbxDelta<DbxEntry> getDelta(/*@Nullable*/String cursor, boolean includeMediaInfo)
    throws DbxException
{
    return _getDelta(cursor, null, includeMediaInfo);
}
def ConsultarCondicionesVenta(self, sep="||"):
    "Return a listing of codes and descriptions of the sale conditions"
    ret = self.client.consultarCondicionesVenta(
        auth={
            'token': self.Token, 'sign': self.Sign,
            'cuit': self.Cuit, },
    )['respuesta']
    # Record/raise any service-level errors reported in the response.
    self.__analizar_errores(ret)
    array = ret.get('condicionVenta', [])
    if sep is None:
        # No separator requested: return a {code: description} mapping.
        return dict([(it['codigo'], it['descripcion']) for it in array])
    else:
        # Otherwise return formatted strings: "|| code || description ||".
        return [("%s %%s %s %%s %s" % (sep, sep, sep)) %
                (it['codigo'], it['descripcion']) for it in array]
def get(self, entity_id: EntityId, load: bool = False) -> Entity:
    """Get a Wikidata entity by its :class:`~.entity.EntityId`.

    :param entity_id: The :attr:`~.entity.Entity.id` of
        the :class:`~.entity.Entity` to find.
    :type eneity_id: :class:`~.entity.EntityId`
    :param load: Eager loading on :const:`True`.
        Lazy loading (:const:`False`) by default.
    :type load: :class:`bool`
    :return: The found entity.
    :rtype: :class:`~.entity.Entity`

    .. versionadded:: 0.3.0
       The ``load`` option.
    """
    # Identity map guarantees at most one Entity object per id.
    if entity_id in self.identity_map:
        entity = self.identity_map[entity_id]
    else:
        entity = Entity(entity_id, self)
        self.identity_map[entity_id] = entity
    if load:
        entity.load()
    return entity
/**
 * Executes the given action under a specific database user. Changing the
 * user is forbidden inside an active transaction, and specific users
 * cannot be nested.
 *
 * @param user user name to act as
 * @param password the user's password
 * @param userAction action to run under that user
 * @param <T> result type of the action
 * @return the action's result
 * @throws UserActionException if the action throws a checked exception
 */
public <T> T executeWithUser(final String user, final String password,
                             final SpecificUserAction<T> userAction) {
    Preconditions.checkState(!transactionManager.isTransactionActive(),
        "User can't be changed during transaction");
    Preconditions.checkState(specificUser.get() == null,
        "Specific user already defined as '%s'",
        specificUser.get() != null ? specificUser.get().user : null);
    specificUser.set(create(user, password));
    logger.trace("Use specific user: {}", user);
    T result = null;
    try {
        result = userAction.execute();
    } catch (Throwable th) {
        // Unchecked exceptions propagate unchanged; checked ones are wrapped.
        Throwables.throwIfUnchecked(th);
        throw new UserActionException(String.format("Failed to perform operation under user '%s'", user), th);
    } finally {
        // Always clear the thread-local user, even when the action failed.
        specificUser.remove();
    }
    return result;
}
java | private DestinationHandler createForeignDestination(DestinationForeignDefinition dfd, String busName) throws SIResourceException, SINotPossibleInCurrentConfigurationException
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(tc, "createForeignDestination", new Object[] { dfd, busName });
ForeignDestinationHandler fdh = null;
//The lock on the destinationManager is taken to stop 2 threads creating the same
//destination and also to synchronize dynamic deletes with the
//creation of aliases. This stops an alias destination being created that targets a
//destination in the process of being deleted.
synchronized (this)
{
// Create a new DestinationHandler, which is created locked
fdh = new ForeignDestinationHandler(dfd, messageProcessor, this, findBus(dfd.getBus()), busName);
DestinationIndex.Type type = new DestinationIndex.Type();
type.foreignDestination = Boolean.TRUE;
type.alias = Boolean.FALSE;
type.local = Boolean.FALSE;
type.remote = Boolean.FALSE;
type.queue = new Boolean(!fdh.isPubSub());
type.state = State.ACTIVE;
destinationIndex.put(fdh, type);
fdh.registerControlAdapters();
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "createForeignDestination", fdh);
return fdh;
} |
/**
 * Writes the final payload for this HTTP exchange and completes it.
 * For JSONP requests the data is wrapped in the client-supplied callback.
 *
 * @param http the exchange to complete
 * @param data the message body to send
 */
private void endWithMessage(ServerHttpExchange http, String data) {
    endedWithMessage.set(true);
    boolean jsonp = "true".equals(params.get("cettia-transport-jsonp"));
    if (jsonp) {
        try {
            // JSON-encode the payload so it forms a valid JS string literal
            // inside the callback invocation.
            data = params.get("cettia-transport-callback") + "(" + mapper.writeValueAsString(data) + ");";
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e);
        }
    }
    http.setHeader("content-type", (jsonp ? "text/javascript" : "text/plain") + "; charset=utf-8").end(data);
}
java | private static String unescapeColors (String txt, boolean restore)
{
if (txt == null) return null;
String prefix = restore ? "#" : "%";
return ESCAPED_PATTERN.matcher(txt).replaceAll(prefix + "$1");
} |
/**
 * Prepares a builder for creating a new content element of this type:
 * ensures the target folder exists and configures site root, pattern
 * path, type name and locale on the builder.
 *
 * @param cms the CMS context (must not be the online project)
 * @param pageFolderRootPath root path of the page folder
 * @param builder the resource builder to configure
 * @throws CmsException if folder creation or configuration fails
 */
public void configureCreateNewElement(CmsObject cms, String pageFolderRootPath, CmsNewResourceBuilder builder)
    throws CmsException {
    checkOffline(cms);
    checkInitialized();
    String folderPath = getFolderPath(cms, pageFolderRootPath);
    CmsVfsUtil.createFolder(cms, folderPath);
    String destination = CmsStringUtil.joinPaths(folderPath, getNamePattern(true));
    // Use the root site so the pattern path is site-independent.
    builder.setSiteRoot("");
    builder.setPatternPath(destination);
    builder.setType(getTypeName());
    builder.setLocale(cms.getRequestContext().getLocale());
}
def rsync_git(local_path, remote_path, exclude=None, extra_opts=None,
              version_file='version.txt'):
    """Rsync deploy a git repo. Write and compare version.txt

    Prints the version currently on the server, writes the new local
    version, then rsyncs ``local_path`` to ``remote_path`` (Fabric task).
    """
    # warn_only: a missing remote version file must not abort the deploy.
    with settings(hide('output', 'running'), warn_only=True):
        print(green('Version On Server: ' + run('cat ' + '{}/{}'.format(
            remote_path, version_file)).strip()))
        print(green('Now Deploying Version ' +
                    write_version(join(local_path, version_file))))
        rsync(local_path, remote_path, exclude, extra_opts)
def notches( self ):
    """
    Returns a list of the notches that are going to be used for this ruler.
    If the notches have not been explicitly set (per a Custom type), then
    the notches will be generated based on the minimum, maximum and step
    values the current ruler type.
    :return [<str>, ..]
    """
    # Cached result from a previous call (or explicitly assigned notches).
    if ( self._notches is not None ):
        return self._notches
    rtype = self.rulerType()
    # NOTE(review): formatter/format are fetched but never used below --
    # confirm whether formatValue() depends on them being initialized.
    formatter = self.formatter()
    format = self.format()
    self._notches = []
    minimum = self.minimum()
    maximum = self.maximum()
    step = self.step()
    # A non-positive step would never terminate the loop; bail out.
    if ( step <= 0 ):
        return []
    curr = minimum
    while ( curr < maximum ):
        self._notches.append(self.formatValue(curr))
        # Advance by the step in units appropriate to the ruler type:
        # plain numbers, days for dates, seconds for datetimes/times.
        if ( rtype == XChartRuler.Type.Number ):
            curr += step
        elif ( rtype == XChartRuler.Type.Date ):
            curr = curr.addDays(step)
        elif ( rtype in (XChartRuler.Type.Datetime, XChartRuler.Type.Time)):
            curr = curr.addSecs(step)
        else:
            # Unknown ruler type: stop generating intermediate notches.
            break
    # The maximum value is always included as the final notch.
    self._notches.append(self.formatValue(maximum))
    return self._notches
/**
 * Counts rows of the table mapped by {@code clazz} matching the given
 * SQL clause, using a self-contained connection via SqlClosure.
 *
 * @param clazz the mapped entity class
 * @param clause SQL clause fragment with '?' placeholders
 * @param args values bound to the placeholders
 * @param <T> the entity type
 * @return the matching row count
 */
public static <T> int countObjectsFromClause(Class<T> clazz, String clause, Object... args)
{
    return SqlClosure.sqlExecute(c -> OrmElf.countObjectsFromClause(c, clazz, clause, args));
}
java | public static String ifNotEmptyAppend(String chekString, String value) {
if (hasText(chekString)) {
return value+chekString;
} else {
return "";
}
} |
/**
 * Returns the republican month of this date.
 *
 * @return the month derived from the day-of-year
 * @throws ChronoException if this date is one of the complementary days
 *         (sansculottides, day-of-year &gt; 360), which belong to no month
 */
public FrenchRepublicanMonth getMonth() {
    if (this.fdoy > 360) {
        throw new ChronoException(
            "Complementary days (sansculottides) do not represent any month: " + this.toString());
    }
    // Each republican month has exactly 30 days.
    int m = ((this.fdoy - 1) / 30) + 1;
    return FrenchRepublicanMonth.valueOf(m);
}
/**
 * Executes the SQL through the configured handler and packages the
 * outcome as (update count, generated-keys result set, warning).
 *
 * @param sql the update statement
 * @param autoGeneratedKeys RETURN_GENERATED_KEYS or NO_GENERATED_KEYS
 * @return triple of update count, generated keys and warning
 * @throws SQLException if the handler fails (non-SQL failures are
 *         wrapped with their cause preserved)
 */
private ImmutableTriple<Integer,ResultSet,SQLWarning> update(final String sql, final int autoGeneratedKeys) throws SQLException {
    checkClosed();
    try {
        final UpdateResult res = this.handler.whenSQLUpdate(sql, NO_PARAMS);
        final SQLWarning w = res.getWarning();
        // Expose generated keys only when requested and available;
        // otherwise hand back the shared empty result set.
        final ResultSet k = (res.generatedKeys == null
            || autoGeneratedKeys == NO_GENERATED_KEYS)
            ? EMPTY_GENERATED_KEYS.withStatement(this)
            : res.generatedKeys.resultSet().withStatement(this);
        return ImmutableTriple.of(res.getUpdateCount(), k, w);
    } catch (SQLException se) {
        throw se;
    } catch (Exception e) {
        throw new SQLException(e.getMessage(), e);
    } // end of catch
}
/**
 * Returns true if the given field is currently set (Thrift-style
 * metadata accessor).
 *
 * @param field the field to test; must not be null
 * @return whether the field has been assigned a value
 */
public boolean isSet(_Fields field) {
    if (field == null) {
        throw new IllegalArgumentException();
    }
    switch (field) {
    case OUTPUT_FIELDS:
        return is_set_output_fields();
    case DIRECT:
        return is_set_direct();
    }
    // Unreachable unless the enum gains a case not handled above.
    throw new IllegalStateException();
}
def remove_prohibited_element(tag_name, document_element):
    """
    To fit the Evernote DTD need, drop this tag name
    """
    # getElementsByTagName returns a snapshot list, so detaching nodes
    # while iterating over it is safe.
    for prohibited in document_element.getElementsByTagName(tag_name):
        prohibited.parentNode.removeChild(prohibited)
def read_relation_file(filename):
    '''
    Reads the GermaNet relation file ``gn_relations.xml`` which lists
    all the relations holding between lexical units and synsets.

    Arguments:
    - `filename`: path to the relation XML file

    Returns a pair ``(lex_rels, con_rels)`` of lists of attribute dicts.
    '''
    with open(filename, 'rb') as input_file:
        doc = etree.parse(input_file)
    lex_rels = []
    con_rels = []
    assert doc.getroot().tag == 'relations'
    for child in doc.getroot():
        if child.tag == 'lex_rel':
            # Relation elements are expected to be leaves.
            if 0 < len(child):
                print('<lex_rel> has unexpected child node')
            child_dict = dict(child.items())
            warn_attribs('', child, RELATION_ATTRIBS, RELATION_ATTRIBS_REQD)
            if child_dict['dir'] not in LEX_REL_DIRS:
                print('unrecognized <lex_rel> dir', child_dict['dir'])
            # Bidirectional relations must name their inverse.
            if child_dict['dir'] == 'both' and 'inv' not in child_dict:
                print('<lex_rel> has dir=both but does not specify inv')
            lex_rels.append(child_dict)
        elif child.tag == 'con_rel':
            if 0 < len(child):
                print('<con_rel> has unexpected child node')
            child_dict = dict(child.items())
            warn_attribs('', child, RELATION_ATTRIBS, RELATION_ATTRIBS_REQD)
            if child_dict['dir'] not in CON_REL_DIRS:
                print('unrecognised <con_rel> dir', child_dict['dir'])
            if (child_dict['dir'] in ['both', 'revert'] and
                'inv' not in child_dict):
                print('<con_rel> has dir={0} but does not specify inv'.format(
                    child_dict['dir']))
            con_rels.append(child_dict)
        else:
            # Warn about unexpected elements but keep processing.
            print('unrecognised child of <relations>', child)
            continue
    return lex_rels, con_rels
java | public static AuditorModuleContext getContext()
{
SecurityContext securityContext = SecurityContextFactory.getSecurityContext();
if (!securityContext.isInitialized()) {
securityContext.initialize();
}
AbstractModuleContext moduleContext = securityContext.getModuleContext(CONTEXT_ID);
if (null == moduleContext || !(moduleContext instanceof AuditorModuleContext)) {
moduleContext = ContextInitializer.defaultInitialize();
securityContext.registerModuleContext(CONTEXT_ID, moduleContext);
}
return (AuditorModuleContext)moduleContext;
} |
/**
 * Parses a config value into a collection of strings. Accepts an
 * existing Collection, a comma-separated String (whitespace around
 * commas is trimmed), or a String[] array.
 *
 * @param propertyKey the config key (used in error messages only)
 * @param obj the raw config value, may be null
 * @param defaultValue returned when {@code obj} is null
 * @return the parsed collection, or {@code defaultValue}
 * @throws IllegalArgumentException if the value has an unsupported type
 */
@SuppressWarnings("unchecked")
@FFDCIgnore(Exception.class)
public static Collection<String> parseStringCollection(String propertyKey, Object obj, Collection<String> defaultValue) {
    if (obj != null) {
        try {
            if (obj instanceof Collection) {
                return (Collection<String>) obj;
            } else if (obj instanceof String) {
                String commaList = (String) obj;
                // split the string, consuming/removing whitespace
                return Arrays.asList(commaList.split("\\s*,\\s*"));
            } else if (obj instanceof String[]) {
                return Arrays.asList((String[]) obj);
            }
        } catch (Exception e) {
            throw new IllegalArgumentException("Collection of strings could not be parsed: key=" + propertyKey + ", value=" + obj, e);
        }
        // unknown type
        throw new IllegalArgumentException("Collection of strings could not be parsed: key=" + propertyKey + ", value=" + obj);
    }
    return defaultValue;
}
def main():
    """This main function implements the CLI."""
    parser = argparse.ArgumentParser(description='Do something awesome')
    parser.add_argument('input_list', nargs='+',
                        type=str, default=sys.stdin)
    parser.add_argument('-o', '--outfile', nargs='?',
                        type=argparse.FileType('w'), default=sys.stdout)
    args = parser.parse_args()
    input_list = args.input_list
    # print() function instead of the original Python 2 print statement,
    # which is a SyntaxError on Python 3. Output still goes to stdout;
    # NOTE(review): --outfile is parsed but unused -- confirm intent.
    print('\n'.join(get_ips_from_cidr_subnets(input_list)))
/**
 * Renders with no variables into the given output stream.
 *
 * @param outputStream destination for the rendered output
 * @return the resulting context
 */
public Context merge(final OutputStream outputStream) {
    return merge(Vars.EMPTY, new OutputStreamOut(outputStream, engine));
}
def docker_client():
    """
    Returns a docker-py client configured using environment variables
    according to the same logic as the official Docker client.
    """
    cert_path = os.environ.get('DOCKER_CERT_PATH', '')
    if cert_path == '':
        # Fall back to the conventional ~/.docker certificate directory.
        cert_path = os.path.join(os.environ.get('HOME', ''), '.docker')
    base_url = os.environ.get('DOCKER_HOST')
    tls_config = None
    if os.environ.get('DOCKER_TLS_VERIFY', '') != '':
        # NOTE(review): if DOCKER_TLS_VERIFY is set but DOCKER_HOST is not,
        # base_url is None and the split below raises -- confirm intended.
        parts = base_url.split('://', 1)
        # Force an https scheme when TLS verification is requested.
        base_url = '%s://%s' % ('https', parts[1])
        client_cert = (os.path.join(cert_path, 'cert.pem'),
                       os.path.join(cert_path, 'key.pem'))
        ca_cert = os.path.join(cert_path, 'ca.pem')
        tls_config = tls.TLSConfig(
            ssl_version=ssl.PROTOCOL_TLSv1,
            verify=True,
            assert_hostname=False,
            client_cert=client_cert,
            ca_cert=ca_cert,
        )
    timeout = int(os.environ.get('DOCKER_CLIENT_TIMEOUT', 60))
    return Client(
        base_url=base_url, tls=tls_config, version='1.15', timeout=timeout
    )
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.