language (stringclasses, 2 values) | func_code_string (stringlengths, 63 to 466k)
---|---
/**
 * Reads a single line of text, assembled from one or more internal buffer
 * chunks produced by {@link #readString()}.
 *
 * <p>Relies on instance state not visible here: {@code cb}/{@code tempOffe}/
 * {@code tempLen} describe the chunk from the last readString() call,
 * {@code isRead} signals that more input is needed, and {@code tempStart}
 * tracks the consumed position. NOTE(review): exact field semantics must be
 * confirmed against the rest of the class.
 *
 * @return the next line, or {@code null} at end of stream
 * @throws IOException if the underlying stream fails
 */
public String readLine() throws IOException {
    ensureOpen();
    StringBuilder sb = null;
    start = tempStart;
    firstRead = true;
    while (true) {
        tempLen = 0;
        ok = false;
        readString();
        // if (tempLen != 0)
        // System.out.println(new String(cb, tempOffe, tempLen));
        if (!isRead && (tempLen == 0 || len == 0)) {
            // Nothing more to consume: flush accumulated text, or report EOF.
            if (sb != null) {
                return sb.toString();
            }
            return null;
        }
        if (!isRead) { // not in need-more-input state: the line is complete, return it
            tempStart += tempLen;
            if (sb == null) {
                return new String(cb, tempOffe, tempLen);
            } else {
                sb.append(cb, tempOffe, tempLen);
                return sb.toString();
            }
        }
        if (tempLen == 0) {
            continue;
        }
        // need-more-input state: accumulate this chunk and keep reading
        if (sb == null) {
            sb = new StringBuilder();
        }
        sb.append(cb, tempOffe, tempLen);
        tempStart += tempLen;
    }
}
java | private boolean pathExists(T u, T v, boolean includeAdjacent) {
if (!nodes.contains(u) || !nodes.contains(v)) {
return false;
}
if (includeAdjacent && isAdjacent(u, v)) {
return true;
}
Deque<T> stack = new LinkedList<>();
Set<T> visited = new HashSet<>();
stack.push(u);
while (!stack.isEmpty()) {
T node = stack.pop();
if (node.equals(v)) {
return true;
}
if (!visited.contains(node)) {
visited.add(node);
edges.get(node).stream()
.filter(e -> includeAdjacent || !node.equals(u) || !e.equals(v))
.forEach(stack::push);
}
}
assert !visited.contains(v);
return false;
} |
java | private List getEntityObjects(Class clazz, final EntityMetadata entityMetadata, EntityType entityType,
SearchHits hits)
{
List results = new ArrayList();
Object entity = null;
for (SearchHit hit : hits.getHits())
{
entity = KunderaCoreUtils.createNewInstance(clazz);
Map<String, Object> hitResult = hit.sourceAsMap();
results.add(wrap(hitResult, entityType, entity, entityMetadata, false));
}
return results;
} |
def get_bios(self):
    """Retrieve the BIOS/UEFI settings currently applied to this physical server.

    Returns:
        dict: Dictionary of BIOS/UEFI values.
    """
    # The server-hardware resource exposes its BIOS values under a /bios sub-URI.
    return self._helper.do_get("{}/bios".format(self.data["uri"]))
def circle(radius=None, center=None, **kwargs):
    """
    Create a Path2D containing a single closed circle,
    represented as a closed three-point Arc entity.

    Parameters
    --------------
    radius : float
      Radius of the circle (defaults to 1.0)
    center : (2,) float
      Center of the circle (defaults to the origin)
    kwargs : dict
      Passed through to the Path2D constructor

    Returns
    -------------
    result : Path2D
      Path containing the specified circle
    """
    from .path import Path2D
    if center is None:
        center = [0.0, 0.0]
    else:
        center = np.asanyarray(center, dtype=np.float64)
    if radius is None:
        radius = 1.0
    else:
        radius = float(radius)
    # (3, 2) float, points on arc
    three = arc.to_threepoint(angles=[0, np.pi],
                              center=center,
                              radius=radius) + center
    result = Path2D(entities=[entities.Arc(points=np.arange(3), closed=True)],
                    vertices=three,
                    **kwargs)
    return result
java | public CircuitBreakerBuilder counterUpdateInterval(Duration counterUpdateInterval) {
requireNonNull(counterUpdateInterval, "counterUpdateInterval");
if (counterUpdateInterval.isNegative() || counterUpdateInterval.isZero()) {
throw new IllegalArgumentException(
"counterUpdateInterval: " + counterUpdateInterval + " (expected: > 0)");
}
this.counterUpdateInterval = counterUpdateInterval;
return this;
} |
/**
 * Renders the stemmer-override dictionary index page.
 * Validation failures fall back to the dictionary index view; the pager
 * is cleared so listing starts from the first page.
 */
@Execute
public HtmlResponse index(final SearchForm form) {
    validate(form, messages -> {}, () -> asDictIndexHtml());
    stemmerOverridePager.clear();
    return asHtml(path_AdminDictStemmeroverride_AdminDictStemmeroverrideJsp).renderWith(data -> {
        searchPaging(data, form);
    });
}
/**
 * Stores the default query timeout in this factory's default properties
 * (kept as a string under {@code PROPS_KEY_QUERY_TIMEOUT}).
 *
 * @param queryTimeout the timeout value to apply to created agents
 * @return this factory, for chaining
 */
@Override
public SqlAgentFactory setQueryTimeout(final int queryTimeout) {
    getDefaultProps().put(PROPS_KEY_QUERY_TIMEOUT, String.valueOf(queryTimeout));
    return this;
}
def fetchall(self):
    """
    As in DBAPI2.0 (except the fact rows are not tuples but
    lists so if you try to modify them, you will succeed instead of
    the correct behavior that would be that an exception would have
    been raised)
    Additionally every row returned by this class is addressable
    by column name besides the column position in the query.
    """
    try:
        allrows = self.__dbapi2_cursor.fetchall()
    except Exception, e:
        # Any failure is treated as a dropped connection: reconnect,
        # rebuild the raw cursor, and retry the fetch exactly once.
        # NOTE(review): the broad except also retries on non-connection
        # errors, and 'e' is unused -- confirm this is intentional.
        self.__connection.reconnect(None, self.__log_reconnect)
        self.__dbapi2_cursor = self.__connection._get_raw_cursor()
        allrows = self.__dbapi2_cursor.fetchall()
    if not allrows:
        # Preserve the driver's empty result object untouched.
        return allrows
    else:
        # Wrap each raw row so columns are addressable by name and index.
        return [self.row(self.__col2idx_map, dbapi2_row) for dbapi2_row in allrows]
/**
 * Rebuilds the subtree around {@code node} so that its building bounds cover
 * {@code desiredBounds}: walks up to the closest ancestor whose bounds contain
 * the desired area (or the root), then rebuilds that subtree from scratch by
 * re-inserting all of its data into a freshly created node.
 *
 * @return the parent of the replaced subtree, or the new root when the whole
 *         tree was rebuilt
 * @throws IllegalStateException if the tree has no root or re-insertion fails
 */
private static <P extends GISPrimitive, N extends AbstractGISTreeSetNode<P, N>>
N rearrangeTree(AbstractGISTreeSet<P, N> tree, N node, Rectangle2afp<?, ?, ?, ?, ?, ?> desiredBounds,
        GISTreeSetNodeFactory<P, N> builder) {
    // Search for the node that completely contains the desired area
    N topNode = node.getParentNode();
    while (topNode != null && isOutsideNodeBuildingBounds(topNode, desiredBounds)) {
        topNode = topNode.getParentNode();
    }
    final Rectangle2afp<?, ?, ?, ?, ?, ?> dr;
    if (topNode == null) {
        // No containing node found, the entire tree should be rebuilt
        topNode = tree.getTree().getRoot();
        if (topNode == null) {
            throw new IllegalStateException();
        }
        dr = union(topNode, desiredBounds);
    } else {
        dr = getNormalizedNodeBuildingBounds(topNode, desiredBounds);
    }
    // Build a new subtree
    final N parent = topNode.getParentNode();
    final Iterator<P> dataIterator = new PrefixDataDepthFirstTreeIterator<>(topNode);
    final N newTopNode = builder.newNode(
            topNode.getZone(),
            dr.getMinX(), dr.getMinY(), dr.getWidth(), dr.getHeight());
    while (dataIterator.hasNext()) {
        if (!addInside(tree, newTopNode, dataIterator.next(), builder, false)) {
            throw new IllegalStateException();
        }
    }
    // Replace rearranged subtree by the new one
    if (parent != null) {
        parent.setChildAt(topNode.getZone().ordinal(), newTopNode);
        return parent;
    }
    tree.getTree().setRoot(newTopNode);
    return newTopNode;
}
def profileit(path=None):
    """cProfile decorator to profile a function.

    :param path: output file path for the raw profile dump; when None the
        stats are only printed to stdout
    :type path: str
    :return: decorator wrapping the target function
    """
    def inner(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            prof = cProfile.Profile()
            retval = prof.runcall(func, *args, **kwargs)
            # print_stats() writes to stdout itself and returns None;
            # the original wrapped it in a print, emitting a stray "None".
            prof.print_stats()
            if path is not None:
                prof.dump_stats(os.path.expanduser(path))
            return retval
        return wrapper
    return inner
def _concrete_acl(self, acl_doc):
    """Concretize an ACL document.

    :param dict acl_doc: A document describing an ACL entry. Should come from the API.
    :returns: An :py:class:`Acl`, or None.
    :rtype: :py:class:`bases.BaseInstance`
    """
    # Anything that is not a dict cannot describe an ACL entry.
    if not isinstance(acl_doc, dict):
        return None
    try:
        # Wrap the raw API document in an Acl object.
        return Acl(document=acl_doc, acls=self)
    except Exception as ex:
        # Construction failed: log and degrade to None rather than raising.
        # An unparseable document most likely means the client is older than
        # the API that produced it.
        logger.exception(ex)
        logger.error('Could not instantiate ACL document. You probably need to upgrade to a '
                     'recent version of the client. Document which caused this error: {}'
                     .format(acl_doc))
        return None
/**
 * Finds the virtual machine identified by the given datastore path inside
 * the given datacenter, via the vSphere SearchIndex service.
 *
 * @param datacenter the datacenter to search in; must not be null
 * @param dPath the datastore path identifying the VM
 * @return the matching VirtualMachine (cast from the managed entity created
 *         for the returned reference)
 * @throws IllegalArgumentException if datacenter is null
 */
public VirtualMachine findByDatastorePath(Datacenter datacenter, String dPath) throws InvalidDatastore, RuntimeFault, RemoteException {
    if (datacenter == null) {
        throw new IllegalArgumentException("datacenter must not be null.");
    }
    ManagedObjectReference mor = getVimService().findByDatastorePath(getMOR(), datacenter.getMOR(), dPath);
    return (VirtualMachine) MorUtil.createExactManagedEntity(getServerConnection(), mor);
}
java | @PostMapping(value = "/attributeMapping/semanticsearch", consumes = APPLICATION_JSON_VALUE)
@ResponseBody
public List<ExplainedAttributeDto> getSemanticSearchAttributeMapping(
@RequestBody Map<String, String> requestBody) {
String mappingProjectId = requestBody.get("mappingProjectId");
String target = requestBody.get("target");
String source = requestBody.get("source");
String targetAttributeName = requestBody.get("targetAttribute");
String searchTermsString = requestBody.get("searchTerms");
Set<String> searchTerms = new HashSet<>();
if (StringUtils.isNotBlank(searchTermsString)) {
searchTerms.addAll(
Sets.newHashSet(searchTermsString.toLowerCase().split("\\s+or\\s+"))
.stream()
.filter(StringUtils::isNotBlank)
.map(String::trim)
.collect(Collectors.toSet()));
}
MappingProject project = mappingService.getMappingProject(mappingProjectId);
MappingTarget mappingTarget = project.getMappingTarget(target);
EntityMapping entityMapping = mappingTarget.getMappingForSource(source);
Attribute targetAttribute =
entityMapping.getTargetEntityType().getAttribute(targetAttributeName);
AttributeSearchResults attributeSearchResults =
semanticSearchService.findAttributes(
entityMapping.getSourceEntityType(),
entityMapping.getTargetEntityType(),
targetAttribute,
searchTerms);
// If no relevant attributes are found, return all source attributes
if (attributeSearchResults.getHits().iterator().hasNext()) {
return stream(entityMapping.getSourceEntityType().getAtomicAttributes())
.filter(attribute -> attribute.getDataType() != COMPOUND)
.map(ExplainedAttributeDto::create)
.collect(toList());
}
return stream(attributeSearchResults.getHits())
.map(Hit::getResult)
.map(this::toExplainedAttributeDto)
.collect(toList());
} |
/**
 * Applies the given SNMP alarm configuration on the appliance via a
 * "modify" operation. The resource is validated for "modify" first; the
 * first element of the update response array is returned.
 */
public static snmp_alarm_config update(nitro_service client, snmp_alarm_config resource) throws Exception
{
    resource.validate("modify");
    return ((snmp_alarm_config[]) resource.update_resource(client))[0];
}
def resample(self, data, cache_dir=None, mask_area=None, **kwargs):
    """Resample `data` by calling `precompute` and `compute` methods.

    Only certain resampling classes may use `cache_dir` and the `mask`
    provided when `mask_area` is True. The return value of calling the
    `precompute` method is passed as the `cache_id` keyword argument
    of the `compute` method, but may not be used directly for caching. It
    is up to the individual resampler subclasses to determine how this
    is used.

    Args:
        data (xarray.DataArray): Data to be resampled
        cache_dir (str): directory to cache precomputed results
                         (default False, optional)
        mask_area (bool): Mask geolocation data where data values are
                          invalid. This should be used when data values
                          may affect what neighbors are considered valid.
                          Defaults to True for SwathDefinition sources.

    Returns (xarray.DataArray): Data resampled to the target area
    """
    # default is to mask areas for SwathDefinitions
    if mask_area is None and isinstance(
            self.source_geo_def, SwathDefinition):
        mask_area = True
    if mask_area:
        # Determine which dims carry geolocation (swath dims vs. plain y/x).
        if isinstance(self.source_geo_def, SwathDefinition):
            geo_dims = self.source_geo_def.lons.dims
        else:
            geo_dims = ('y', 'x')
        flat_dims = [dim for dim in data.dims if dim not in geo_dims]
        # xarray <= 0.10.1 computes dask arrays during isnull
        if np.issubdtype(data.dtype, np.integer):
            # Integer data cannot hold NaN: treat the declared _FillValue
            # (or the dtype maximum when absent) as invalid instead.
            kwargs['mask'] = data == data.attrs.get('_FillValue', np.iinfo(data.dtype.type).max)
        else:
            kwargs['mask'] = data.isnull()
        # Collapse the mask over non-geolocation dims so it matches the
        # geolocation grid shape.
        kwargs['mask'] = kwargs['mask'].all(dim=flat_dims)
    cache_id = self.precompute(cache_dir=cache_dir, **kwargs)
    return self.compute(data, cache_id=cache_id, **kwargs)
def select_locale_by_request(self, request, locales=()):
    """Choose a user's locale from the request's Accept-Language header.

    :param request: the incoming request (its ``headers`` mapping is read)
    :param locales: supported locales, first entry is the preferred default
    :return: the best-matching locale string, or the default when the header
        is missing, empty, or matches nothing
    """
    default_locale = locales and locales[0] or self.cfg.default_locale
    if len(locales) == 1 or 'ACCEPT-LANGUAGE' not in request.headers:
        return default_locale
    ulocales = [
        (q, locale_delim_re.split(v)[0])
        for v, q in parse_accept_header(request.headers['ACCEPT-LANGUAGE'])
    ]
    if not ulocales:
        # Bug fix: an empty/unparseable header previously raised IndexError
        # at the final ``ulocales[0][1]``; fall back to the default instead.
        return default_locale
    # Highest quality value first (sort ascending, then reverse).
    ulocales.sort()
    ulocales.reverse()
    for locale in locales:
        for _, ulocale in ulocales:
            ulocale = locale_delim_re.split(ulocale)[0]
            if ulocale.lower() == locale.lower():
                return ulocale
    # Nothing supported matched: return the user's top preference.
    return ulocales[0][1]
/**
 * Recursively converts a dom4j element tree into the local XmlElement model.
 * Text, comment and CDATA nodes become TextElements (comments/CDATA keep
 * their raw XML form); child elements are transformed recursively.
 *
 * @param node the dom4j element to convert
 * @return the equivalent XmlElement tree
 */
protected XmlElement transformElement(org.dom4j.Element node) {
    XmlElement xe = new XmlElement(node.getName());
    // Copy the element's attributes
    @SuppressWarnings("unchecked")
    Iterator<org.dom4j.Attribute> iterator = node.attributeIterator();
    while (iterator.hasNext()) {
        org.dom4j.Attribute ab = iterator.next();
        xe.addAttribute(new Attribute(ab.getName(), ab.getValue()));
    }
    // Depth-first traversal of child nodes
    @SuppressWarnings("unchecked")
    Iterator<org.dom4j.Node> niter = node.nodeIterator();
    while (niter.hasNext()) {
        org.dom4j.Node n = niter.next();
        // Text node: trimmed text content
        if (n.getNodeType() == org.dom4j.Node.TEXT_NODE) {
            Text text = (Text) n;
            TextElement te = new TextElement(text.getText().trim());
            xe.addElement(te);
        }
        // Element node: recurse
        if (n.getNodeType() == org.dom4j.Node.ELEMENT_NODE) {
            xe.addElement(transformElement((org.dom4j.Element) n));
        }
        // Comment node: kept verbatim as raw XML text
        if (n.getNodeType() == org.dom4j.Node.COMMENT_NODE) {
            TextElement te = new TextElement(n.asXML().trim());
            xe.addElement(te);
        }
        // CDATA node: kept verbatim as raw XML text
        if (n.getNodeType() == org.dom4j.Node.CDATA_SECTION_NODE) {
            TextElement te = new TextElement(n.asXML().trim());
            xe.addElement(te);
        }
    }
    return xe;
}
def get_fd_waveform(template=None, **kwargs):
    """Return a frequency domain gravitational waveform.

    Parameters
    ----------
    template: object
        An object that has attached properties. This can be used to substitute
        for keyword arguments. A common example would be a row in an xml table.
    {params}

    Returns
    -------
    hplustilde: FrequencySeries
        The plus phase of the waveform in frequency domain.
    hcrosstilde: FrequencySeries
        The cross phase of the waveform in frequency domain.
    """
    input_params = props(template, required_args=fd_required_args, **kwargs)
    # Resolve the generator registered for the active processing scheme.
    wav_gen = fd_wav[type(_scheme.mgr.state)]
    if input_params['approximant'] not in wav_gen:
        raise ValueError("Approximant %s not available" %
                            (input_params['approximant']))
    try:
        # If a cutoff-frequency function name was supplied, resolve it to a
        # concrete f_final value before generating the waveform.
        ffunc = input_params.pop('f_final_func')
        if ffunc != '':
            # convert the frequency function to a value
            input_params['f_final'] = pnutils.named_frequency_cutoffs[ffunc](
                input_params)
            # if the f_final is < f_lower, raise a NoWaveformError
            if 'f_final' in input_params and \
                    (input_params['f_lower']+input_params['delta_f'] >=
                     input_params['f_final']):
                raise NoWaveformError("cannot generate waveform: f_lower >= f_final")
    except KeyError:
        # 'f_final_func' absent from input_params: nothing to resolve.
        # NOTE(review): a KeyError from the named_frequency_cutoffs lookup
        # would also be swallowed here -- confirm that is intended.
        pass
    return wav_gen[input_params['approximant']](**input_params)
java | public CmsXmlGroupContainer getCacheGroupContainer(String key, boolean online) {
try {
m_lock.readLock().lock();
CmsXmlGroupContainer retValue;
if (online) {
retValue = m_groupContainersOnline.get(key);
if (LOG.isDebugEnabled()) {
if (retValue == null) {
LOG.debug(
Messages.get().getBundle().key(
Messages.LOG_DEBUG_CACHE_MISSED_ONLINE_1,
new Object[] {key}));
} else {
LOG.debug(
Messages.get().getBundle().key(
Messages.LOG_DEBUG_CACHE_MATCHED_ONLINE_2,
new Object[] {key, retValue}));
}
}
} else {
retValue = m_groupContainersOffline.get(key);
if (LOG.isDebugEnabled()) {
if (retValue == null) {
LOG.debug(
Messages.get().getBundle().key(
Messages.LOG_DEBUG_CACHE_MISSED_OFFLINE_1,
new Object[] {key}));
} else {
LOG.debug(
Messages.get().getBundle().key(
Messages.LOG_DEBUG_CACHE_MATCHED_OFFLINE_2,
new Object[] {key, retValue}));
}
}
}
return retValue;
} finally {
m_lock.readLock().unlock();
}
} |
/**
 * Builds an LDAP entry of object class {@code namedObject} with DN
 * {@code cn=<name>,<baseDn>} and the given name as its {@code cn} attribute.
 *
 * @param name the cn value for the new entry
 * @param baseDn the parent DN under which the entry is placed
 * @return the constructed entry
 * @throws LdapRuntimeException wrapping any checked LdapException raised
 *         while adding the attributes
 */
public static Entry namedObject(String name, Dn baseDn) {
    Dn dn = LdapUtils.concatDn(SchemaConstants.CN_ATTRIBUTE, name, baseDn);
    Entry entry = new DefaultEntry(dn);
    try {
        entry.add(SchemaConstants.OBJECT_CLASS_ATTRIBUTE, SchemaConstants.NAMED_OBJECT_OC);
        entry.add(SchemaConstants.CN_ATTRIBUTE, name);
    } catch (LdapException e) {
        throw new LdapRuntimeException(e);
    }
    return entry;
}
def fromFile(cls, person, inputFile, format):
    """
    Create a L{Mugshot} item for C{person} out of the image data in
    C{inputFile}, or update C{person}'s existing L{Mugshot} item to
    reflect the new images.

    @param inputFile: An image of a person.
    @type inputFile: C{file}

    @param person: The person this mugshot is to be associated with.
    @type person: L{Person}

    @param format: The format of the data in C{inputFile}.
    @type format: C{unicode} (e.g. I{jpeg})

    @rtype: L{Mugshot}
    """
    # Generate both thumbnail sizes from the same input; rewind between reads.
    largeBody = cls.makeThumbnail(inputFile, person, format, smaller=False)
    inputFile.seek(0)
    smallBody = cls.makeThumbnail(
        inputFile, person, format, smaller=True)
    contentType = u'image/' + format
    # Reuse the person's existing mugshot item when one exists.
    mugshot = person.store.findUnique(
        cls, cls.person == person, default=None)
    if mugshot is None:
        mugshot = cls(store=person.store,
                      person=person,
                      type=contentType,
                      body=largeBody,
                      smallerBody=smallBody)
    else:
        mugshot.body = largeBody
        mugshot.smallerBody = smallBody
        mugshot.type = contentType
    return mugshot
/**
 * Extracts the model's key value from the request parameters and converts
 * it to the key's declared class type (strings pass through unconverted).
 *
 * <p>NOTE(review): any exception is logged and swallowed, so {@code null}
 * is returned both for a missing parameter and for a failed conversion --
 * confirm callers can distinguish these cases.
 *
 * @param request the incoming HTTP request holding the key parameter
 * @param modelHandler supplies the model mapping (key name and key type)
 * @return the converted key value, or {@code null}
 */
public Object getParamKeyValue(HttpServletRequest request, ModelHandler modelHandler) {
    Object keyValue = null;
    try {
        ModelMapping modelMapping = modelHandler.getModelMapping();
        String keyName = modelMapping.getKeyName();
        Debug.logVerbose("[JdonFramework] the keyName is " + keyName, module);
        String keyValueS = request.getParameter(keyName);
        Debug.logVerbose("[JdonFramework] got the keyValue is " + keyValueS, module);
        if (keyValueS == null) {
            Debug.logVerbose("[JdonFramework]the keyValue is null", module);
        }
        Class keyClassType = modelMapping.getKeyClassType();
        if (keyClassType.isAssignableFrom(String.class)) {
            keyValue = keyValueS;
        } else {
            Debug.logVerbose("[JdonFramework] convert String keyValue to" + keyClassType.getName(), module);
            keyValue = ConvertUtils.convert(keyValueS, keyClassType);
        }
    } catch (Exception e) {
        Debug.logError("[JdonFramework] getParamKeyValue error: " + e);
    }
    return keyValue;
}
/**
 * Validates the given serialized value and renders it as a string by
 * round-tripping it through this type's serializer (deserialize, then
 * toString).
 *
 * @param bytes the serialized value to render
 * @return the human-readable string form of the deserialized value
 */
public String getString(ByteBuffer bytes)
{
    TypeSerializer<T> serializer = getSerializer();
    serializer.validate(bytes);
    return serializer.toString(serializer.deserialize(bytes));
}
def get_style_code(self, label):
    """Return the code stored for the given label string.

    Inverse of ``get_code``.

    Parameters
    ----------
    label: String
    \tLabel string, field 0 of style tuple

    Raises
    ------
    ValueError
    \tIf no style tuple carries the given label.
    """
    # First matching style wins; a sentinel distinguishes "not found"
    # from a legitimately falsy stored code.
    missing = object()
    code = next((style[1] for style in self.styles if style[0] == label), missing)
    if code is not missing:
        return code
    raise ValueError(_("Label {label} is invalid.").format(label=label))
/**
 * ANTLR-generated rule method for the {@code define_namespace} grammar rule:
 * {@code 'DEFINE' ('DEFAULT'? 'NAMESPACE') OBJECT_IDENT 'AS' 'URL' quoted_value}.
 *
 * <p>Generated code -- do not hand-edit; regenerate from BELScript.g instead.
 * Recognition errors are reported and recovered from, and the return value's
 * tree is replaced by an error node in that case.
 */
public final BELScriptParser.define_namespace_return define_namespace() throws RecognitionException {
    BELScriptParser.define_namespace_return retval = new BELScriptParser.define_namespace_return();
    retval.start = input.LT(1);
    Object root_0 = null;
    Token string_literal33=null;
    Token string_literal34=null;
    Token string_literal35=null;
    Token OBJECT_IDENT36=null;
    Token string_literal37=null;
    Token string_literal38=null;
    BELScriptParser.quoted_value_return quoted_value39 = null;
    Object string_literal33_tree=null;
    Object string_literal34_tree=null;
    Object string_literal35_tree=null;
    Object OBJECT_IDENT36_tree=null;
    Object string_literal37_tree=null;
    Object string_literal38_tree=null;
    // Pushed for error messages; popped on the success path below.
    paraphrases.push("in define namespace.");
    try {
        // BELScript.g:98:5: ( ( 'DEFINE' ( ( 'DEFAULT' )? 'NAMESPACE' ) ) OBJECT_IDENT 'AS' 'URL' quoted_value )
        // BELScript.g:99:5: ( 'DEFINE' ( ( 'DEFAULT' )? 'NAMESPACE' ) ) OBJECT_IDENT 'AS' 'URL' quoted_value
        {
        root_0 = (Object)adaptor.nil();
        // BELScript.g:99:5: ( 'DEFINE' ( ( 'DEFAULT' )? 'NAMESPACE' ) )
        // BELScript.g:99:6: 'DEFINE' ( ( 'DEFAULT' )? 'NAMESPACE' )
        {
        string_literal33=(Token)match(input,27,FOLLOW_27_in_define_namespace429);
        string_literal33_tree = (Object)adaptor.create(string_literal33);
        adaptor.addChild(root_0, string_literal33_tree);
        // BELScript.g:99:15: ( ( 'DEFAULT' )? 'NAMESPACE' )
        // BELScript.g:99:16: ( 'DEFAULT' )? 'NAMESPACE'
        {
        // BELScript.g:99:16: ( 'DEFAULT' )?
        int alt6=2;
        int LA6_0 = input.LA(1);
        if ( (LA6_0==28) ) {
            alt6=1;
        }
        switch (alt6) {
            case 1 :
                // BELScript.g:99:17: 'DEFAULT'
                {
                string_literal34=(Token)match(input,28,FOLLOW_28_in_define_namespace433);
                string_literal34_tree = (Object)adaptor.create(string_literal34);
                adaptor.addChild(root_0, string_literal34_tree);
                }
                break;
        }
        string_literal35=(Token)match(input,29,FOLLOW_29_in_define_namespace437);
        string_literal35_tree = (Object)adaptor.create(string_literal35);
        adaptor.addChild(root_0, string_literal35_tree);
        }
        }
        OBJECT_IDENT36=(Token)match(input,OBJECT_IDENT,FOLLOW_OBJECT_IDENT_in_define_namespace441);
        OBJECT_IDENT36_tree = (Object)adaptor.create(OBJECT_IDENT36);
        adaptor.addChild(root_0, OBJECT_IDENT36_tree);
        string_literal37=(Token)match(input,30,FOLLOW_30_in_define_namespace443);
        string_literal37_tree = (Object)adaptor.create(string_literal37);
        adaptor.addChild(root_0, string_literal37_tree);
        string_literal38=(Token)match(input,31,FOLLOW_31_in_define_namespace445);
        string_literal38_tree = (Object)adaptor.create(string_literal38);
        adaptor.addChild(root_0, string_literal38_tree);
        pushFollow(FOLLOW_quoted_value_in_define_namespace447);
        quoted_value39=quoted_value();
        state._fsp--;
        adaptor.addChild(root_0, quoted_value39.getTree());
        }
        retval.stop = input.LT(-1);
        retval.tree = (Object)adaptor.rulePostProcessing(root_0);
        adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
        paraphrases.pop();
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
        retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re);
    }
    finally {
    }
    return retval;
}
def get_class_recipes(class_url, max_page=20, sleep=0.1):
    """Collect all recipe URLs listed under a recipe-category URL.

    :param class_url: category page URL; a ``?page=N`` query is appended
    :param max_page: brute-force upper bound on pages to request (exclusive)
    :param sleep: delay in seconds between page requests
    :return: dict mapping recipe name -> absolute recipe URL
    """
    class_url = class_url + "?page={page}"
    recipes = dict()
    # Brute-force crawl: request up to max_page pages of the category.
    for page in range(1, max_page):
        time.sleep(sleep)
        url = class_url.format(page=page)
        print("current url: ", url)
        response = requests.get(url, headers=get_header())
        html = BeautifulSoup(response.text, "lxml")
        # Locate this page's recipe list container.
        menus = html.find("div", {'class': 'new-menu-list search-menu-list clearfix mt10'})
        if not menus:
            # Bug fix: the original test was inverted -- it printed the
            # failure message when menus WERE found and crashed with an
            # AttributeError (None.find_all) when they were not.
            print("get recipes fail: ", url)
            continue
        for m in menus.find_all('a'):
            name = re.sub("\n| ", "", m.text)
            recipes[name] = urljoin(HOME_URL, m['href'])
    return recipes
/**
 * Reads a single character while tracking the current line/column position.
 * The line counter is advanced lazily: it is bumped on the read *after* a
 * newline was seen. Windows "\r\n" pairs count as one line break -- after a
 * '\r' the stream is marked and the next char peeked, resetting unless it
 * is the matching '\n'.
 *
 * @return the character read, or -1 at end of stream
 * @throws IOException if the underlying reader fails
 */
@Override
public int read() throws IOException {
    if (newLineWasRead) {
        line += 1;
        column = 1;
        newLineWasRead = false;
    }
    int charRead = super.read();
    if (charRead > -1) {
        char c = (char)charRead;
        // found a \r or \n, like on Mac or Unix
        // could also be Windows' \r\n
        if (c == '\r' || c == '\n') {
            newLineWasRead = true;
            if (c == '\r') {
                mark(1);
                c = (char)super.read();
                // check if we have \r\n like on Windows
                // if it's not \r\n we reset, otherwise, the \n is just consummed
                if (c != '\n') {
                    reset();
                }
            }
        } else {
            column += 1;
        }
    }
    return charRead;
}
/**
 * Adds a single labeled training instance to the underlying
 * maximum-entropy trainer.
 *
 * @param label the class label for this instance
 * @param features the feature strings describing the instance
 */
public void addTrainingInstance(String label, List<String> features) {
    maxent.addInstance(label,features);
}
def query_single_page(query, lang, pos, retry=50, from_user=False):
    """
    Returns tweets from the given URL.

    :param query: The query parameter of the query url
    :param lang: The language parameter of the query url
    :param pos: The query url parameter that determines where to start looking
    :param retry: Number of retries if something goes wrong.
    :param from_user: Whether the query targets a single user's timeline
        (affects the URL and the shape of the returned position token).
    :return: The list of tweets, the pos argument for getting the next page.
    """
    url = get_query_url(query, lang, pos, from_user)
    try:
        response = requests.get(url, headers=HEADER)
        if pos is None:  # the first page is served as plain HTML
            html = response.text or ''
            json_resp = None
        else:  # later pages come as JSON wrapping the HTML fragment
            html = ''
            try:
                json_resp = json.loads(response.text)
                html = json_resp['items_html'] or ''
            except ValueError as e:
                logger.exception('Failed to parse JSON "{}" while requesting "{}"'.format(e, url))
        tweets = list(Tweet.from_html(html))
        if not tweets:
            # Empty page: advance the position token (if any) and retry.
            if json_resp:
                pos = json_resp['min_position']
            else:
                pos = None
            if retry > 0:
                return query_single_page(query, lang, pos, retry - 1, from_user)
            else:
                return [], pos
        if json_resp:
            return tweets, urllib.parse.quote(json_resp['min_position'])
        if from_user:
            return tweets, tweets[-1].id
        return tweets, "TWEET-{}-{}".format(tweets[-1].id, tweets[0].id)
    except requests.exceptions.HTTPError as e:
        logger.exception('HTTPError {} while requesting "{}"'.format(
            e, url))
    except requests.exceptions.ConnectionError as e:
        logger.exception('ConnectionError {} while requesting "{}"'.format(
            e, url))
    except requests.exceptions.Timeout as e:
        logger.exception('TimeOut {} while requesting "{}"'.format(
            e, url))
    except json.decoder.JSONDecodeError as e:
        logger.exception('Failed to parse JSON "{}" while requesting "{}".'.format(
            e, url))
    if retry > 0:
        logger.info('Retrying... (Attempts left: {})'.format(retry))
        # Bug fix: propagate from_user on this retry path -- the original
        # dropped it, silently resetting it to False after a network error.
        return query_single_page(query, lang, pos, retry - 1, from_user)
    logger.error('Giving up.')
    return [], None
def _apply(self, func, name, window=None, center=None,
           check_minp=None, **kwargs):
    """
    Dispatch to apply; we are stripping all of the _apply kwargs and
    performing the original function call on the grouped object.
    """
    # NOTE(review): 'func', 'window', 'center' and 'check_minp' are accepted
    # for signature compatibility but are not used here -- only 'name' is.
    def f(x, name=name, *args):
        # Re-wrap each group so windowing attributes are preserved, then
        # call the named method (when 'name' is a string) or apply the
        # callable to the wrapped group.
        x = self._shallow_copy(x)
        if isinstance(name, str):
            return getattr(x, name)(*args, **kwargs)
        return x.apply(name, *args, **kwargs)
    return self._groupby.apply(f)
/**
 * Lists every schedule-aligned instant in the half-open range
 * [firstInstant, lastInstant), stepping from one instant to the next using
 * the schedule's cron expression (evaluated in UTC).
 *
 * @throws IllegalArgumentException if either endpoint is not aligned to the
 *         schedule, or if lastInstant precedes firstInstant
 */
public static List<Instant> instantsInRange(Instant firstInstant, Instant lastInstant,
                                            Schedule schedule) {
    Preconditions.checkArgument(
        isAligned(firstInstant, schedule) && isAligned(lastInstant, schedule),
        "unaligned instant");
    Preconditions.checkArgument(!lastInstant.isBefore(firstInstant),
        "last instant should not be before first instant");
    final ExecutionTime executionTime = ExecutionTime.forCron(cron(schedule));
    final List<Instant> instants = new ArrayList<>();
    Instant currentInstant = firstInstant;
    while (currentInstant.isBefore(lastInstant)) {
        instants.add(currentInstant);
        final ZonedDateTime utcDateTime = currentInstant.atZone(UTC);
        currentInstant = executionTime.nextExecution(utcDateTime)
            .orElseThrow(IllegalArgumentException::new) // with unix cron, this should not happen
            .toInstant();
    }
    return instants;
}
/**
 * Creates the list of day-start unix timestamps covering the given range.
 * The start is normalized to the beginning of its day and the end to the
 * start of the following day, then days are stepped in fixed 86400-second
 * increments.
 *
 * <p>NOTE(review): the fixed 24h step assumes a DST-free timeline --
 * confirm that TimeModifier normalizes timestamps accordingly.
 *
 * @return the day-start timestamps, or an empty list if start &gt; end
 */
public static List<Long> createTimestampList(final long startUnixTimestamp,
        final long endUnixTimestamp) {
    if (startUnixTimestamp > endUnixTimestamp) {
        return Collections.emptyList();
    }
    // normalize the start and end (next day's start)
    final long normStart = TimeModifier.START_OF_DAY.applyModifier(startUnixTimestamp);
    final long normEnd = TimeModifier.moveDays(endUnixTimestamp, true, 1);
    // determine which times we have to query for
    final List<Long> times = new ArrayList<>();
    for (long time = normStart; time < normEnd; time += 24 * 60 * 60) {
        times.add(time);
    }
    return times;
}
def field_cache_to_index_pattern(self, field_cache):
    """Serialize a field cache into a .kibana index-pattern document string."""
    # Compact JSON (no whitespace) is what the .kibana index expects;
    # the field cache itself is embedded as a JSON-encoded string.
    document = {
        'customFormats': "{}",
        'title': self.index_pattern,
        'fields': json.dumps(field_cache, separators=(',', ':')),
    }
    return json.dumps(document, separators=(',', ':'))
java | public void setDevices(java.util.Collection<DeviceSummary> devices) {
if (devices == null) {
this.devices = null;
return;
}
this.devices = new java.util.ArrayList<DeviceSummary>(devices);
} |
/**
 * Fetches the global IPsec parameter resource from the appliance.
 * The service returns a single-element array; its first entry is returned.
 */
public static ipsecparameter get(nitro_service service) throws Exception{
    ipsecparameter obj = new ipsecparameter();
    ipsecparameter[] response = (ipsecparameter[])obj.get_resources(service);
    return response[0];
}
/**
 * Computes the component-wise multiply-add {@code this + a * b} and stores
 * the result in {@code dest}; {@code this} is not modified.
 *
 * @param a the first multiplicand
 * @param b the second multiplicand
 * @param dest the vector receiving the result
 * @return dest
 */
public Vector4d fma(Vector4dc a, Vector4dc b, Vector4d dest) {
    dest.x = x + a.x() * b.x();
    dest.y = y + a.y() * b.y();
    dest.z = z + a.z() * b.z();
    dest.w = w + a.w() * b.w();
    return dest;
}
/**
 * Starts a definition list: a top-level list (depth 1 and not nested inside
 * another list) is preceded by an empty line, while a nested list only gets
 * a single newline.
 */
@Override
public void beginDefinitionList(Map<String, String> parameters)
{
    if (getBlockState().getDefinitionListDepth() == 1 && !getBlockState().isInList()) {
        printEmptyLine();
    } else {
        getPrinter().print(NL);
    }
}
def set_postmortem_debugger(cls, cmdline,
                            auto = None, hotkey = None, bits = None):
    """
    Sets the postmortem debugging settings in the Registry.

    @warning: This method requires administrative rights.

    @see: L{get_postmortem_debugger}

    @type  cmdline: str
    @param cmdline: Command line to the new postmortem debugger.
        When the debugger is invoked, the first "%ld" is replaced with the
        process ID and the second "%ld" is replaced with the event handle.
        Don't forget to enclose the program filename in double quotes if
        the path contains spaces.

    @type  auto: bool
    @param auto: Set to C{True} if no user interaction is allowed, C{False}
        to prompt a confirmation dialog before attaching.
        Use C{None} to leave this value unchanged.

    @type  hotkey: int
    @param hotkey: Virtual key scan code for the user defined hotkey.
        Use C{0} to disable the hotkey.
        Use C{None} to leave this value unchanged.

    @type  bits: int
    @param bits: Set to C{32} for the 32 bits debugger, or C{64} for the
        64 bits debugger. Set to {None} for the default (L{System.bits}).

    @rtype:  None
    @return: C{None}. (The previous settings are not read back by this
        method; use L{get_postmortem_debugger} to retrieve them first.)

    @raise WindowsError:
        Raises an exception on error.
    """
    if bits is None:
        bits = cls.bits
    elif bits not in (32, 64):
        raise NotImplementedError("Unknown architecture (%r bits)" % bits)
    # 32-bit settings on a 64-bit OS live under the Wow6432Node key.
    if bits == 32 and cls.bits == 64:
        keyname = 'HKLM\\SOFTWARE\\Wow6432Node\\Microsoft\\Windows NT\\CurrentVersion\\AeDebug'
    else:
        keyname = 'HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AeDebug'
    key = cls.registry[keyname]
    # Each setting is written only when explicitly provided (None = keep).
    if cmdline is not None:
        key['Debugger'] = cmdline
    if auto is not None:
        key['Auto'] = int(bool(auto))
    if hotkey is not None:
        key['UserDebuggerHotkey'] = int(hotkey)
def published(self, request=None):
    """
    Returns the published documents in the current language.

    :param request: A Request instance.
    """
    # Prefer the request's language; fall back to the active translation.
    language = getattr(request, 'LANGUAGE_CODE', get_language())
    if not language:
        # No language could be determined: nothing is published "in it".
        return self.model.objects.none()
    qs = self.get_queryset()
    qs = qs.filter(
        translations__is_published=True,
        translations__language_code=language,
    )
    # either it has no category or the one it has is published
    qs = qs.filter(
        models.Q(category__isnull=True) |
        models.Q(category__is_published=True))
    return qs
def xmlGenBinaryDataArrayList(binaryDataInfo, binaryDataDict,
                              compression='zlib', arrayTypes=None):
    """Generate an mzML ``binaryDataArrayList`` XML element.

    :param binaryDataInfo: mapping of array type to an info dict that holds
        at least the keys ``'params'`` and ``'dataProcessingRef'``
    :param binaryDataDict: mapping of array type to a numpy array holding
        the actual binary data
    :param compression: ``'zlib'`` enables zlib compression; any other value
        results in no compression
    :param arrayTypes: optional ordering of the arrays; defaults to the key
        order of ``binaryDataInfo``
    :returns: an ``ETREE`` Element for the binaryDataArrayList
    """
    #Note: any other value for "compression" than "zlib" results in no
    # compression
    #Note: Use arrayTypes parameter to specify the order of the arrays
    if arrayTypes is None:
        arrayTypes = [_ for _ in viewkeys(binaryDataInfo)]
    numEntries = len(binaryDataInfo)
    xmlBinaryDataArrayList = ETREE.Element('binaryDataArrayList',
                                           {'count': str(numEntries)}
                                           )
    for arrayType in arrayTypes:
        _, dataTypeParam = maspy.xml.findBinaryDataType(binaryDataInfo[arrayType]['params'])
        binaryData = binaryDataDict[arrayType]
        # 64-bit encoding for float64 arrays, 32-bit for everything else.
        bitEncoding = '64' if binaryData.dtype.str == '<f8' else '32'
        if binaryData.size > 0:
            binaryData, arrayLength = maspy.xml.encodeBinaryData(binaryData,
                                                                 bitEncoding,
                                                                 compression
                                                                 )
        else:
            # Empty array: emit an empty payload of length 0.
            binaryData = ''
            arrayLength = 0
        # --- define binaryDataArray parameters --- #
        # The encoding and compression CV params are set from the actual
        # values used above; any caller-supplied duplicates are dropped.
        params = list()
        if bitEncoding == '64':
            params.append(('MS:1000523', None, None))
        else:
            params.append(('MS:1000521', None, None))
        if compression == 'zlib':
            params.append(('MS:1000574', None, None))
        else:
            params.append(('MS:1000576', None, None))
        mandatoryAccessions = ['MS:1000523', 'MS:1000521', 'MS:1000574',
                               'MS:1000576'
                               ]
        for param in binaryDataInfo[arrayType]['params']:
            if param[0] not in mandatoryAccessions:
                params.append(param)
        #Note: not all attributes supported
        binaryDataArrayAttrib = {'encodedLength': str(len(binaryData))}
        for attr in ['dataProcessingRef']:
            if binaryDataInfo[arrayType][attr] is not None:
                binaryDataArrayAttrib[attr] = binaryDataInfo[arrayType][attr]
        xmlBinaryDataArray = ETREE.Element('binaryDataArray',
                                           binaryDataArrayAttrib
                                           )
        maspy.xml.xmlAddParams(xmlBinaryDataArray, params)
        xmlBinary = ETREE.Element('binary')
        xmlBinary.text = binaryData
        xmlBinaryDataArray.append(xmlBinary)
        xmlBinaryDataArrayList.append(xmlBinaryDataArray)
    return xmlBinaryDataArrayList
def fnmatch(name, pat):
    """Case-insensitively test whether FILENAME matches PATTERN.

    Patterns are Unix shell style:

    *       matches everything
    ?       matches any single character
    [seq]   matches any character in seq
    [!seq]  matches any char not in seq

    An initial period in FILENAME is not special.  Both arguments are
    lower-cased before matching; use fnmatchcase(FILENAME, PATTERN) for a
    case-sensitive comparison.
    """
    return fnmatchcase(name.lower(), pat.lower())
def get_for_model(self, model):
    """
    Return an ``(Entry instance, created)`` tuple for the specified
    model instance.

    :rtype: wagtailplus.wagtailrelations.models.Entry.
    """
    content_type = ContentType.objects.get_for_model(model)
    return self.get_or_create(content_type=content_type, object_id=model.pk)
java | public static CommerceAddressRestriction findByCommerceCountryId_First(
	long commerceCountryId,
	OrderByComparator<CommerceAddressRestriction> orderByComparator)
	throws com.liferay.commerce.exception.NoSuchAddressRestrictionException {
	// Returns the first address restriction for the given country, in the
	// order imposed by the comparator; throws
	// NoSuchAddressRestrictionException when none exists. Pure delegation
	// to the persistence layer.
	return getPersistence()
			   .findByCommerceCountryId_First(commerceCountryId,
		orderByComparator);
} |
java | public static void sendEmail(String email, String subject, String emailBody) {
Intent emailIntent = new Intent(Intent.ACTION_SEND);
emailIntent.setType("message/rfc822");
emailIntent.putExtra(Intent.EXTRA_EMAIL, new String[]{email});
emailIntent.putExtra(Intent.EXTRA_SUBJECT, subject);
emailIntent.putExtra(Intent.EXTRA_TEXT, emailBody);
emailIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
try {
QuickUtils.getContext().startActivity(emailIntent);
} catch (android.content.ActivityNotFoundException ex) {
QuickUtils.system.toast("There are no email clients installed.");
}
} |
java | public boolean masterStarted() {
if (objectMapper == null)
objectMapper = new ObjectMapper();
try {
String type = objectMapper.readValue(
Unirest.get(String.format("http://%s:%d/opType", masterStatusHost, masterStatusPort)).asJson()
.getBody().toString(),
ServerTypeJson.class).getType();
if (!type.equals("master"))
throw new IllegalStateException("Wrong opType " + type);
Unirest.get(String.format("http://%s:%d/started", masterStatusHost, masterStatusPort)).asJson().getBody();
return objectMapper.readValue(
Unirest.get(String.format("http://%s:%d/started", masterStatusHost, masterStatusPort))
.asJson().getBody().toString(),
MasterStatus.class).started();
} catch (Exception e) {
e.printStackTrace();
}
return false;
} |
python | def as_html(self, table_class='code-difftable', line_class='line',
            new_lineno_class='lineno old', old_lineno_class='lineno new',
            code_class='code'):
    """
    Return udiff as html table with customized css classes.

    The ``*_class`` arguments are injected verbatim into the generated
    markup. Returns ``None`` when the prepared diff contains no chunks.
    """
    def _link_to_if(condition, label, url):
        """
        Generates a link if condition is meet or just the label if not.
        """
        if condition:
            return '''<a href="%(url)s">%(label)s</a>''' % {'url': url,
                'label': label}
        else:
            return label
    diff_lines = self.prepare()
    _html_empty = True
    _html = []
    _html.append('''<table class="%(table_class)s">\n''' \
        % {'table_class': table_class})
    for diff in diff_lines:
        for line in diff['chunks']:
            _html_empty = False
            for change in line:
                _html.append('''<tr class="%(line_class)s %(action)s">\n''' \
                    % {'line_class': line_class,
                       'action': change['action']})
                anchor_old_id = ''
                anchor_new_id = ''
                # Per-line anchors: "<file>_o<N>" for old, "<file>_n<N>" for
                # new, so individual lines can be linked to directly.
                anchor_old = "%(filename)s_o%(oldline_no)s" % \
                    {'filename': self._safe_id(diff['filename']),
                     'oldline_no': change['old_lineno']}
                anchor_new = "%(filename)s_n%(oldline_no)s" % \
                    {'filename': self._safe_id(diff['filename']),
                     'oldline_no': change['new_lineno']}
                # '...' marks a chunk separator and gets no anchor/link.
                cond_old = change['old_lineno'] != '...' and \
                    change['old_lineno']
                cond_new = change['new_lineno'] != '...' and \
                    change['new_lineno']
                if cond_old:
                    anchor_old_id = 'id="%s"' % anchor_old
                if cond_new:
                    anchor_new_id = 'id="%s"' % anchor_new
                ###########################################################
                # OLD LINE NUMBER
                ###########################################################
                _html.append('''\t<td %(a_id)s class="%(old_lineno_cls)s">''' \
                    % {'a_id': anchor_old_id,
                       'old_lineno_cls': old_lineno_class})
                _html.append('''<pre>%(link)s</pre>''' \
                    % {'link':
                       _link_to_if(cond_old, change['old_lineno'], '#%s' \
                                   % anchor_old)})
                _html.append('''</td>\n''')
                ###########################################################
                # NEW LINE NUMBER
                ###########################################################
                _html.append('''\t<td %(a_id)s class="%(new_lineno_cls)s">''' \
                    % {'a_id': anchor_new_id,
                       'new_lineno_cls': new_lineno_class})
                _html.append('''<pre>%(link)s</pre>''' \
                    % {'link':
                       _link_to_if(cond_new, change['new_lineno'], '#%s' \
                                   % anchor_new)})
                _html.append('''</td>\n''')
                ###########################################################
                # CODE
                ###########################################################
                _html.append('''\t<td class="%(code_class)s">''' \
                    % {'code_class': code_class})
                _html.append('''\n\t\t<pre>%(code)s</pre>\n''' \
                    % {'code': change['line']})
                _html.append('''\t</td>''')
                _html.append('''\n</tr>\n''')
    _html.append('''</table>''')
    if _html_empty:
        return None
    return ''.join(_html) |
java | public void invalidate() {
    // Drop all cached credential / connection state so this object can no
    // longer be used to access the service until it is re-initialized.
    this.appToken = null;
    this.appDBID = null;
    this.authorizer = null;
    this.intuitServiceType = null;
    this.realmID = null;
} |
python | def cli(ctx, config_file, profile, endpoint_url, output, color, debug):
    """
    Alerta client unified command-line tool.
    """
    # Merge configuration sources: config file, then profile section, then
    # remote (server-provided) configuration.
    config = Config(config_file)
    # NOTE(review): "get_config_for_profle" looks like a typo for
    # "get_config_for_profile"; the callee is defined elsewhere, so the name
    # is kept unchanged here -- confirm against the Config class.
    config.get_config_for_profle(profile)
    config.get_remote_config(endpoint_url)
    ctx.obj = config.options
    # override current options with command-line options or environment variables
    ctx.obj['output'] = output or config.options['output']
    ctx.obj['color'] = color or os.environ.get('CLICOLOR', None) or config.options['color']
    endpoint = endpoint_url or config.options['endpoint']
    # Build the API client shared by all sub-commands via the click context.
    ctx.obj['client'] = Client(
        endpoint=endpoint,
        key=config.options['key'],
        token=get_token(endpoint),
        username=config.options.get('username', None),
        password=config.options.get('password', None),
        timeout=float(config.options['timeout']),
        ssl_verify=config.options['sslverify'],
        debug=debug or os.environ.get('DEBUG', None) or config.options['debug']
    ) |
java | public final void mFLOAT() throws RecognitionException {
    // ANTLR-generated lexer rule: FLOAT := '-'? ('0'..'9')+ ('.' ('0'..'9')*)? EXPONENT?
    // Generated from src/riemann/Query.g -- do not edit by hand; regenerate
    // from the grammar instead.
    try {
        int _type = FLOAT;
        int _channel = DEFAULT_TOKEN_CHANNEL;
        // src/riemann/Query.g:92:5: ( ( '-' )? ( '0' .. '9' )+ ( '.' ( '0' .. '9' )* )? ( EXPONENT )? )
        // src/riemann/Query.g:92:9: ( '-' )? ( '0' .. '9' )+ ( '.' ( '0' .. '9' )* )? ( EXPONENT )?
        {
        // src/riemann/Query.g:92:9: ( '-' )?
        int alt4=2;
        int LA4_0 = input.LA(1);
        if ( (LA4_0=='-') ) {
            alt4=1;
        }
        switch (alt4) {
            case 1 :
                // src/riemann/Query.g:92:9: '-'
                {
                match('-');
                }
                break;
        }
        // src/riemann/Query.g:92:14: ( '0' .. '9' )+
        int cnt5=0;
        loop5:
        do {
            int alt5=2;
            int LA5_0 = input.LA(1);
            if ( ((LA5_0>='0' && LA5_0<='9')) ) {
                alt5=1;
            }
            switch (alt5) {
                case 1 :
                    // src/riemann/Query.g:92:15: '0' .. '9'
                    {
                    matchRange('0','9');
                    }
                    break;
                default :
                    if ( cnt5 >= 1 ) break loop5;
                    EarlyExitException eee =
                        new EarlyExitException(5, input);
                    throw eee;
            }
            cnt5++;
        } while (true);
        // src/riemann/Query.g:92:26: ( '.' ( '0' .. '9' )* )?
        int alt7=2;
        int LA7_0 = input.LA(1);
        if ( (LA7_0=='.') ) {
            alt7=1;
        }
        switch (alt7) {
            case 1 :
                // src/riemann/Query.g:92:27: '.' ( '0' .. '9' )*
                {
                match('.');
                // src/riemann/Query.g:92:31: ( '0' .. '9' )*
                loop6:
                do {
                    int alt6=2;
                    int LA6_0 = input.LA(1);
                    if ( ((LA6_0>='0' && LA6_0<='9')) ) {
                        alt6=1;
                    }
                    switch (alt6) {
                        case 1 :
                            // src/riemann/Query.g:92:32: '0' .. '9'
                            {
                            matchRange('0','9');
                            }
                            break;
                        default :
                            break loop6;
                    }
                } while (true);
                }
                break;
        }
        // src/riemann/Query.g:92:45: ( EXPONENT )?
        int alt8=2;
        int LA8_0 = input.LA(1);
        if ( (LA8_0=='E'||LA8_0=='e') ) {
            alt8=1;
        }
        switch (alt8) {
            case 1 :
                // src/riemann/Query.g:92:45: EXPONENT
                {
                mEXPONENT();
                }
                break;
        }
        }
        state.type = _type;
        state.channel = _channel;
    }
    finally {
    }
} |
java | @SuppressWarnings("checkstyle:all")
// Generates the member declarations of the formal-parameter builder type as
// a code template. The same template serves three targets:
//  - the public interface            (forInterface == true)
//  - the appender that delegates to a wrapped builder (forAppender == true)
//  - the concrete implementation     (neither flag set)
protected StringConcatenationClient generateMembers(boolean forInterface, boolean forAppender) {
	final CodeElementExtractor.ElementDescription parameter = getCodeElementExtractor().getFormalParameter();
	final FormalParameterDescription exparameter = new FormalParameterDescription(parameter,
			findAssignmentFromFeatureName(parameter.getGrammarComponent(),
					getCodeBuilderConfig().getParameterDefaultValueGrammarName()));
	return new StringConcatenationClient() {
		@Override
		protected void appendTo(TargetStringConcatenation it) {
			// Implementation-only fields (injected providers, context, the
			// parameter being built and its lazily created default value).
			if (!forInterface && !forAppender) {
				it.append("\t@"); //$NON-NLS-1$
				it.append(Inject.class);
				it.newLine();
				it.append("\tprivate "); //$NON-NLS-1$
				it.append(Provider.class);
				it.append("<"); //$NON-NLS-1$
				it.append(getExpressionBuilderInterface());
				it.append("> expressionProvider;"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				it.append("\tprivate "); //$NON-NLS-1$
				it.append(getCodeElementExtractor().getFormalParameterContainerType());
				it.append(" context;"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				it.append("\tprivate "); //$NON-NLS-1$
				it.append(parameter.getElementType());
				it.append(" parameter;"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				it.append("\tprivate "); //$NON-NLS-1$
				it.append(getExpressionBuilderInterface());
				it.append(" defaultValue;"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				it.append("\t@"); //$NON-NLS-1$
				it.append(Inject.class);
				it.newLine();
				it.append("\t\tprivate "); //$NON-NLS-1$
				it.append(TypesFactory.class);
				it.append(" jvmTypesFactory;"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				it.append("\t@"); //$NON-NLS-1$
				it.append(Inject.class);
				it.newLine();
				it.append("	private "); //$NON-NLS-1$
				it.append(IFragmentProvider.class);
				it.append(" fragmentProvider;"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
			} else {
				it.append("\t/** Find the reference to the type with the given name."); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @param typeName the fully qualified name of the type"); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @return the type reference."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\t"); //$NON-NLS-1$
				if (!forInterface) {
					it.append("public "); //$NON-NLS-1$
				}
				it.append(JvmParameterizedTypeReference.class);
				it.append(" newTypeRef(String typeName)"); //$NON-NLS-1$
				if (forInterface) {
					it.append(";"); //$NON-NLS-1$
				} else {
					it.append(" {"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\treturn this.builder.newTypeRef(typeName);"); //$NON-NLS-1$
					it.newLine();
					it.append("\t}"); //$NON-NLS-1$
				}
				it.newLineIfNotEmpty();
				it.newLine();
				it.append("\t/** Find the reference to the type with the given name."); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @param context the context for the type reference use"); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @param typeName the fully qualified name of the type"); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @return the type reference."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\t"); //$NON-NLS-1$
				if (!forInterface) {
					it.append("public "); //$NON-NLS-1$
				}
				it.append(JvmParameterizedTypeReference.class);
				it.append(" newTypeRef("); //$NON-NLS-1$
				it.append(Notifier.class);
				it.append(" context, String typeName)"); //$NON-NLS-1$
				if (forInterface) {
					it.append(";"); //$NON-NLS-1$
				} else {
					it.append(" {"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\treturn this.builder.newTypeRef(context, typeName);"); //$NON-NLS-1$
					it.newLine();
					it.append("\t}"); //$NON-NLS-1$
				}
				it.newLineIfNotEmpty();
				it.newLine();
			}
			if (forInterface) {
				it.append("\t/** Replies the context for type resolution."); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @return the context or <code>null</code> if the Ecore object is the context."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\t"); //$NON-NLS-1$
				it.append(IJvmTypeProvider.class);
				it.append(" getTypeResolutionContext();"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
			} else if (forAppender) {
				it.append("\tpublic "); //$NON-NLS-1$
				it.append(IJvmTypeProvider.class);
				it.append(" getTypeResolutionContext() {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\treturn this.builder.getTypeResolutionContext();"); //$NON-NLS-1$
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
			}
			// eInit(...): creates the parameter instance, names it, gives it a
			// default Object type and registers it with its container.
			it.append("\t/** Initialize the formal parameter."); //$NON-NLS-1$
			it.newLine();
			it.append("\t * @param context the context of the formal parameter."); //$NON-NLS-1$
			it.newLine();
			it.append("\t * @param name the name of the formal parameter."); //$NON-NLS-1$
			it.newLine();
			it.append("\t */"); //$NON-NLS-1$
			it.newLine();
			it.append("\t"); //$NON-NLS-1$
			if (!forInterface) {
				it.append("public "); //$NON-NLS-1$
			}
			it.append("void eInit("); //$NON-NLS-1$
			it.append(getCodeElementExtractor().getFormalParameterContainerType());
			it.append(" context, String name, "); //$NON-NLS-1$
			it.append(IJvmTypeProvider.class);
			it.append(" typeContext)"); //$NON-NLS-1$
			if (forInterface) {
				it.append(";"); //$NON-NLS-1$
			} else {
				it.append(" {"); //$NON-NLS-1$
				it.newLine();
				if (forAppender) {
					it.append("\t\tthis.builder.eInit(context, name, typeContext);"); //$NON-NLS-1$
				} else {
					it.append("\t\tsetTypeResolutionContext(typeContext);"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\tthis.context = context;"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\tthis.parameter = "); //$NON-NLS-1$
					it.append(getXFactoryFor(parameter.getElementType()));
					it.append(".eINSTANCE.create"); //$NON-NLS-1$
					it.append(Strings.toFirstUpper(parameter.getElementType().getSimpleName()));
					it.append("();"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\tthis.parameter.set"); //$NON-NLS-1$
					it.append(Strings.toFirstUpper(getCodeBuilderConfig().getParameterNameGrammarName()));
					it.append("(name);"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\tthis.parameter.set"); //$NON-NLS-1$
					it.append(Strings.toFirstUpper(getCodeBuilderConfig().getParameterTypeGrammarName()));
					it.append("(newTypeRef(this.context, Object.class.getName()));"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\tthis.context.get"); //$NON-NLS-1$
					it.append(Strings.toFirstUpper(getCodeBuilderConfig().getParameterListGrammarName()));
					it.append("().add(this.parameter);"); //$NON-NLS-1$
				}
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
			}
			it.newLineIfNotEmpty();
			it.newLine();
			it.append("\t/** Replies the created parameter."); //$NON-NLS-1$
			it.newLine();
			it.append("\t *"); //$NON-NLS-1$
			it.newLine();
			it.append("\t * @return the parameter."); //$NON-NLS-1$
			it.newLine();
			it.append("\t */"); //$NON-NLS-1$
			it.newLine();
			it.append("\t@"); //$NON-NLS-1$
			it.append(Pure.class);
			it.newLine();
			it.append("\t"); //$NON-NLS-1$
			if (!forInterface) {
				it.append("public "); //$NON-NLS-1$
			}
			it.append(parameter.getElementType());
			it.append(" get"); //$NON-NLS-1$
			it.append(Strings.toFirstUpper(parameter.getElementType().getSimpleName()));
			it.append("()"); //$NON-NLS-1$
			if (forInterface) {
				it.append(";"); //$NON-NLS-1$
			} else {
				it.append(" {"); //$NON-NLS-1$
				it.newLine();
				if (forAppender) {
					it.append("\t\treturn this.builder.get"); //$NON-NLS-1$
					it.append(Strings.toFirstUpper(parameter.getElementType().getSimpleName()));
					it.append("();"); //$NON-NLS-1$
				} else {
					it.append("\t\treturn this.parameter;"); //$NON-NLS-1$
				}
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
			}
			it.newLineIfNotEmpty();
			it.newLine();
			it.append("\t/** Replies the JvmIdentifiable that corresponds to the formal parameter."); //$NON-NLS-1$
			it.newLine();
			it.append("\t *"); //$NON-NLS-1$
			it.newLine();
			it.append("\t * @param container the feature call that is supposed to contains the replied identifiable element."); //$NON-NLS-1$
			it.newLine();
			it.append("\t */"); //$NON-NLS-1$
			it.newLine();
			it.append("\t"); //$NON-NLS-1$
			if (!forInterface) {
				it.append("public "); //$NON-NLS-1$
			}
			it.append("void setReferenceInto("); //$NON-NLS-1$
			it.append(XFeatureCall.class);
			it.append(" container) "); //$NON-NLS-1$
			if (forInterface) {
				it.append(";"); //$NON-NLS-1$
			} else {
				it.append("{"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\t"); //$NON-NLS-1$
				if (forAppender) {
					it.append("this.builder.setReferenceInto(container);"); //$NON-NLS-1$
				} else {
					it.append(JvmVoid.class);
					it.append(" jvmVoid = this.jvmTypesFactory.createJvmVoid();"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\tif (jvmVoid instanceof "); //$NON-NLS-1$
					it.append(InternalEObject.class);
					it.append(") {"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\tfinal "); //$NON-NLS-1$
					it.append(InternalEObject.class);
					it.append("\t\t\tjvmVoidProxy = ("); //$NON-NLS-1$
					it.append(InternalEObject.class);
					it.append(") jvmVoid;"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\tfinal "); //$NON-NLS-1$
					it.append(EObject.class);
					it.append(" param = getSarlFormalParameter();"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\tfinal "); //$NON-NLS-1$
					it.append(Resource.class);
					it.append(" resource = param.eResource();"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\t// Get the derived object"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\tfinal "); //$NON-NLS-1$
					it.append(parameter.getElementType());
					it.append(" jvmParam = getAssociatedElement("); //$NON-NLS-1$
					it.append(parameter.getElementType());
					it.append(".class, param, resource);"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\t// Set the proxy URI"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\tfinal "); //$NON-NLS-1$
					it.append(URI.class);
					it.append(" uri = "); //$NON-NLS-1$
					it.append(EcoreUtil2.class);
					it.append(".getNormalizedURI(jvmParam);"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\tjvmVoidProxy.eSetProxyURI(uri);"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t}"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\tcontainer.setFeature(jvmVoid);"); //$NON-NLS-1$
				}
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
			}
			it.newLineIfNotEmpty();
			it.newLine();
			it.append("\t/** Replies the resource to which the formal parameter is attached."); //$NON-NLS-1$
			it.newLine();
			it.append("\t */"); //$NON-NLS-1$
			it.newLine();
			it.append("\t@"); //$NON-NLS-1$
			it.append(Pure.class);
			it.newLine();
			it.append("\t"); //$NON-NLS-1$
			if (!forInterface) {
				it.append("public "); //$NON-NLS-1$
			}
			it.append(Resource.class);
			it.append(" eResource()"); //$NON-NLS-1$
			if (forInterface) {
				it.append(";"); //$NON-NLS-1$
			} else {
				it.append(" {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\treturn get"); //$NON-NLS-1$
				it.append(Strings.toFirstUpper(parameter.getElementType().getSimpleName()));
				it.append("().eResource();"); //$NON-NLS-1$
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
			}
			it.newLineIfNotEmpty();
			it.newLine();
			it.append("\t/** Change the type."); //$NON-NLS-1$
			it.newLine();
			it.append("\t *"); //$NON-NLS-1$
			it.newLine();
			it.append("\t * @param type the formal parameter type."); //$NON-NLS-1$
			it.newLine();
			it.append("\t */"); //$NON-NLS-1$
			it.newLine();
			it.append("\t"); //$NON-NLS-1$
			if (!forInterface) {
				it.append("public "); //$NON-NLS-1$
			}
			it.append("void set"); //$NON-NLS-1$
			it.append(Strings.toFirstUpper(getCodeBuilderConfig().getParameterTypeGrammarName()));
			it.append("(String type)"); //$NON-NLS-1$
			if (forInterface) {
				it.append(";"); //$NON-NLS-1$
			} else {
				it.append(" {"); //$NON-NLS-1$
				it.newLine();
				if (forAppender) {
					it.append("\t\tthis.builder.set"); //$NON-NLS-1$
					it.append(Strings.toFirstUpper(getCodeBuilderConfig().getParameterTypeGrammarName()));
					it.append("(type);"); //$NON-NLS-1$
				} else {
					it.append("\t\tString typeName;"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\tif ("); //$NON-NLS-1$
					it.append(Strings.class);
					it.append(".isEmpty(type)) {"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\ttypeName = Object.class.getName();"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t} else {"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t\ttypeName = type;"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\t}"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\tthis.parameter.set"); //$NON-NLS-1$
					it.append(Strings.toFirstUpper(getCodeBuilderConfig().getParameterTypeGrammarName()));
					it.append("(newTypeRef(this.context, typeName));"); //$NON-NLS-1$
				}
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
			}
			it.newLineIfNotEmpty();
			it.newLine();
			// Variadic flag support, only when the grammar defines a var-arg
			// marker.
			if (!Strings.isEmpty(getCodeBuilderConfig().getParameterVarArgGrammarName())) {
				it.append("\t/** Change the variadic property of the parameter."); //$NON-NLS-1$
				it.newLine();
				it.append("\t *"); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @param isVariadic indicates if the parameter is variadic."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\t"); //$NON-NLS-1$
				if (!forInterface) {
					it.append("public "); //$NON-NLS-1$
				}
				it.append("void set"); //$NON-NLS-1$
				it.append(Strings.toFirstUpper(getCodeBuilderConfig().getParameterVarArgGrammarName()));
				it.append("(boolean isVariadic)"); //$NON-NLS-1$
				if (forInterface) {
					it.append(";"); //$NON-NLS-1$
				} else {
					it.append(" {"); //$NON-NLS-1$
					it.newLine();
					if (forAppender) {
						it.append("\t\tthis.builder.set"); //$NON-NLS-1$
					} else {
						it.append("\t\tthis.parameter.set"); //$NON-NLS-1$
					}
					it.append(Strings.toFirstUpper(getCodeBuilderConfig().getParameterVarArgGrammarName()));
					it.append("(isVariadic);"); //$NON-NLS-1$
					it.newLine();
					it.append("\t}"); //$NON-NLS-1$
				}
				it.newLineIfNotEmpty();
				it.newLine();
			}
			// Default-value accessor, only when the grammar defines one.
			if (exparameter.getDefaultValueAssignment() != null) {
				it.append("\t/** Replies the default value of the parameter."); //$NON-NLS-1$
				it.newLine();
				it.append("\t * @return the default value builder."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\t@"); //$NON-NLS-1$
				it.append(Pure.class);
				it.newLine();
				it.append("\t"); //$NON-NLS-1$
				if (!forInterface) {
					it.append("public "); //$NON-NLS-1$
				}
				it.append(getExpressionBuilderInterface());
				final String accessor = "get" //$NON-NLS-1$
						+ Strings.toFirstUpper(getCodeBuilderConfig().getParameterDefaultValueGrammarName())
						+ "()"; //$NON-NLS-1$
				it.append(" "); //$NON-NLS-1$
				it.append(accessor);
				if (forInterface) {
					it.append(";"); //$NON-NLS-1$
				} else {
					it.append(" {"); //$NON-NLS-1$
					it.newLine();
					if (forAppender) {
						it.append("\t\treturn this.builder.get"); //$NON-NLS-1$
						it.append(Strings.toFirstUpper(getCodeBuilderConfig().getParameterDefaultValueGrammarName()));
						it.append("();"); //$NON-NLS-1$
					} else {
						it.append("\t\tif (this.defaultValue == null) {"); //$NON-NLS-1$
						it.newLine();
						it.append("\t\t\tthis.defaultValue = this.expressionProvider.get();"); //$NON-NLS-1$
						it.newLine();
						it.append("\t\t\tthis.defaultValue.eInit(this.parameter, new "); //$NON-NLS-1$
						it.append(Procedures.class);
						it.append(".Procedure1<"); //$NON-NLS-1$
						it.append(XExpression.class);
						it.append(">() {"); //$NON-NLS-1$
						it.newLine();
						it.append("\t\t\t\t\tpublic void apply("); //$NON-NLS-1$
						it.append(XExpression.class);
						it.append(" it) {"); //$NON-NLS-1$
						it.newLine();
						it.append("\t\t\t\t\t\tget"); //$NON-NLS-1$
						it.append(Strings.toFirstUpper(parameter.getElementType().getSimpleName()));
						it.append("().set"); //$NON-NLS-1$
						it.append(Strings.toFirstUpper(getCodeBuilderConfig().getParameterDefaultValueGrammarName()));
						it.append("(it);"); //$NON-NLS-1$
						it.newLine();
						it.append("\t\t\t\t\t}"); //$NON-NLS-1$
						it.newLine();
						it.append("\t\t\t\t}, getTypeResolutionContext());"); //$NON-NLS-1$
						it.newLine();
						it.append("\t\t}"); //$NON-NLS-1$
						it.newLine();
						it.append("\t\treturn this.defaultValue;"); //$NON-NLS-1$
					}
					it.newLine();
					it.append("\t}"); //$NON-NLS-1$
				}
				it.newLineIfNotEmpty();
				it.newLine();
				if (!forAppender && !forInterface) {
					it.append("\t@"); //$NON-NLS-1$
					it.append(Override.class);
					it.newLine();
					it.append("\t@"); //$NON-NLS-1$
					it.append(Pure.class);
					it.newLine();
					it.append("\tpublic "); //$NON-NLS-1$
					it.append(String.class);
					it.append(" toString() {"); //$NON-NLS-1$
					it.newLine();
					it.append("\t\treturn "); //$NON-NLS-1$
					if (forAppender) {
						it.append("this.builder.toString();"); //$NON-NLS-1$
					} else {
						it.append(EmfFormatter.class);
						it.append(".objToStr("); //$NON-NLS-1$
						it.append(accessor);
						it.append(");"); //$NON-NLS-1$
					}
					it.newLine();
					it.append("\t}"); //$NON-NLS-1$
					it.newLineIfNotEmpty();
					it.newLine();
				}
			}
			if (forInterface) {
				it.append("\t/** Dispose the resource."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\tvoid dispose();"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
			} else if (forAppender) {
				it.append("\t/** Dispose the resource."); //$NON-NLS-1$
				it.newLine();
				it.append("\t */"); //$NON-NLS-1$
				it.newLine();
				it.append("\tpublic void dispose() {"); //$NON-NLS-1$
				it.newLine();
				it.append("\t\tthis.builder.dispose();"); //$NON-NLS-1$
				it.newLine();
				it.append("\t}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
			}
		}
	};
} |
python | def main():
    """
    NAME
        common_mean.py
    DESCRIPTION
        calculates bootstrap statistics to test for common mean
    INPUT FORMAT
        takes dec/inc as first two columns in two space delimited files
    SYNTAX
       common_mean.py [command line options]
    OPTIONS
        -h prints help message and quits
        -f FILE, input file
        -f2 FILE, optional second file to compare with first file
        -dir D I, optional direction to compare with input file
        -fmt [svg,jpg,pnd,pdf] set figure format [default is svg]
    NOTES
       must have either F2 OR dir but not both
    """
    d,i,file2="","",""
    fmt,plot='svg',0
    if '-h' in sys.argv: # check if help is needed
        print(main.__doc__)
        sys.exit() # graceful quit
    if '-sav' in sys.argv: plot=1
    if '-fmt' in sys.argv:
        ind=sys.argv.index('-fmt')
        fmt=sys.argv[ind+1]
    # NOTE(review): if '-f' is absent, file1 stays undefined and the
    # numpy.loadtxt call below raises NameError -- confirm intended behavior.
    if '-f' in sys.argv:
        ind=sys.argv.index('-f')
        file1=sys.argv[ind+1]
    if '-f2' in sys.argv:
        ind=sys.argv.index('-f2')
        file2=sys.argv[ind+1]
    if '-dir' in sys.argv:
        ind=sys.argv.index('-dir')
        d=float(sys.argv[ind+1])
        i=float(sys.argv[ind+2])
    D1=numpy.loadtxt(file1,dtype=numpy.float)
    if file2!="": D2=numpy.loadtxt(file2,dtype=numpy.float)
    #
    # NOTE(review): counter and NumSims are never used below.
    counter,NumSims=0,1000
    #
    # get bootstrapped means for first data set
    #
    print("Doing first set of directions, please be patient..")
    BDI1=pmag.di_boot(D1)
    #
    # convert to cartesian coordinates X1,X2, Y1,Y2 and Z1, Z2
    #
    if d=="": # repeat for second data set
        print("Doing second set of directions, please be patient..")
        BDI2=pmag.di_boot(D2)
    else:
        BDI2=[]
    # set up plots: one cumulative-distribution figure per cartesian axis
    CDF={'X':1,'Y':2,'Z':3}
    pmagplotlib.plot_init(CDF['X'],4,4)
    pmagplotlib.plot_init(CDF['Y'],4,4)
    pmagplotlib.plot_init(CDF['Z'],4,4)
    # draw the cdfs
    pmagplotlib.plot_com(CDF,BDI1,BDI2,[d,i])
    files={}
    files['X']='CD_X.'+fmt
    files['Y']='CD_Y.'+fmt
    files['Z']='CD_Z.'+fmt
    # interactive mode unless -sav was given; then save straight to disk
    if plot==0:
        pmagplotlib.draw_figs(CDF)
        ans=input("S[a]ve plots, <Return> to quit ")
        if ans=="a":
            pmagplotlib.save_plots(CDF,files)
        else:
            sys.exit()
    else:
        pmagplotlib.save_plots(CDF,files)
        sys.exit() |
python | def find(self, path, resolved=True):
    """
    Get the definition object for the schema type located at the specified
    path.

    The path may contain (.) dot notation to specify nested types.
    Actually, the path separator is usually a (.) but can be redefined
    during construction.

    @param path: A (.) separated path to a schema type.
    @type path: basestring
    @param resolved: A flag indicating that the fully resolved type
        should be returned.
    @type resolved: boolean
    @return: The found schema I{type}. When the path cannot be fully
        resolved, the last successfully resolved node is returned
        (C{None} if even the root lookup failed).
    @rtype: L{xsd.sxbase.SchemaObject}
    """
    result = None
    parts = self.split(path)
    try:
        result = self.root(parts)
        if len(parts) > 1:
            # Resolve the root, then walk the intermediate branches and
            # finally the leaf of the dotted path.
            result = result.resolve(nobuiltin=True)
            result = self.branch(result, parts)
            result = self.leaf(result, parts)
        if resolved:
            result = result.resolve(nobuiltin=True)
    except PathResolver.BadPath:
        # Deliberately returns whatever was resolved so far (see @return).
        log.error('path: "%s", not-found' % path)
    return result |
def datasets(self):
    """
    Return all datasets, excluding the record whose vid equals
    ROOT_CONFIG_NAME_V.

    :return: list of Dataset instances
    """
    query = self.session.query(Dataset)
    query = query.filter(Dataset.vid != ROOT_CONFIG_NAME_V)
    return query.all()
java | public static <E extends Comparable<E>> void sortDescending(E[] intArray) {
    // In-place quicksort over the whole array; the trailing 'true' flag
    // selects descending order.
    Quicksort.sort(intArray, 0, intArray.length - 1 , true);
} |
python | def clean_lprof_file(input_fname, output_fname=None):
    """ Reads a .lprof file and cleans it, returning the cleaned text. """
    # NOTE(review): `output_fname` is accepted but never used -- the cleaned
    # text is only returned, never written to disk. Confirm whether callers
    # expect a file to be produced.
    # Read the raw .lprof text dump
    text = ut.read_from(input_fname)
    # Sort and clean the text
    output_text = clean_line_profile_text(text)
    return output_text |
java | @Nonnull
public JSVar var (@Nonnull @Nonempty final String sName,
@Nullable final String sInitValue) throws JSNameAlreadyExistsException
{
return var (sName, sInitValue == null ? JSExpr.NULL : JSExpr.lit (sInitValue));
} |
def _updateWordSet(self):
    """Collect the set of words offered for completion.

    Seeds the set with keywords and custom completions, then scans the
    document lines, aborting early if the scan exceeds the time budget.
    """
    self._wordSet = set(self._keywords) | set(self._customCompletions)
    deadline = time.time() + self._WORD_SET_UPDATE_MAX_TIME_SEC
    for line in self._qpart.lines:
        self._wordSet.update(_wordRegExp.findall(line))
        if time.time() > deadline:
            # It is better to have an incomplete word set than to freeze the GUI
            break
def get_cached_token(self):
    '''Get a cached auth token.

    Reads the JSON token cache at ``self.cache_path`` and refreshes the
    token if it has expired.

    :return: the token info dict, or ``None`` when no cache path is
        configured or the cache file cannot be read.
    '''
    token_info = None
    if self.cache_path:
        try:
            # Context manager guarantees the file handle is closed even if
            # reading or JSON decoding fails (the original leaked it).
            with open(self.cache_path) as f:
                token_info = json.load(f)
            if self.is_token_expired(token_info):
                token_info = self.refresh_access_token(token_info['refresh_token'])
        except IOError:
            # Missing/unreadable cache file simply means "no cached token".
            pass
    return token_info
python | def detached(name):
    '''
    Ensure zone is detached

    name : string
        name of the zone
    '''
    # Standard Salt state return structure.
    ret = {'name': name,
           'changes': {},
           'result': None,
           'comment': ''}

    zones = __salt__['zoneadm.list'](installed=True, configured=True)
    if name in zones:
        # A zone in state 'configured' is already detached.
        if zones[name]['state'] != 'configured':
            if __opts__['test']:
                # Dry run: pretend the detach succeeded.
                res_detach = {'status': True}
            else:
                res_detach = __salt__['zoneadm.detach'](name)
            ret['result'] = res_detach['status']
            if ret['result']:
                ret['changes'][name] = 'detached'
                ret['comment'] = 'The zone {0} was detached.'.format(name)
            else:
                ret['comment'] = []
                ret['comment'].append('Failed to detach zone {0}!'.format(name))
                if 'message' in res_detach:
                    ret['comment'].append(res_detach['message'])
                ret['comment'] = "\n".join(ret['comment'])
        else:
            ret['result'] = True
            ret['comment'] = 'zone {0} already detached.'.format(name)
    else:
        ## note: a non existing zone is not attached, we do not consider this a failure
        ret['result'] = True
        ret['comment'] = 'zone {0} is not configured!'.format(name)

    return ret |
java | public DatanodeInfo chooseTargetNodes(Set<DatanodeInfo> excludedNodes)
throws IOException {
DatanodeInfo target = cluster.getNodeOnDifferentRack(excludedNodes);
if (target == null) {
throw new IOException ("Error choose datanode");
}
return target;
} |
def DviPsStrFunction(target = None, source= None, env=None):
    """A strfunction for dvipdf that returns the appropriate
    command string for the no_exec options."""
    # Only display a command string when SCons runs in no-exec mode.
    if not env.GetOption("no_exec"):
        return ''
    return env.subst('$PSCOM', 0, target, source)
java | public static MemberSummaryBuilder getInstance(
ClassWriter classWriter, Context context)
throws Exception {
MemberSummaryBuilder builder = new MemberSummaryBuilder(context,
classWriter.getClassDoc());
builder.memberSummaryWriters =
new MemberSummaryWriter[VisibleMemberMap.NUM_MEMBER_TYPES];
WriterFactory wf = context.configuration.getWriterFactory();
for (int i = 0; i < VisibleMemberMap.NUM_MEMBER_TYPES; i++) {
builder.memberSummaryWriters[i] =
builder.visibleMemberMaps[i].noVisibleMembers() ?
null :
wf.getMemberSummaryWriter(classWriter, i);
}
return builder;
} |
java | public <K, V> Optional<KafkaProducer<K, V>> getProducer(
@NotNull final Serializer<K> keySerializer,
@NotNull final Serializer<V> valueSerializer) {
try {
return partialConfigs.map(
input -> new KafkaProducer<>(input, keySerializer, valueSerializer));
} catch (final Exception e) {
log.warn("error while generating KafkaProducer - {}", e);
return Optional.empty();
}
} |
def add(self, *widgets):
    '''
    Register each argument with this Container.  Arguments may be
    Widget() instances, fellow Container() instances, or iterables of
    either; anything else raises when it is unpacked.
    '''
    for item in widgets:
        if is_widget(item):
            # New widgets register back with this container exactly once.
            if item not in self.widgets:
                self.widgets.add(item)
                item.add_internal(self)
        elif is_container(item):
            if item not in self.containers:
                self.containers.add(item)
                item.add_internal(self)
        else:
            # Recurse into iterables; a non-iterable raises here, which is
            # the intended behaviour for bad arguments.
            self.add(*item)
def showLipds(D=None):
    """
    Display the dataset names of a given LiPD data

    | Example
    | lipd.showLipds(D)

    :param dict D: LiPD data
    :return none:
    """
    if not D:
        print("Error: LiPD data not provided. Pass LiPD data into the function.")
    else:
        # dict_keys is not JSON serializable on Python 3; convert to a list
        # before dumping so this no longer raises TypeError.
        print(json.dumps(list(D.keys()), indent=2))
    return
async def query(cls, query: str,
                variables: Optional[Mapping[str, Any]] = None,
                ) -> Any:
    '''
    Sends the GraphQL query and returns the response.

    :param query: The GraphQL query string.
    :param variables: An optional key-value dictionary
        to fill the interpolated template variables
        in the query.

    :returns: The object parsed from the response JSON string.
    '''
    payload = {
        'query': query,
        'variables': variables if variables else {},
    }
    request = Request(cls.session, 'POST', '/admin/graphql')
    request.set_json(payload)
    async with request.fetch() as resp:
        return await resp.json()
def publish(self, body, routing_key, exchange='amq.default',
            virtual_host='/', properties=None, payload_encoding='string'):
    """Publish a Message.

    :param bytes|str|unicode body: Message payload
    :param str routing_key: Message routing key
    :param str exchange: The exchange to publish the message to
    :param str virtual_host: Virtual host name
    :param dict properties: Message properties
    :param str payload_encoding: Payload encoding.

    :raises ApiError: Raises if the remote server encountered an error.
    :raises ApiConnectionError: Raises if there was a connectivity issue.

    :rtype: dict
    """
    message = json.dumps({
        'routing_key': routing_key,
        'payload': body,
        'payload_encoding': payload_encoding,
        'properties': properties or {},
        'vhost': virtual_host,
    })
    # Both URL path components are percent-encoded ('' means encode '/' too).
    return self.http_client.post(
        API_BASIC_PUBLISH % (quote(virtual_host, ''), quote(exchange, '')),
        payload=message)
def ethernet_interfaces(self):
    """Provide reference to EthernetInterfacesCollection instance"""
    collection_path = self._get_hpe_sub_resource_collection_path(
        'EthernetInterfaces')
    return ethernet_interface.EthernetInterfaceCollection(
        self._conn, collection_path, redfish_version=self.redfish_version)
def get_local_tzone():
    """Return the local UTC offset formatted as ``(+|-)HHMM``.

    Uses ``time.altzone`` when DST is currently in effect and
    ``time.timezone`` otherwise.  Both are seconds *west* of UTC, so a
    negative value means an east-of-UTC zone and maps to a ``+`` sign.

    The previous implementation built the digits with ``rjust``/``ljust``
    on signed ints, which produced garbage such as ``'+-630'`` for
    east-of-UTC zones and left-padded minutes with trailing zeros;
    formatting the absolute offset with ``%02d`` fixes both problems.
    """
    seconds_west = altzone if localtime().tm_isdst else timezone
    # Preserve the original sign convention: '+' only for strictly
    # negative (east-of-UTC) offsets, so UTC itself renders as '-0000'.
    sign = '+' if seconds_west < 0 else '-'
    offset = abs(seconds_west)
    return '%s%02d%02d' % (sign, offset // 3600, (offset % 3600) // 60)
def filter_primary(bam_file, data):
    """Filter reads to primary only BAM.

    Removes:
      - not primary alignment (0x100) 256
      - supplementary alignment (0x800) 2048
    """
    base, ext = os.path.splitext(bam_file)
    out_file = "%s.primary%s" % (base, ext)
    if utils.file_exists(out_file):
        return out_file
    with file_transaction(data, out_file) as tx_out_file:
        cores = dd.get_num_cores(data)
        # -F 2304 drops reads flagged 0x100 (secondary) or 0x800 (supplementary)
        cmd = ("samtools view -@ {cores} -F 2304 -b {bam_file} > {tx_out_file}")
        do.run(cmd.format(**locals()), ("Filtering primary alignments in %s." %
                                        os.path.basename(bam_file)))
    return out_file
def get_template_names(self):
    """
    Returns a list of template names for the view.

    AJAX requests get the partial results template; everything else gets
    the full index template.

    :rtype: list.
    """
    #noinspection PyUnresolvedReferences
    suffix = '/results.html' if self.request.is_ajax() else '/index.html'
    return ['%s%s' % (self.template_dir, suffix)]
def init_from_class_batches(self, class_batches, num_shards=None):
    """Initializes work pieces from classification batches.

    Args:
      class_batches: dict with classification batches, could be obtained
        as ClassificationBatches.data
      num_shards: number of shards to split data into,
        if None then no sharding is done.
    """
    submission_shards = {}
    next_shard = 0
    for idx, (batch_id, batch_val) in enumerate(iteritems(class_batches)):
        submission_id = batch_val['submission_id']
        if num_shards:
            # All batches of a submission land on the same shard; new
            # submissions are assigned round-robin.
            if submission_id not in submission_shards:
                submission_shards[submission_id] = next_shard % num_shards
                next_shard += 1
            shard_id = submission_shards[submission_id]
        else:
            shard_id = None
        # Note: defense also might have following fields populated by worker:
        # stat_correct, stat_error, stat_target_class, stat_num_images
        self.work[DEFENSE_WORK_ID_PATTERN.format(idx)] = {
            'claimed_worker_id': None,
            'claimed_worker_start_time': None,
            'is_completed': False,
            'error': None,
            'elapsed_time': None,
            'submission_id': submission_id,
            'shard_id': shard_id,
            'output_classification_batch_id': batch_id,
        }
java | protected String getHttpAddress(String httpAddress) {
Matcher resolvedMatcher = INETSOCKETADDRESS_PATTERN.matcher(httpAddress);
if (resolvedMatcher.matches()) {
return defaultScheme + resolvedMatcher.group(1) + ":" + resolvedMatcher.group(2);
}
return null;
} |
java | @Override
public Object visitSizeCommand(InvocationContext ctx, SizeCommand command) throws Throwable {
try {
return (doBeforeCall(ctx, command)) ? handleSizeCommand(ctx, command) : null;
}
finally {
doAfterCall(ctx, command);
}
} |
def getmembers(obj, *predicates):
    """ Return all the members of an object as a list of `(key, value)` tuples, sorted by name.

    The optional list of predicates can be used to filter the members.

    The default predicate drops members whose name starts with '_'. To disable it, pass `None` as the first predicate.

    :param obj: Object to list the members for
    :param predicates: Functions to filter the members.
        If the first value is not None, a default predicate is added that filters private members out (name starts with '_')
    :type predicates: tuple[Callable|None]
    :returns: Sorted list of (name, value) tuples
    :rtype: list[(str, *)]
    """
    # Add default
    if not predicates or predicates[0] is not None:
        predicates = (lambda key, value: not key.startswith('_'),) + predicates

    # Build composite predicate: a member survives only when every
    # non-None predicate accepts it.
    def predicate(key_value_tuple):
        key, value = key_value_tuple
        return all(p is None or p(key, value) for p in predicates)

    # `filter` is a lazy iterator on Python 3; materialize it so the
    # function actually returns the list its docstring promises.
    return list(filter(predicate, inspect.getmembers(obj)))
def deriv2(self, p):
    """Second derivative of the link function, g''(p).

    Implemented through numerical (complex-step) differentiation of
    ``self.deriv`` via ``approx_fprime_cs``.

    :param p: array-like values at which to evaluate the second derivative
    :return: 2-D array with the elementwise second derivatives on its diagonal
    """
    from statsmodels.tools.numdiff import approx_fprime_cs
    # TODO: workaround problem with numdiff for 1d input: approx_fprime_cs
    # returns a full Jacobian, so keep only its diagonal.
    return np.diag(approx_fprime_cs(p, self.deriv))
def events(self):
    # type: () -> Generator[Event, None, None]
    """
    Return a generator that provides any events that have been generated
    by protocol activity.

    :returns: generator of :class:`Event <wsproto.events.Event>` subclasses
    """
    # First drain events queued by earlier protocol activity.
    while self._events:
        yield self._events.popleft()

    try:
        for frame in self._proto.received_frames():
            if frame.opcode is Opcode.PING:
                # Control frames are expected complete in a single frame.
                assert frame.frame_finished and frame.message_finished
                yield Ping(payload=frame.payload)

            elif frame.opcode is Opcode.PONG:
                assert frame.frame_finished and frame.message_finished
                yield Pong(payload=frame.payload)

            elif frame.opcode is Opcode.CLOSE:
                code, reason = frame.payload
                if self.state is ConnectionState.LOCAL_CLOSING:
                    # Peer replied to our close: the close handshake is done.
                    self._state = ConnectionState.CLOSED
                else:
                    # Peer initiated the close; a reply is still owed.
                    self._state = ConnectionState.REMOTE_CLOSING
                yield CloseConnection(code=code, reason=reason)

            elif frame.opcode is Opcode.TEXT:
                yield TextMessage(
                    data=frame.payload,
                    frame_finished=frame.frame_finished,
                    message_finished=frame.message_finished,
                )

            elif frame.opcode is Opcode.BINARY:
                yield BytesMessage(
                    data=frame.payload,
                    frame_finished=frame.frame_finished,
                    message_finished=frame.message_finished,
                )

    except ParseFailed as exc:
        # Malformed input: surface it as a close event carrying the
        # parser's close code.
        yield CloseConnection(code=exc.code, reason=str(exc))
def cg_prolongation_smoothing(A, T, B, BtBinv, Sparsity_Pattern, maxiter, tol,
                              weighting='local', Cpt_params=None):
    """Use CG to smooth T by solving A T = 0, subject to nullspace and sparsity constraints.

    Parameters
    ----------
    A : csr_matrix, bsr_matrix
        SPD sparse NxN matrix
    T : bsr_matrix
        Tentative prolongator, a NxM sparse matrix (M < N).
        This is initial guess for the equation A T = 0.
        Assumed that T B_c = B_f
    B : array
        Near-nullspace modes for coarse grid, i.e., B_c.
        Has shape (M,k) where k is the number of coarse candidate vectors.
    BtBinv : array
        3 dimensional array such that,
        BtBinv[i] = pinv(B_i.H Bi), and B_i is B restricted
        to the neighborhood (in the matrix graph) of dof of i.
    Sparsity_Pattern : csr_matrix, bsr_matrix
        Sparse NxM matrix
        This is the sparsity pattern constraint to enforce on the
        eventual prolongator
    maxiter : int
        maximum number of iterations
    tol : float
        residual tolerance for A T = 0
    weighting : string
        'block', 'diagonal' or 'local' construction of the diagonal
        preconditioning
    Cpt_params : tuple
        Tuple of the form (bool, dict). If the Cpt_params[0] = False, then
        the standard SA prolongation smoothing is carried out. If True, then
        dict must be a dictionary of parameters containing, (1) P_I: P_I.T is
        the injection matrix for the Cpts, (2) I_F: an identity matrix
        for only the F-points (i.e. I, but with zero rows and columns for
        C-points) and I_C: the C-point analogue to I_F.

    Returns
    -------
    T : bsr_matrix
        Smoothed prolongator using conjugate gradients to solve A T = 0,
        subject to the constraints, T B_c = B_f, and T has no nonzero
        outside of the sparsity pattern in Sparsity_Pattern.

    See Also
    --------
    The principal calling routine,
    pyamg.aggregation.smooth.energy_prolongation_smoother
    """
    # Preallocate
    AP = sparse.bsr_matrix((np.zeros(Sparsity_Pattern.data.shape,
                                     dtype=T.dtype),
                            Sparsity_Pattern.indices, Sparsity_Pattern.indptr),
                           shape=(Sparsity_Pattern.shape))

    # CG will be run with diagonal preconditioning
    if weighting == 'diagonal':
        Dinv = get_diagonal(A, norm_eq=False, inv=True)
    elif weighting == 'block':
        Dinv = get_block_diag(A, blocksize=A.blocksize[0], inv_flag=True)
        Dinv = sparse.bsr_matrix((Dinv, np.arange(Dinv.shape[0]),
                                  np.arange(Dinv.shape[0]+1)),
                                 shape=A.shape)
    elif weighting == 'local':
        # Based on Gershgorin estimate
        D = np.abs(A)*np.ones((A.shape[0], 1), dtype=A.dtype)
        Dinv = np.zeros_like(D)
        Dinv[D != 0] = 1.0 / np.abs(D[D != 0])
    else:
        raise ValueError('weighting value is invalid')

    # Calculate initial residual
    #   Equivalent to R = -A*T;    R = R.multiply(Sparsity_Pattern)
    #   with the added constraint that R has an explicit 0 wherever
    #   R is 0 and Sparsity_Pattern is not
    uones = np.zeros(Sparsity_Pattern.data.shape, dtype=T.dtype)
    R = sparse.bsr_matrix((uones, Sparsity_Pattern.indices,
                           Sparsity_Pattern.indptr),
                          shape=(Sparsity_Pattern.shape))
    pyamg.amg_core.incomplete_mat_mult_bsr(A.indptr, A.indices,
                                           np.ravel(A.data),
                                           T.indptr, T.indices,
                                           np.ravel(T.data),
                                           R.indptr, R.indices,
                                           np.ravel(R.data),
                                           int(T.shape[0]/T.blocksize[0]),
                                           int(T.shape[1]/T.blocksize[1]),
                                           A.blocksize[0], A.blocksize[1],
                                           T.blocksize[1])
    R.data *= -1.0

    # Enforce R*B = 0
    Satisfy_Constraints(R, B, BtBinv)

    if R.nnz == 0:
        print("Error in sa_energy_min(..).  Initial R no nonzeros on a level. \
               Returning tentative prolongator\n")
        return T

    # Calculate Frobenius norm of the residual
    resid = R.nnz  # np.sqrt((R.data.conjugate()*R.data).sum())
    # print "Energy Minimization of Prolongator \
    #       --- Iteration 0 --- r = " + str(resid)
    i = 0
    while i < maxiter and resid > tol:
        # Apply diagonal preconditioner
        if weighting == 'local' or weighting == 'diagonal':
            Z = scale_rows(R, Dinv)
        else:
            Z = Dinv*R

        # Frobenius inner-product of (R,Z) = sum( np.conjugate(rk).*zk)
        newsum = (R.conjugate().multiply(Z)).sum()
        if newsum < tol:
            # met tolerance, so halt
            break

        # P is the search direction, not the prolongator, which is T.
        if(i == 0):
            P = Z
            oldsum = newsum
        else:
            # Polak-Ribiere-style update of the CG search direction.
            beta = newsum / oldsum
            P = Z + beta*P
        oldsum = newsum

        # Calculate new direction and enforce constraints
        #   Equivalent to:  AP = A*P;    AP = AP.multiply(Sparsity_Pattern)
        #   with the added constraint that explicit zeros are in AP wherever
        #   AP = 0 and Sparsity_Pattern does not  !!!!
        AP.data[:] = 0.0
        pyamg.amg_core.incomplete_mat_mult_bsr(A.indptr, A.indices,
                                               np.ravel(A.data),
                                               P.indptr, P.indices,
                                               np.ravel(P.data),
                                               AP.indptr, AP.indices,
                                               np.ravel(AP.data),
                                               int(T.shape[0]/T.blocksize[0]),
                                               int(T.shape[1]/T.blocksize[1]),
                                               A.blocksize[0], A.blocksize[1],
                                               P.blocksize[1])

        # Enforce AP*B = 0
        Satisfy_Constraints(AP, B, BtBinv)

        # Frobenius inner-product of (P, AP)
        alpha = newsum/(P.conjugate().multiply(AP)).sum()

        # Update the prolongator, T
        T = T + alpha*P

        # Ensure identity at C-pts
        if Cpt_params[0]:
            T = Cpt_params[1]['I_F']*T + Cpt_params[1]['P_I']

        # Update residual
        R = R - alpha*AP

        i += 1

        # Calculate Frobenius norm of the residual
        resid = R.nnz  # np.sqrt((R.data.conjugate()*R.data).sum())
        # print "Energy Minimization of Prolongator \
        #       --- Iteration " + str(i) + " --- r = " + str(resid)
    return T
def _folder_item_remarks(self, analysis_brain, item):
    """Renders the Remarks field for the passed in analysis

    If the edition of the analysis is permitted, adds the field into the
    list of editable fields.

    :param analysis_brain: Brain that represents an analysis
    :param item: analysis' dictionary counterpart that represents a row
    """
    # Only populate Remarks when the listing has remarks display enabled.
    if self.analysis_remarks_enabled():
        item["Remarks"] = analysis_brain.getRemarks
        # Editable analyses expose Remarks as an inline-editable column.
        if self.is_analysis_edition_allowed(analysis_brain):
            item["allow_edit"].extend(["Remarks"])
def capture_exceptions(self, f=None, exceptions=None):  # TODO: Ash fix kwargs in base
    """
    Wrap a function or code block in try/except and automatically call
    ``.captureException`` if it raises an exception, then the exception
    is reraised.

    By default, it will capture ``Exception``

    >>> @client.capture_exceptions
    >>> def foo():
    >>>     raise Exception()

    >>> with client.capture_exceptions():
    >>>     raise Exception()

    You can also specify exceptions to be caught specifically

    >>> @client.capture_exceptions((IOError, LookupError))
    >>> def bar():
    >>>     ...

    ``kwargs`` are passed through to ``.captureException``.
    """
    if not isinstance(f, FunctionType):
        # When the decorator is called with arguments, f is not a function:
        # we expect it to be the exceptions tuple instead.
        return functools.partial(self.capture_exceptions, exceptions=f)

    exceptions = exceptions or (Exception,)

    @functools.wraps(f)
    def wrapped(event, context, *args, **kwargs):
        try:
            return f(event, context, *args, **kwargs)
        except exceptions:
            # Report, clear the per-invocation context, then re-raise so
            # the caller still sees the original exception.
            self.captureException(event=event, context=context, **kwargs)
            self.context.clear()
            raise
    return wrapped
java | public static double[] doubleArrayCopyOf(CollectionNumber coll) {
double[] data = new double[coll.size()];
IteratorNumber iter = coll.iterator();
int index = 0;
while (iter.hasNext()) {
data[index] = iter.nextDouble();
index++;
}
return data;
} |
java | public void retrieveCurrentCustomer(@NonNull CustomerRetrievalListener listener) {
final Customer cachedCustomer = getCachedCustomer();
if (cachedCustomer != null) {
listener.onCustomerRetrieved(cachedCustomer);
} else {
mCustomer = null;
final String operationId = UUID.randomUUID().toString();
mCustomerRetrievalListeners.put(operationId, listener);
mEphemeralKeyManager.retrieveEphemeralKey(operationId, null, null);
}
} |
/**
 * Asynchronously gets a server's security alert policy.
 *
 * @param resourceGroupName the name of the resource group that contains the server
 * @param serverName the name of the server
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses
 * @return the {@link ServiceFuture} tracking the request
 */
public ServiceFuture<ServerSecurityAlertPolicyInner> getAsync(String resourceGroupName, String serverName, final ServiceCallback<ServerSecurityAlertPolicyInner> serviceCallback) {
    return ServiceFuture.fromResponse(getWithServiceResponseAsync(resourceGroupName, serverName), serviceCallback);
}
def hdmbrcheck(disk_mbr, sector_count, bootable):
    # type: (bytes, int, bool) -> int
    '''
    A function to sanity check an El Torito Hard Drive Master Boot Record (HDMBR).
    On success, it returns the system_type (also known as the partition type) that
    should be fed into the rest of the El Torito methods.  On failure, it raises
    an exception.

    Parameters:
     disk_mbr - The data to look in.
     sector_count - The number of sectors expected in the MBR.
     bootable - Whether this MBR is bootable.
    Returns:
     The system (or partition) type that should be fed into the rest of El Torito.
    '''
    # The MBR that we want to see to do hd emulation boot for El Torito is a standard
    # x86 MBR, documented here:
    # https://en.wikipedia.org/wiki/Master_boot_record#Sector_layout
    #
    # In brief, it should consist of 512 bytes laid out like:
    # Offset 0x0 - 0x1BD:   Bootstrap code area
    # Offset 0x1BE - 0x1CD: Partition entry 1
    # Offset 0x1CE - 0x1DD: Partition entry 2
    # Offset 0x1DE - 0x1ED: Partition entry 3
    # Offset 0x1EE - 0x1FD: Partition entry 4
    # Offset 0x1FE:         0x55
    # Offset 0x1FF:         0xAA
    #
    # Each partition entry above should consist of:
    # Offset 0x0: Active (bit 7 set) or inactive (all zeros)
    # Offset 0x1 - 0x3: CHS address of first sector in partition
    #   Offset 0x1: Head
    #   Offset 0x2: Sector in bits 0-5, bits 6-7 are high bits of of cylinder
    #   Offset 0x3: bits 0-7 of cylinder
    # Offset 0x4: Partition type (almost all of these are valid, see https://en.wikipedia.org/wiki/Partition_type)
    # Offset 0x5 - 0x7: CHS address of last sector in partition (same format as first sector)
    # Offset 0x8 - 0xB: LBA of first sector in partition
    # Offset 0xC - 0xF: number of sectors in partition

    PARTITION_TYPE_UNUSED = 0x0

    PARTITION_STATUS_ACTIVE = 0x80

    (bootstrap_unused, part1, part2, part3, part4, keybyte1,
     keybyte2) = struct.unpack_from('=446s16s16s16s16sBB', disk_mbr, 0)
    if keybyte1 != 0x55 or keybyte2 != 0xAA:
        raise pycdlibexception.PyCdlibInvalidInput('Invalid magic on HD MBR')

    parts = [part1, part2, part3, part4]
    system_type = PARTITION_TYPE_UNUSED
    for part in parts:
        (status, s_head, s_seccyl, s_cyl, parttype, e_head, e_seccyl, e_cyl,
         lba_unused, num_sectors_unused) = struct.unpack('=BBBBBBBBLL', part)
        if parttype == PARTITION_TYPE_UNUSED:
            continue

        # Exactly one used partition is allowed in an El Torito HD image.
        if system_type != PARTITION_TYPE_UNUSED:
            raise pycdlibexception.PyCdlibInvalidInput('Boot image has multiple partitions')

        if bootable and status != PARTITION_STATUS_ACTIVE:
            # genisoimage prints a warning in this case, but we have no other
            # warning prints in the whole codebase, and an exception will probably
            # make us too fragile.  So we leave the code but don't do anything.
            with open(os.devnull, 'w') as devnull:
                print('Warning: partition not marked active', file=devnull)

        cyl = ((s_seccyl & 0xC0) << 10) | s_cyl
        sec = s_seccyl & 0x3f
        if cyl != 0 or s_head != 1 or sec != 1:
            # genisoimage prints a warning in this case, but we have no other
            # warning prints in the whole codebase, and an exception will probably
            # make us too fragile.  So we leave the code but don't do anything.
            with open(os.devnull, 'w') as devnull:
                print('Warning: partition does not start at 0/1/1', file=devnull)

        cyl = ((e_seccyl & 0xC0) << 10) | e_cyl
        sec = e_seccyl & 0x3f
        # Total sectors implied by the end-of-partition CHS geometry.
        geometry_sectors = (cyl + 1) * (e_head + 1) * sec

        if sector_count != geometry_sectors:
            # genisoimage prints a warning in this case, but we have no other
            # warning prints in the whole codebase, and an exception will probably
            # make us too fragile.  So we leave the code but don't do anything.
            with open(os.devnull, 'w') as devnull:
                print('Warning: image size does not match geometry', file=devnull)

        system_type = parttype

    if system_type == PARTITION_TYPE_UNUSED:
        raise pycdlibexception.PyCdlibInvalidInput('Boot image has no partitions')

    return system_type
def delete_track(self, href=None):
    """Delete a track.

    'href' the relative index of the track. May not be none.

    Returns nothing.

    If the response status is not 204, throws and APIException."""
    # Argument error checking.
    assert href is not None

    response = self.delete(href)
    if response.status != 204:
        raise APIException(response.status, response.json)
/**
 * Event loop that watches the registered directories for SQL file changes
 * and keeps the in-memory SQL caches in sync.  Runs until the watcher is
 * interrupted or fails, then closes the watch service.
 */
private void watchPath() {
    for (;;) {
        // Wait for a watch key to be signalled.
        WatchKey key;
        try {
            key = watcher.take();
        } catch (InterruptedException ex) {
            log.debug("WatchService catched InterruptedException.");
            break;
        } catch (Throwable ex) {
            log.error("Unexpected exception occured.", ex);
            break;
        }
        for (WatchEvent<?> event : key.pollEvents()) {
            WatchEvent.Kind<?> kind = event.kind();
            if (kind == OVERFLOW) {
                continue;
            }
            // The file name is the context of the event.
            @SuppressWarnings("unchecked")
            WatchEvent<Path> evt = (WatchEvent<Path>) event;
            Path dir = watchDirs.get(key);
            Path path = dir.resolve(evt.context());
            log.trace("file changed.({}). path={}", kind.name(), path);

            boolean isSqlFile = path.toString().endsWith(fileExtension);
            if (Files.isDirectory(path) || !isSqlFile) {
                // On ENTRY_DELETE Files.isDirectory() returns false, so the
                // extension check above is also used to detect directories.
                if (kind == ENTRY_CREATE) {
                    traverse(path, true, false);
                } else if (kind == ENTRY_DELETE) {
                    key.cancel();
                    watchDirs.remove(key);
                    continue;
                }
            } else if (isSqlFile) {
                if (kind == ENTRY_CREATE) {
                    traverse(path, true, false);
                } else if (kind == ENTRY_MODIFY || kind == ENTRY_DELETE) {
                    String sqlName = getSqlName(path);
                    sqlInfos.computeIfPresent(sqlName, (k, v) -> {
                        return v.computePath(path, kind == ENTRY_DELETE);
                    });
                }
            }
        }
        key.reset();
    }
    try {
        watcher.close();
    } catch (IOException e) {
        // do nothing
    }
}
/**
 * Reflective EMF getter: returns the value of the structural feature with
 * the given id, delegating unrecognised feature ids (together with the
 * {@code resolve}/{@code coreType} flags) to the superclass.
 *
 * @param featureID id of the feature to read
 * @param resolve whether proxies should be resolved by the superclass
 * @param coreType whether the raw value should be returned by the superclass
 * @return the feature's current value
 */
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType)
{
  switch (featureID)
  {
    case TypesPackage.JVM_EXECUTABLE__TYPE_PARAMETERS:
      return getTypeParameters();
    case TypesPackage.JVM_EXECUTABLE__PARAMETERS:
      return getParameters();
    case TypesPackage.JVM_EXECUTABLE__EXCEPTIONS:
      return getExceptions();
    case TypesPackage.JVM_EXECUTABLE__VAR_ARGS:
      return isVarArgs();
  }
  return super.eGet(featureID, resolve, coreType);
}
/**
 * Returns the length of the original frame backing this message, or -1
 * when the backing contents are gone or the message has been reallocated.
 *
 * @return the original frame length, or -1 when unavailable
 */
public int originalFrame() {
  if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) JmfTr.entry(this, tc, "originalFrame");
  int result;
  // Read contents/reallocated/length under the message lock so they are
  // observed consistently.
  synchronized (getMessageLockArtefact()) {
    if ((contents == null) || reallocated) {
      result = -1;
    }
    else {
      result = length;
    }
  }
  if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) JmfTr.exit(this, tc, "originalFrame", Integer.valueOf(result));
  return result;
}
/**
 * Interns the given string in the cache for the requested scope.
 *
 * Strings longer than {@code lengthLimit} (when the limit is non-negative)
 * are returned unchanged.  The cache is keyed by a {@link CharBuffer} wrap
 * of the value, and the string itself is stored as the canonical instance.
 *
 * @param stringValue value to intern; may be null
 * @param cacheScope  selects the global or the application cache
 * @return the canonical cached instance, or the input when not cacheable
 */
public String apply(final String stringValue, CacheScope cacheScope) {
  if (stringValue != null && (lengthLimit < 0 || stringValue.length() <= lengthLimit)) {
    return (String) (cacheScope == CacheScope.GLOBAL_SCOPE ? globalCache : applicationCache)
        .computeIfAbsent(CharBuffer.wrap(stringValue), s -> {
          logger.trace("  (string) writing new interned value {} into {} cache scope", stringValue, cacheScope);
          return stringValue;
        });
  }
  return stringValue;
}
java | public boolean read(DataInputStream daIn, boolean bFixedLength) // Fixed length = false
{
try {
float fData = daIn.readFloat();
Float flData = null;
if (!Float.isNaN(fData))
flData = new Float(fData);
int errorCode = this.setData(flData, DBConstants.DONT_DISPLAY, DBConstants.READ_MOVE);
return (errorCode == DBConstants.NORMAL_RETURN); // Success
} catch (IOException ex) {
ex.printStackTrace();
return false;
}
} |
java | public static boolean isSymmetric(DMatrixRMaj m , double tol ) {
if( m.numCols != m.numRows )
return false;
double max = CommonOps_DDRM.elementMaxAbs(m);
for( int i = 0; i < m.numRows; i++ ) {
for( int j = 0; j < i; j++ ) {
double a = m.get(i,j)/max;
double b = m.get(j,i)/max;
double diff = Math.abs(a-b);
if( !(diff <= tol) ) {
return false;
}
}
}
return true;
} |
/**
 * Gets a job target group synchronously by blocking on the async call.
 *
 * @param resourceGroupName the name of the resource group that contains the server
 * @param serverName the name of the server
 * @param jobAgentName the name of the job agent
 * @param targetGroupName the name of the target group
 * @return the target group
 */
public JobTargetGroupInner get(String resourceGroupName, String serverName, String jobAgentName, String targetGroupName) {
    return getWithServiceResponseAsync(resourceGroupName, serverName, jobAgentName, targetGroupName).toBlocking().single().body();
}
def populate_items(self, request):
    '''populate and returns filtered items'''
    # Cache the filtered items on the instance, then return them through
    # the `items` accessor.
    self._items = self.get_items(request)
    return self.items
/**
 * Gets a composite entity extractor synchronously by blocking on the
 * async call.
 *
 * @param appId the application id
 * @param versionId the version id of the application
 * @param cEntityId the composite entity extractor id
 * @return the composite entity extractor
 */
public CompositeEntityExtractor getCompositeEntity(UUID appId, String versionId, UUID cEntityId) {
    return getCompositeEntityWithServiceResponseAsync(appId, versionId, cEntityId).toBlocking().single().body();
}
/**
 * Command-line entry point: learns a PCA projection (with whitening) from
 * VLAD vectors stored in a linear index and saves the PCA matrix to a
 * file next to the index.
 *
 * Expected args: indexLocation numTrainVectors vectorLength numPrincipalComponents
 */
public static void main(String[] args) throws Exception {
    String indexLocation = args[0];
    int numTrainVectors = Integer.parseInt(args[1]);
    int vectorLength = Integer.parseInt(args[2]);
    int numPrincipalComponents = Integer.parseInt(args[3]);
    boolean whitening = true;
    boolean compact = false;

    PCA pca = new PCA(numPrincipalComponents, numTrainVectors, vectorLength, whitening);
    pca.setCompact(compact);
    // load the vectors into the PCA class
    Linear vladArray = new Linear(vectorLength, numTrainVectors, true, indexLocation, false, true, 0);
    for (int i = 0; i < numTrainVectors; i++) {
        pca.addSample(vladArray.getVector(i));
    }
    // now we are able to perform SVD and compute the eigenvectors
    System.out.println("PCA computation started!");
    long start = System.currentTimeMillis();
    pca.computeBasis();
    long end = System.currentTimeMillis();
    System.out.println("PCA computation completed in " + (end - start) + " ms");
    // now we can save the PCA matrix in a file (name records training size,
    // component count and the elapsed computation time)
    String PCAfile = indexLocation + "pca_" + numTrainVectors + "_" + numPrincipalComponents + "_"
            + (end - start) + "ms.txt";
    pca.savePCAToFile(PCAfile);
}
def compute_score(self):
    """Calculate the overall test score using the configuration."""
    # LOGGER.info("Begin scoring")
    cases = self.get_configured_tests() | set(self.result.cases)
    scores = DataFrame({"score": 0.0, "max": 1.0},
                       index=sorted(cases))
    self.result.setdefault("score", dict())
    self.result["score"]["sections"] = list()
    # Calculate the scores for each test individually.
    for test, result in iteritems(self.result.cases):
        # LOGGER.info("Calculate score for test: '%s'.", test)
        # Test metric may be a dictionary for a parametrized test.
        metric = result["metric"]
        if hasattr(metric, "items"):
            result["score"] = test_score = dict()
            total = 0.0
            # Scores are "1 - metric": a lower raw metric yields a higher score.
            for key, value in iteritems(metric):
                value = 1.0 - value
                total += value
                test_score[key] = value
            # For some reason there are parametrized tests without cases.
            if len(metric) == 0:
                metric = 0.0
            else:
                metric = total / len(metric)
        else:
            metric = 1.0 - metric
        scores.at[test, "score"] = metric
        # Per-test weight from configuration (defaults to 1.0).
        scores.loc[test, :] *= self.config["weights"].get(test, 1.0)
    score = 0.0
    maximum = 0.0
    # Calculate the scores for each section considering the individual test
    # case scores.
    for section_id, card in iteritems(
            self.config['cards']['scored']['sections']
    ):
        # LOGGER.info("Calculate score for section: '%s'.", section_id)
        cases = card.get("cases", None)
        if cases is None:
            continue
        card_score = scores.loc[cases, "score"].sum()
        card_total = scores.loc[cases, "max"].sum()
        # Format results nicely to work immediately with Vega Bar Chart.
        section_score = {"section": section_id,
                         "score": card_score / card_total}
        self.result["score"]["sections"].append(section_score)
        # Calculate the final score for the entire model.
        weight = card.get("weight", 1.0)
        score += card_score * weight
        maximum += card_total * weight
    self.result["score"]["total_score"] = score / maximum
python | def _error(self, exc_info):
""" Retrieves the error info """
if self._exc_info:
if self._traceback:
return exc_info
return exc_info[:2]
return exc_info[1] |
def shuffled_batches(self, batch_size):
    """ Generate randomized batches of data

    Yields this object unchanged when ``batch_size`` covers the whole
    set; otherwise yields ``Transitions`` slices built from a shuffled
    index permutation.
    """
    if batch_size >= self.size:
        yield self
    else:
        batch_splits = math_util.divide_ceiling(self.size, batch_size)
        indices = list(range(self.size))
        np.random.shuffle(indices)

        for sub_indices in np.array_split(indices, batch_splits):
            yield Transitions(
                size=len(sub_indices),
                environment_information=None,
                # Dont use it in batches for a moment, can be uncommented later if needed
                # environment_information=[info[sub_indices.tolist()] for info in self.environment_information]
                transition_tensors={k: v[sub_indices] for k, v in self.transition_tensors.items()}
                # extra_data does not go into batches
            )
def create_predictable_zip(path):
    """
    Create a zip file with predictable sort order and metadata so that MD5 will
    stay consistent if zipping the same content twice.

    Args:
        path (str): absolute path either to a directory to zip up, or an
            existing zip file to convert.

    Returns: path (str) to the output zip file
    """
    if os.path.isdir(path):
        # Directory input: enumerate every file beneath it, keeping the
        # paths relative to `path`.
        names = []
        for root, _dirs, files in os.walk(path):
            for name in files:
                names.append(os.path.join(root, name)[len(path) + 1:])

        def reader(relpath):
            return _read_file(os.path.join(path, relpath))
    elif os.path.isfile(path) and os.path.splitext(path)[1] == ".zip":
        # Existing zip input: re-pack its members.
        inputzip = zipfile.ZipFile(path)
        names = inputzip.namelist()

        def reader(relpath):
            return inputzip.read(relpath)
    else:
        raise Exception("The `path` must either point to a directory or to a zip file.")

    # create a temporary zip file path to write the output into
    zippathfd, zippath = tempfile.mkstemp(suffix=".zip")
    with zipfile.ZipFile(zippath, "w") as outputzip:
        # Sorted order makes the archive layout deterministic.
        for relpath in sorted(names):
            write_file_to_zip_with_neutral_metadata(outputzip, relpath, reader(relpath))
    os.fdopen(zippathfd).close()
    return zippath
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.