language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
---|---|
def cancelMatchRequest(cfg):
    """Ask the ladder server to cancel the pending match request for cfg.thePlayer.

    POSTs the player (JSON-encoded, wrapped in a one-element list) to the
    ladder's "cancelmatch" endpoint and returns the raw requests.Response.
    """
    # NOTE(review): the previous docstring described a player lookup; the
    # endpoint name makes clear this cancels a match request.
    payload = json.dumps([cfg.thePlayer])
    ladder = cfg.ladder
    return requests.post(
        url = c.URL_BASE%(ladder.ipAddress, ladder.serverPort, "cancelmatch"),
        data = payload,
        #headers=headers,
    )
def update_message(self, queue_name, message_id, pop_receipt, visibility_timeout,
                   content=None, timeout=None):
    '''
    Updates the visibility timeout of a message. You can also use this
    operation to update the contents of a message.

    This operation can be used to continually extend the invisibility of a
    queue message. This functionality can be useful if you want a worker role
    to "lease" a queue message. For example, if a worker role calls get_messages
    and recognizes that it needs more time to process a message, it can
    continually extend the message's invisibility until it is processed. If
    the worker role were to fail during processing, eventually the message
    would become visible again and another worker role could process it.

    :param str queue_name:
        The name of the queue containing the message to update.
    :param str message_id:
        The message id identifying the message to update.
    :param str pop_receipt:
        A valid pop receipt value returned from an earlier call
        to the :func:`~get_messages` or :func:`~update_message` operation.
    :param int visibility_timeout:
        Specifies the new visibility timeout value, in seconds,
        relative to server time. The new value must be larger than or equal
        to 0, and cannot be larger than 7 days. The visibility timeout of a
        message cannot be set to a value later than the expiry time. A
        message can be updated until it has been deleted or has expired.
    :param obj content:
        Message content. Allowed type is determined by the encode_function
        set on the service. Default is str.
    :param int timeout:
        The server timeout, expressed in seconds.
    :return:
        A list of :class:`~azure.storage.queue.models.QueueMessage` objects. Note that
        only time_next_visible and pop_receipt will be populated.
    :rtype: list of :class:`~azure.storage.queue.models.QueueMessage`
    '''
    # Fail fast on missing required arguments before doing any network work.
    _validate_not_none('queue_name', queue_name)
    _validate_not_none('message_id', message_id)
    _validate_not_none('pop_receipt', pop_receipt)
    _validate_not_none('visibility_timeout', visibility_timeout)

    # Build the Queue-service "Update Message" REST call (PUT).
    request = HTTPRequest()
    request.method = 'PUT'
    request.host = self._get_host()
    request.path = _get_path(queue_name, True, message_id)
    request.query = [
        ('popreceipt', _to_str(pop_receipt)),
        ('visibilitytimeout', _int_to_str(visibility_timeout)),
        ('timeout', _int_to_str(timeout))
    ]
    # A body is only sent when the message content itself is being replaced.
    if content is not None:
        request.body = _get_request_body(_convert_queue_message_xml(content, self.encode_function))

    response = self._perform_request(request)
    # NOTE(review): despite the ':return: A list ...' wording above, the
    # singular parser name suggests a single QueueMessage built from the
    # response headers — confirm against the service client's contract.
    return _parse_queue_message_from_headers(response)
def add(self, dimlist, dimvalues):
    '''
    Add dimensions to this object.

    :parameter dimlist: list of dimension names
    :parameter dimvalues: list of values, one per entry in ``dimlist``
    '''
    # Store each dimension by position; indexing (rather than zip) keeps the
    # original IndexError behaviour when dimvalues is shorter than dimlist.
    for position in range(len(dimlist)):
        self[dimlist[position]] = dimvalues[position]
    # Refresh the cached dimension count.
    self.set_ndims()
java | public void setUnprocessedResourceIdentifiers(java.util.Collection<AggregateResourceIdentifier> unprocessedResourceIdentifiers) {
if (unprocessedResourceIdentifiers == null) {
this.unprocessedResourceIdentifiers = null;
return;
}
this.unprocessedResourceIdentifiers = new com.amazonaws.internal.SdkInternalList<AggregateResourceIdentifier>(unprocessedResourceIdentifiers);
} |
/**
 * Asynchronously retrieve the connection, unwrapping the ServiceResponse
 * envelope so subscribers receive just the ConnectionInner body.
 */
public Observable<ConnectionInner> getAsync(String resourceGroupName, String automationAccountName, String connectionName) {
    return getWithServiceResponseAsync(resourceGroupName, automationAccountName, connectionName).map(new Func1<ServiceResponse<ConnectionInner>, ConnectionInner>() {
        @Override
        public ConnectionInner call(ServiceResponse<ConnectionInner> response) {
            return response.body();
        }
    });
}
def ServiceAccountCredentialsFromFile(filename, scopes, user_agent=None):
    """Use the credentials in filename to create a token for scopes.

    Supports both oauth2client >= 2.0 (JSON keyfile helper) and the legacy
    < 2.0 private constructor. Raises exceptions.CredentialsError when the
    legacy keyfile is not of service-account type.
    """
    filename = os.path.expanduser(filename)
    # We have two options, based on our version of oauth2client.
    # NOTE(review): this is a lexicographic string comparison, not a semantic
    # version compare — e.g. '1.10.0' > '1.5.2' would be False. It happens to
    # split 1.x from 2.x correctly, but is fragile for multi-digit versions.
    if oauth2client.__version__ > '1.5.2':
        # oauth2client >= 2.0.0
        credentials = (
            service_account.ServiceAccountCredentials.from_json_keyfile_name(
                filename, scopes=scopes))
        if credentials is not None:
            if user_agent is not None:
                credentials.user_agent = user_agent
        return credentials
    else:
        # oauth2client < 2.0.0
        with open(filename) as keyfile:
            service_account_info = json.load(keyfile)
        account_type = service_account_info.get('type')
        if account_type != oauth2client.client.SERVICE_ACCOUNT:
            raise exceptions.CredentialsError(
                'Invalid service account credentials: %s' % (filename,))
        # pylint: disable=protected-access
        credentials = service_account._ServiceAccountCredentials(
            service_account_id=service_account_info['client_id'],
            service_account_email=service_account_info['client_email'],
            private_key_id=service_account_info['private_key_id'],
            private_key_pkcs8_text=service_account_info['private_key'],
            scopes=scopes, user_agent=user_agent)
        # pylint: enable=protected-access
        return credentials
def shorten_name(name, char_limit, side='right'):
    """Return `name`, truncated with an ellipsis if longer than `char_limit`.

    `side` selects which end is dropped ('right' or 'left'). If the name has
    an HDU suffix (a trailing '[...]' part, per get_fileinfo), that suffix is
    always preserved intact.
    """
    # Nothing to do when no limit is set or the name already fits.
    if char_limit is None or len(name) <= char_limit:
        return name

    info = get_fileinfo(name)
    if info.numhdu is not None:
        # Split off the '[...]' HDU suffix so it survives truncation.
        cut = name.rindex('[')
        prefix, suffix = name[:cut], name[cut:]
        # Room left for prefix text after the suffix and '...' are budgeted.
        keep = char_limit - len(suffix) - 3
        if keep <= 0:
            return '...{0}'.format(suffix)
        if side == 'right':
            return '{0}...{1}'.format(prefix[:keep], suffix)
        if side == 'left':
            return '...{0}{1}'.format(prefix[-keep:], suffix)
        return name

    # Plain name: keep char_limit - 2 characters plus the ellipsis.
    keep = char_limit - 2
    if side == 'right':
        return '{0}...'.format(name[:keep])
    if side == 'left':
        return '...{0}'.format(name[-keep:])
    return name
python | def _get_local_fields(self, model):
"Return the names of all locally defined fields on the model class."
local = [f for f in model._meta.fields]
m2m = [f for f in model._meta.many_to_many]
fields = local + m2m
names = tuple([x.name for x in fields])
return {
':local': dict(list(zip(names, fields))),
} |
/**
 * Compute the context-rooted path for the requested resource, handling both
 * direct requests and RequestDispatcher.include() dispatches (which expose
 * the included resource's path via the javax.servlet.include.* attributes).
 * Never returns null or the empty string; falls back to "/".
 */
protected String getRelativePath(final HttpServletRequest request) {
    // IMPORTANT: DefaultServlet can be mapped to '/' or '/path/*' but
    // always
    // serves resources from the web app root with context rooted paths.
    // i.e. it can not be used to mount the web app root under a sub-path
    // This method must construct a complete context rooted path, although
    // subclasses can change this behaviour.

    // Are we being processed by a RequestDispatcher.include()?
    if (request.getAttribute(RequestDispatcher.INCLUDE_REQUEST_URI) != null) {
        // Use the include attributes: servlet path + path info of the
        // *included* resource, not of the original request.
        String result = (String) request.getAttribute(RequestDispatcher.INCLUDE_PATH_INFO);
        if (result == null) {
            result = (String) request.getAttribute(RequestDispatcher.INCLUDE_SERVLET_PATH);
        } else {
            result = (String) request.getAttribute(RequestDispatcher.INCLUDE_SERVLET_PATH) + result;
        }
        if ((result == null) || (result.equals(""))) {
            result = "/";
        }
        return (result);
    }

    // No, extract the desired path directly from the request
    String result = request.getPathInfo();
    if (result == null) {
        result = request.getServletPath();
    } else {
        result = request.getServletPath() + result;
    }
    if ((result == null) || (result.equals(""))) {
        result = "/";
    }
    return (result);
}
/**
 * Materialize every row of {@code resultSet} into new instances of
 * {@code targetClass}. Self-join columns are handled in two passes: rows are
 * built first while collecting id and foreign-key maps, then the self-join
 * references are stitched together. The result set is closed on all paths.
 *
 * @throws SQLException on result-set access errors; reflection/mapping
 *         failures are wrapped in RuntimeException
 */
public static <T> List<T> resultSetToList(final ResultSet resultSet, final Class<T> targetClass) throws SQLException
{
    final List<T> list = new ArrayList<>();
    // Empty result: close the cursor and return an empty list immediately.
    if (!resultSet.next()) {
        resultSet.close();
        return list;
    }

    final Introspected introspected = Introspector.getIntrospected(targetClass);
    final boolean hasJoinColumns = introspected.hasSelfJoinColumn();
    // target -> FK value and id -> target, used later to resolve self joins.
    final Map<T, Object> deferredSelfJoinFkMap = (hasJoinColumns ? new HashMap<>() : null);
    final Map<Object, T> idToTargetMap = (hasJoinColumns ? new HashMap<>() : null);

    final ResultSetMetaData metaData = resultSet.getMetaData();
    final int columnCount = metaData.getColumnCount();
    // Cache lower-cased column names once; metadata calls are not cheap.
    final String[] columnNames = new String[columnCount];
    for (int column = columnCount; column > 0; column--) {
        columnNames[column - 1] = metaData.getColumnName(column).toLowerCase();
    }

    // try-with-resources guarantees the cursor is closed on every path.
    try (final ResultSet closeRS = resultSet) {
        do {
            final T target = targetClass.newInstance();
            list.add(target);
            for (int column = columnCount; column > 0; column--) {
                final Object columnValue = resultSet.getObject(column);
                if (columnValue == null) {
                    continue;
                }
                final String columnName = columnNames[column - 1];
                // NOTE(review): fcInfo is not null-checked; a column with no
                // mapped field would raise a NullPointerException here.
                final FieldColumnInfo fcInfo = introspected.getFieldColumnInfo(columnName);
                if (fcInfo.isSelfJoinField()) {
                    // Defer: the referenced row may not be materialized yet.
                    deferredSelfJoinFkMap.put(target, columnValue);
                }
                else {
                    introspected.set(target, fcInfo, columnValue);
                }
            }
            if (hasJoinColumns) {
                idToTargetMap.put(introspected.getActualIds(target)[0], target);
            }
        }
        while (resultSet.next());
    }
    catch (Exception e) {
        throw new RuntimeException(e);
    }

    try {
        if (hasJoinColumns) {
            // set the self join object instances based on the foreign key ids...
            final FieldColumnInfo idColumn = introspected.getSelfJoinColumnInfo();
            for (Entry<T, Object> entry : deferredSelfJoinFkMap.entrySet()) {
                final T value = idToTargetMap.get(entry.getValue());
                if (value != null) {
                    introspected.set(entry.getKey(), idColumn, value);
                }
            }
        }
    }
    catch (Exception e) {
        throw new RuntimeException(e);
    }

    return list;
}
def on_startup(self, callback: callable, polling=True, webhook=True):
    """
    Register a callback (or a collection of callbacks) to run at startup.

    :param callback: callable, or a list/tuple/set of callables
    :param polling: register for the polling startup sequence
    :param webhook: register for the webhook startup sequence
    """
    self._check_frozen()

    # Registering for neither mode is a no-op; warn instead of failing.
    if not (polling or webhook):
        warn('This action has no effect!', UserWarning)
        return

    # Collections are flattened by registering each member individually.
    if isinstance(callback, (list, tuple, set)):
        for item in callback:
            self.on_startup(item, polling, webhook)
        return

    if polling:
        self._on_startup_polling.append(callback)
    if webhook:
        self._on_startup_webhook.append(callback)
/**
 * Run the HDLC receive state machine over the bits buffered in {@code h}.
 * Consumes bits from h.data/h.bits via precomputed table lookups until either
 * too few bits remain for the current state or a non-empty result is ready.
 *
 * @return RETURN_EMPTY_FLAG when more bits are needed, RETURN_COMPLETE_FLAG
 *         on a complete valid frame, RETURN_DISCARD_FLAG on an abort, or a
 *         decoded data value (DATA_MASK bits).
 */
public int fasthdlc_rx_run(HdlcState h) {
    int next;
    int retval = RETURN_EMPTY_FLAG;
    while ((h.bits >= minbits[h.state]) && (retval == RETURN_EMPTY_FLAG)) {
        /*
         * Run until we can no longer be assured that we will have enough bits to continue
         */
        switch (h.state) {
        case FRAME_SEARCH:
            /*
             * Look for an HDLC frame, keying from the top byte.
             */
            next = hdlc_search[(h.data >> 24) & 0xff];
            // Table entry: low nibble = bits consumed, high bits = next state.
            h.bits -= next & 0x0f;
            h.data <<= next & 0x0f;
            h.state = (next >> 4) & 0xff;
            h.ones = 0;
            break;
        case PROCESS_FRAME:
            /* Process as much as the next ten bits */
            next = hdlc_frame[h.ones][(h.data >>> 22) & 0x3ff]; // Must be 10 bits here, not 8, that's all
            // next = hdlc_frame_precalc(h.ones, (h.data >> 22)& 0x3ff);
            // Packed entry: bits consumed, next state, ones count, status.
            h.bits -= (((next & 0x0f00) >> 8) & 0xff);
            h.data <<= (((next & 0x0f00) >> 8) & 0xff);
            h.state = ((next & STATE_MASK) >> 15) & 0xff;
            h.ones = (((next & ONES_MASK) >> 12) & 0xff);
            switch (next & STATUS_MASK) {
            case STATUS_CONTROL:
                if ((next & CONTROL_COMPLETE) != 0) {
                    /* A complete, valid frame received */
                    retval = (RETURN_COMPLETE_FLAG);
                    /* Stay in this state */
                    h.state = 1;
                } else {
                    /* An abort (either out of sync of explicit) */
                    retval = (RETURN_DISCARD_FLAG);
                }
                break;
            case STATUS_VALID:
                // A decoded data value; returned as-is (DATA_MASK bits).
                retval = (next & DATA_MASK);
            }
        }
    }
    return retval;
}
/**
 * Create a suite {@link Description} for the given test class, keyed by the
 * class itself and carrying its name and class-level annotations.
 */
public static Description createSuiteDescription(Class<?> testClass) {
    return new Description(testClass, testClass.getName(), testClass.getAnnotations());
}
/**
 * Create a single-use builder for parser {@code Config} objects. Every
 * factory defaults to the shared DEFAULT_* implementation and tag handlers
 * start from {@code defaultTagHandlers()}; {@code build()} freezes the
 * builder (further use throws IllegalStateException).
 */
public static Builder newParserConfigBuilder() {
    return new Builder() {
        boolean used = false;   // flipped once build() succeeds
        CollectionBuilder.Factory listFactory = DEFAULT_LIST_FACTORY;
        CollectionBuilder.Factory vectorFactory = DEFAULT_VECTOR_FACTORY;
        CollectionBuilder.Factory setFactory = DEFAULT_SET_FACTORY;
        CollectionBuilder.Factory mapFactory = DEFAULT_MAP_FACTORY;
        Map<Tag, TagHandler> tagHandlers = defaultTagHandlers();

        public Builder setListFactory(CollectionBuilder.Factory listFactory) {
            checkState();
            this.listFactory = listFactory;
            return this;
        }

        public Builder setVectorFactory(CollectionBuilder.Factory vectorFactory) {
            checkState();
            this.vectorFactory = vectorFactory;
            return this;
        }

        public Builder setSetFactory(CollectionBuilder.Factory setFactory) {
            checkState();
            this.setFactory = setFactory;
            return this;
        }

        public Builder setMapFactory(CollectionBuilder.Factory mapFactory) {
            checkState();
            this.mapFactory = mapFactory;
            return this;
        }

        public Builder putTagHandler(Tag tag, TagHandler handler) {
            checkState();
            this.tagHandlers.put(tag, handler);
            return this;
        }

        public Config build() {
            checkState();
            used = true;
            // The returned Config captures the builder's fields; since the
            // builder is now frozen they can no longer change underneath it.
            return new Config() {
                public Factory getListFactory() {
                    return listFactory;
                }
                public Factory getVectorFactory() {
                    return vectorFactory;
                }
                public Factory getSetFactory() {
                    return setFactory;
                }
                public Factory getMapFactory() {
                    return mapFactory;
                }
                public TagHandler getTagHandler(Tag tag) {
                    return tagHandlers.get(tag);
                }
            };
        }

        // Guard: every mutator and build() rejects a used-up builder.
        private void checkState() {
            if (used) {
                throw new IllegalStateException(
                    "Builder is single-use. Not usable after build()");
            }
        }
    };
}
java | private void removeVetoedClasses(Set<Class<?>> classes) {
//get hold of classnames
Set<String> classNames = new HashSet<String>();
for (Class<?> clazz : classes) {
classNames.add(clazz.getName());
}
// take into considerations of the exclude in beans.xml
Collection<String> includedClasses = WeldCDIUtils.filterClassesBasedOnBeansXML(this.beansXml, this.resourceLoader, classNames);
Iterator<Class<?>> iterator = classes.iterator();
while (iterator.hasNext()) {
Class<?> clazz = iterator.next();
if (WeldCDIUtils.isClassVetoed(clazz)) {
iterator.remove();
} else if (!includedClasses.contains(clazz.getName())) {
iterator.remove();
}
}
} |
/**
 * Recursively verify that every element and attribute inside an entity
 * reference subtree has a namespace binding for its prefix. Each unbound
 * prefix is reported as a SEVERITY_FATAL_ERROR through fErrorHandler (when
 * one is registered); traversal continues regardless.
 */
protected void checkUnboundPrefixInEntRef(Node node) {
    Node child, next;
    for (child = node.getFirstChild(); child != null; child = next) {
        next = child.getNextSibling();

        if (child.getNodeType() == Node.ELEMENT_NODE) {

            //If a NamespaceURI is not declared for the current
            //node's prefix, raise a fatal error.
            String prefix = child.getPrefix();
            if (prefix != null
                && fNSBinder.getURI(prefix) == null) {
                String msg =
                    Utils.messages.createMessage(
                        MsgKey.ER_ELEM_UNBOUND_PREFIX_IN_ENTREF,
                        new Object[] {
                            node.getNodeName(),
                            child.getNodeName(),
                            prefix });

                if (fErrorHandler != null) {
                    fErrorHandler.handleError(
                        new DOMErrorImpl(
                            DOMError.SEVERITY_FATAL_ERROR,
                            msg,
                            MsgKey.ER_ELEM_UNBOUND_PREFIX_IN_ENTREF,
                            null,
                            null,
                            null));
                }
            }

            // Check each attribute's prefix the same way.
            NamedNodeMap attrs = child.getAttributes();

            for (int i = 0; i < attrs.getLength(); i++) {

                String attrPrefix = attrs.item(i).getPrefix();
                if (attrPrefix != null
                    && fNSBinder.getURI(attrPrefix) == null) {
                    String msg =
                        Utils.messages.createMessage(
                            MsgKey.ER_ATTR_UNBOUND_PREFIX_IN_ENTREF,
                            new Object[] {
                                node.getNodeName(),
                                child.getNodeName(),
                                attrs.item(i)});

                    if (fErrorHandler != null) {
                        fErrorHandler.handleError(
                            new DOMErrorImpl(
                                DOMError.SEVERITY_FATAL_ERROR,
                                msg,
                                MsgKey.ER_ATTR_UNBOUND_PREFIX_IN_ENTREF,
                                null,
                                null,
                                null));
                    }
                }
            }
        }

        // Recurse into the subtree regardless of this node's type.
        if (child.hasChildNodes()) {
            checkUnboundPrefixInEntRef(child);
        }
    }
}
def create_code_cell(block):
    """Create a notebook code cell from a block.

    Copies the block's content into a new code cell; when the block carries
    attributes they are stored in the cell metadata and the 'n' key (if any)
    becomes the cell's execution count.
    """
    attributes = block['attributes']
    cell = nbbase.new_code_cell(source=block['content'])
    if not attributes.is_empty:
        cell.metadata = nbbase.NotebookNode({'attributes': attributes.to_dict()})
        # 'n' is the prompt number; absent/empty means no recorded execution.
        count = attributes.kvs.get('n')
        cell.execution_count = int(count) if count else None
    return cell
/**
 * Return the in-order predecessor of this entry using only in-memory links:
 * the rightmost node of the left subtree when one exists, otherwise the
 * nearest ancestor reached from a right child. Returns null when this entry
 * is the tree's first (smallest) in-memory entry.
 */
public OMVRBTreeEntry<K, V> getPreviousInMemory() {
    OMVRBTreeEntry<K, V> t = this;
    OMVRBTreeEntry<K, V> p = null;

    if (t.getLeftInMemory() != null) {
        // Predecessor is the maximum of the left subtree.
        p = t.getLeftInMemory();
        while (p.getRightInMemory() != null)
            p = p.getRightInMemory();
    } else {
        // Climb while we are a left child; the first ancestor we leave via
        // a right link is the predecessor.
        p = t.getParentInMemory();
        while (p != null && t == p.getLeftInMemory()) {
            t = p;
            p = p.getParentInMemory();
        }
    }

    return p;
}
java | @When("^I create an elasticsearch index named '(.+?)'( removing existing index if exist)?$")
public void createElasticsearchIndex(String index, String removeIndex) {
if (removeIndex != null && commonspec.getElasticSearchClient().indexExists(index)) {
commonspec.getElasticSearchClient().dropSingleIndex(index);
}
commonspec.getElasticSearchClient().createSingleIndex(index);
} |
/**
 * Replace the whole value with {@code value} (must be non-null), delegating
 * to the range-based overload covering the full character sequence.
 */
public void setValue(CharSequence value) {
    Validate.notNull(value);
    setValue(value, 0, value.length());
}
def pyxb_to_dict(rp_pyxb):
    """Convert ReplicationPolicy PyXB object to a normalized dict.

    Args:
        rp_pyxb: ReplicationPolicy to convert.

    Returns:
        dict: Replication Policy as normalized dict.

    Example::

        {
            'allowed': True,
            'num': 3,
            'blockedMemberNode': {'urn:node:NODE1', 'urn:node:NODE2', 'urn:node:NODE3'},
            'preferredMemberNode': {'urn:node:NODE4', 'urn:node:NODE5'},
        }
    """
    # Build the normalized mapping field by field, in the same order the
    # original dict literal evaluated its values.
    normalized = {'allowed': bool(_get_attr_or_list(rp_pyxb, 'allowed'))}
    normalized['num'] = _get_as_int(rp_pyxb)
    normalized['block'] = _get_as_set(rp_pyxb, 'block')
    normalized['pref'] = _get_as_set(rp_pyxb, 'pref')
    return normalized
def get_portchannel_info_by_intf_output_lacp_partner_brcd_state(self, **kwargs):
    """Auto Generated Code

    Build the <get_portchannel_info_by_intf>/<output>/<lacp>/
    <partner-brcd-state> XML payload and hand it to the callback
    (kwargs['callback'] or self._callback).
    """
    # Assemble the element tree top-down.
    config = ET.Element("get_portchannel_info_by_intf")
    output = ET.SubElement(config, "output")
    lacp = ET.SubElement(output, "lacp")
    state = ET.SubElement(lacp, "partner-brcd-state")
    state.text = kwargs.pop('partner_brcd_state')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def _search(self, base, fltr, attrs=None, scope=ldap.SCOPE_SUBTREE):
    """Perform LDAP search.

    Delegates to the underlying connection's search_s; on any failure the
    translated LDAP message is logged and False is returned (note the
    caller must therefore distinguish False from an empty result list).
    """
    try:
        return self._conn.search_s(base, scope, fltr, attrs)
    except Exception as exc:
        log.exception(self._get_ldap_msg(exc))
        return False
/**
 * Detach {@code listener} from the root parent window when that parent is a
 * JFrame or JDialog; any other (or null) root parent is silently ignored.
 */
private void removeParentPropertyChangeListener(PropertyChangeListener listener) {
    if (rootParent instanceof JFrame) {
        ((JFrame) rootParent).removePropertyChangeListener(listener);
    } else if (rootParent instanceof JDialog) {
        ((JDialog) rootParent).removePropertyChangeListener(listener);
    }
}
def ystep(self):
    r"""Minimise the Augmented Lagrangian with respect to :math:`\mathbf{y}`.

    Without an override this applies no regularisation beyond the optional
    enforcement of non-negativity and suppression of filter boundary
    crossing. Overriding implementations should call this method explicitly
    at the end of their own ``ystep``.
    """
    # Optional projection onto the non-negative orthant (in place).
    if self.opt['NonNegCoef']:
        self.Y[self.Y < 0.0] = 0.0
    # Optionally zero coefficients whose filters would cross the boundary:
    # for each spatial axis, clear the trailing (filter length - 1) slab.
    if self.opt['NoBndryCross']:
        for axis in range(self.cri.dimN):
            border = (slice(None),) * axis + \
                (slice(1 - self.D.shape[axis], None),)
            self.Y[border] = 0.0
/**
 * Record whether the generated datatype exposes a toBuilder() method, and
 * mark the property as explicitly set.
 *
 * @return this builder, for call chaining
 */
public Datatype.Builder setHasToBuilderMethod(boolean hasToBuilderMethod) {
    this.hasToBuilderMethod = hasToBuilderMethod;
    _unsetProperties.remove(Property.HAS_TO_BUILDER_METHOD);
    return (Datatype.Builder) this;
}
/**
 * Split the model's shapes into two groups: the shapes the model reports as
 * animated, and every remaining named shape, which is treated as static.
 */
private void autoDetectAnimatedGroups()
{
    animatedShapes = model.getAnimatedShapes();
    staticShapes = model.getShapeNames().stream().filter(s -> !animatedShapes.contains(s)).collect(Collectors.toSet());
    //Debug: all animated
    // animatedShapes.addAll(staticShapes);
    // staticShapes.clear();
}
def get_cpuid_leaf(self, idx, idx_sub):
    """Returns the virtual CPU cpuid information for the specified leaf.

    Currently supported index values for cpuid:
        Standard CPUID leaves: 0 - 0x1f
        Extended CPUID leaves: 0x80000000 - 0x8000001f
        VIA CPUID leaves: 0xc0000000 - 0xc000000f

    See the Intel, AMD and VIA programmer's manuals for detailed information
    about the CPUID instruction and its leaves.

    in idx of type int
        CPUID leaf index.

    in idx_sub of type int
        CPUID leaf sub-index (ECX). Set to 0xffffffff (or 0) if not applicable.

    out val_eax of type int
        CPUID leaf value for register eax.

    out val_ebx of type int
        CPUID leaf value for register ebx.

    out val_ecx of type int
        CPUID leaf value for register ecx.

    out val_edx of type int
        CPUID leaf value for register edx.

    raises :class:`OleErrorInvalidarg`
        Invalid index.
    """
    # Type-check up front: the generated API layer expects integer leaves.
    if not isinstance(idx, baseinteger):
        raise TypeError("idx can only be an instance of type baseinteger")
    if not isinstance(idx_sub, baseinteger):
        raise TypeError("idx_sub can only be an instance of type baseinteger")
    # Delegate to the underlying COM/XPCOM call; it returns all four
    # CPUID output registers at once.
    (val_eax, val_ebx, val_ecx, val_edx) = self._call("getCPUIDLeaf",
                 in_p=[idx, idx_sub])
    return (val_eax, val_ebx, val_ecx, val_edx)
/**
 * Derive a resource-variant key from the request's User-Agent header:
 * "ie&lt;major&gt;" for Internet Explorer, else "webkit", "firefox" or
 * "opera"; null when no UA header is present or the browser is unknown.
 */
@Override
public String resolveVariant(HttpServletRequest request) {
    String browser = null;
    String userAgent = request.getHeader("User-Agent");
    if (userAgent != null) {
        Matcher matcher = IE_PATTERN.matcher(userAgent);
        if (matcher.find()) {
            browser = "ie" + matcher.group(1);
        } else if (userAgent.contains("AppleWebKit")) {
            // NOTE(review): branch order matters — UAs that advertise
            // multiple tokens (e.g. WebKit-based Opera) classify as "webkit".
            browser = "webkit";
        } else if (userAgent.contains("Firefox")) {
            browser = "firefox";
        } else if (userAgent.contains("Opera")) {
            browser = "opera";
        }
    }
    return browser;
}
/**
 * Return typed wrappers for all &lt;converter&gt; children of this
 * faces-config node. A fresh list is built on every call; mutating the
 * returned list does not alter the underlying XML.
 */
public List<FacesConfigConverterType<FacesConfigType<T>>> getAllConverter()
{
    List<FacesConfigConverterType<FacesConfigType<T>>> list = new ArrayList<FacesConfigConverterType<FacesConfigType<T>>>();
    List<Node> nodeList = childNode.get("converter");
    for(Node node: nodeList)
    {
        FacesConfigConverterType<FacesConfigType<T>> type = new FacesConfigConverterTypeImpl<FacesConfigType<T>>(this, "converter", childNode, node);
        list.add(type);
    }
    return list;
}
java | public Evaluator sync(Map<String, List<Subscription>> subs) {
Set<String> removed = subscriptions.keySet();
removed.removeAll(subs.keySet());
removed.forEach(this::removeGroupSubscriptions);
subs.forEach(this::addGroupSubscriptions);
return this;
} |
def y(self, y):
    """Project reversed y"""
    if y is None:
        return None
    # Map the data-space y, measured from box.ymin, onto the render height.
    offset = y - self.box.ymin
    return (self.height * offset) / self.box.height
/**
 * REST call: DELETE /cloud/project/{serviceName}/storage/{containerId}/cors
 * — remove a CORS origin from the given storage container.
 *
 * @param serviceName project id substituted into the path
 * @param containerId container id substituted into the path
 * @param origin origin to delete, sent as the "origin" query parameter
 */
public void project_serviceName_storage_containerId_cors_DELETE(String serviceName, String containerId, String origin) throws IOException {
    String qPath = "/cloud/project/{serviceName}/storage/{containerId}/cors";
    StringBuilder sb = path(qPath, serviceName, containerId);
    query(sb, "origin", origin);
    exec(qPath, "DELETE", sb.toString(), null);
}
/**
 * Validate that {@code pContent} does not exceed 10000 characters; null or
 * empty content is accepted.
 *
 * @throws APPErrorException when the text is longer than 10000 characters
 *         (the message text is Chinese: "content length exceeds 10000,
 *         please adjust")
 */
public static void validBigTextLength(String pContent) throws APPErrorException {
    if(!StringUtil.isNullOrEmpty(pContent)&&pContent.length()>10000)
    {
        throw new APPErrorException("内容长度超过10000,请调整;");
    }
}
/**
 * Format {@code s} as one or more quoted Java source string literals,
 * wrapping at roughly 80 columns with '+' line continuations. Control
 * characters, backslash and double quote are escaped (octal for the
 * general case), and characters above U+007E use \\uXXXX escapes so the
 * generated source survives Unicode-escape preprocessing.
 */
static public final String formatForSource(String s) {
    StringBuilder buffer = new StringBuilder();
    for (int i=0; i<s.length();) {
        // Continuation lines are joined with '+'.
        if (i > 0) buffer.append('+').append(LINE_SEPARATOR);
        buffer.append("        \"");
        int count = 11;   // column position: indent + opening quote
        while (i<s.length() && count<80) {
            char c = s.charAt(i++);
            if (c < '\u0020' || c == '"' || c == '\\') {
                if (c == '\n') {
                    buffer.append("\\n");
                    count += 2;
                } else if (c == '\t') {
                    buffer.append("\\t");
                    count += 2;
                } else if (c == '\r') {
                    buffer.append("\\r");
                    count += 2;
                } else {
                    // Represent control characters, backslash and double quote
                    // using octal notation; otherwise the string we form
                    // won't compile, since Unicode escape sequences are
                    // processed before tokenization.
                    buffer.append('\\');
                    buffer.append(HEX_DIGIT[(c & 0700) >> 6]); // HEX_DIGIT works for octal
                    buffer.append(HEX_DIGIT[(c & 0070) >> 3]);
                    buffer.append(HEX_DIGIT[(c & 0007)]);
                    count += 4;
                }
            }
            else if (c <= '\u007E') {
                // Printable ASCII passes through unchanged.
                buffer.append(c);
                count += 1;
            }
            else {
                // Everything else becomes a \\uXXXX escape.
                buffer.append("\\u");
                buffer.append(HEX_DIGIT[(c & 0xF000) >> 12]);
                buffer.append(HEX_DIGIT[(c & 0x0F00) >> 8]);
                buffer.append(HEX_DIGIT[(c & 0x00F0) >> 4]);
                buffer.append(HEX_DIGIT[(c & 0x000F)]);
                count += 6;
            }
        }
        buffer.append('"');
    }
    return buffer.toString();
}
/**
 * Wire up the password field: validators (min/max length, no whitespace),
 * strength constraints (suggested length, letter, number, symbol) and the
 * per-constraint helper texts and colors.
 */
private void initializePasswordEditText() {
    passwordEditText = (PasswordEditText) findViewById(R.id.password_edit_text);
    // Hard validation rules — these reject input outright.
    passwordEditText.addValidator(Validators
            .minLength(this, R.string.password_min_length_validator_error_message,
                    MIN_PASSWORD_LENGTH));
    passwordEditText.addValidator(Validators
            .maxLength(this, R.string.max_length_validator_error_messsage, MAX_CHARACTERS));
    passwordEditText.addValidator(
            Validators.noWhitespace(this, R.string.no_whitespace_validator_error_message));
    // Soft constraints drive the strength meter, not validation.
    passwordEditText.addAllConstraints(Constraints.minLength(SUGGESTED_PASSWORD_LENGTH),
            Constraints.containsLetter(), Constraints.containsNumber(),
            Constraints.containsSymbol());
    // One helper text + color per satisfied-constraint count (0..4).
    passwordEditText.addAllHelperTextIds(R.string.password_edit_text_helper_text0,
            R.string.password_edit_text_helper_text1, R.string.password_edit_text_helper_text2,
            R.string.password_edit_text_helper_text3, R.string.password_edit_text_helper_text4);
    passwordEditText.addAllHelperTextColorIds(R.color.password_edit_text_helper_text_color0,
            R.color.password_edit_text_helper_text_color1,
            R.color.password_edit_text_helper_text_color2,
            R.color.password_edit_text_helper_text_color3,
            R.color.password_edit_text_helper_text_color4);
}
def interface_to_relations(interface_name):
    """
    Given an interface, return a list of relation names for the current
    charm that use that interface.

    :returns: A list of relation names.
    """
    # Flatten the per-role relation lists, preserving role order.
    return [
        relation
        for role in ('provides', 'requires', 'peers')
        for relation in role_and_interface_to_relations(role, interface_name)
    ]
def setOutBoundLinkQuality(self, LinkQuality):
    """set custom LinkQualityIn for all receiving messages from the any address

    Args:
        LinkQuality: a given custom link quality
                     link quality/link margin mapping table
                     3: 21 - 255 (dB)
                     2: 11 - 20 (dB)
                     1: 3 - 9 (dB)
                     0: 0 - 2 (dB)

    Returns:
        True: successful to set the link quality
        False: fail to set the link quality
    """
    # NOTE(review): Python 2 only — the print statements and the
    # 'except Exception, e' syntax do not parse under Python 3.
    print '%s call setOutBoundLinkQuality' % self.port
    print LinkQuality
    try:
        # Apply the LQI to traffic from every source address ('*').
        cmd = 'macfilter rss add-lqi * %s' % str(LinkQuality)
        print cmd
        return self.__sendCommand(cmd)[0] == 'Done'
    except Exception, e:
        # NOTE(review): this path logs and implicitly returns None, not the
        # False promised by the docstring — confirm how callers test it.
        ModuleHelper.WriteIntoDebugLogger("setOutBoundLinkQuality() Error: " + str(e))
def mygenerator(n=5, n_edges=5):
    '''
    Just a simple generator that creates a network with n nodes and
    n_edges edges. Edges are assigned randomly, only avoiding self loops.
    '''
    G = nx.Graph()
    for i in range(n):
        G.add_node(i)
    # Draw random endpoints; removing the source from the candidate list
    # prevents self loops. Duplicate picks are possible — nx.Graph collapses
    # parallel edges, so fewer than n_edges distinct edges may result.
    # NOTE(review): requires n >= 2 when n_edges > 0; otherwise choice() is
    # called on an empty list and raises IndexError.
    for i in range(n_edges):
        nodes = list(G.nodes)
        n_in = choice(nodes)
        nodes.remove(n_in) # Avoid loops
        n_out = choice(nodes)
        G.add_edge(n_in, n_out)
    return G
/**
 * Varargs convenience for adding shard-level metrics: each MetricsName is
 * stringified and appended to the current list (which is created first if
 * absent).
 *
 * @return this result object, for call chaining
 */
public EnableEnhancedMonitoringResult withCurrentShardLevelMetrics(MetricsName... currentShardLevelMetrics) {
    com.amazonaws.internal.SdkInternalList<String> currentShardLevelMetricsCopy = new com.amazonaws.internal.SdkInternalList<String>(
            currentShardLevelMetrics.length);
    for (MetricsName value : currentShardLevelMetrics) {
        currentShardLevelMetricsCopy.add(value.toString());
    }
    if (getCurrentShardLevelMetrics() == null) {
        setCurrentShardLevelMetrics(currentShardLevelMetricsCopy);
    } else {
        getCurrentShardLevelMetrics().addAll(currentShardLevelMetricsCopy);
    }
    return this;
}
/**
 * Reflectively force {@code value} into {@code foundField} of {@code object},
 * temporarily clearing a final modifier if present and restoring the
 * original modifiers afterwards. IllegalAccessException is treated as an
 * internal error and rethrown as RuntimeException.
 */
private static void setField(Object object, Object value, Field foundField) {
    foundField.setAccessible(true);
    try {
        // Remember the original modifiers so they can be restored after the write.
        int fieldModifiersMask = foundField.getModifiers();
        removeFinalModifierIfPresent(foundField);
        foundField.set(object, value);
        restoreModifiersToFieldIfChanged(fieldModifiersMask, foundField);
    } catch (IllegalAccessException e) {
        throw new RuntimeException("Internal error: Failed to set field in method setInternalState.", e);
    }
}
def edit_user_login(self, id, account_id, login_integration_id=None, login_password=None, login_sis_user_id=None, login_unique_id=None):
    """
    Edit a user login.

    Update an existing login for a user in the given account. Issues
    PUT /api/v1/accounts/{account_id}/logins/{id}; only the keyword
    arguments that are not None are included in the form data.
    """
    path = {}
    data = {}
    params = {}

    # REQUIRED - PATH - account_id
    """ID"""
    path["account_id"] = account_id

    # REQUIRED - PATH - id
    """ID"""
    path["id"] = id

    # OPTIONAL - login[unique_id]
    """The new unique ID for the login."""
    if login_unique_id is not None:
        data["login[unique_id]"] = login_unique_id

    # OPTIONAL - login[password]
    """The new password for the login. Can only be set by an admin user if admins
    are allowed to change passwords for the account."""
    if login_password is not None:
        data["login[password]"] = login_password

    # OPTIONAL - login[sis_user_id]
    """SIS ID for the login. To set this parameter, the caller must be able to
    manage SIS permissions on the account."""
    if login_sis_user_id is not None:
        data["login[sis_user_id]"] = login_sis_user_id

    # OPTIONAL - login[integration_id]
    """Integration ID for the login. To set this parameter, the caller must be able to
    manage SIS permissions on the account. The Integration ID is a secondary
    identifier useful for more complex SIS integrations."""
    if login_integration_id is not None:
        data["login[integration_id]"] = login_integration_id

    self.logger.debug("PUT /api/v1/accounts/{account_id}/logins/{id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("PUT", "/api/v1/accounts/{account_id}/logins/{id}".format(**path), data=data, params=params, no_data=True)
def schema(body_schema=None, body_required=False, query_schema=None,  # noqa
           content_types=None, default_body=None):
    """Decorator to parse and validate API body and query string.

    This decorator allows one to define the entire 'schema' for an API
    endpoint.

    :keyword body_schema:
        Callable that accepts raw data and returns the coerced (or unchanged)
        body content if it is valid. Otherwise, an error should be raised.
    :keyword body_required:
        `True` if some body content is required by the request. Defaults to
        `False`.
    :keyword query_schema:
        Callable that accepts raw data and returns the coerced (or unchanged)
        query string content if it is valid. Otherwise, an error should be
        raised.
    :keyword content_types:
        List of allowed contents types for request body contents. Defaults to
        `['application/json']`.
    :keyword default_body:
        Default body value to pass to the endpoint handler if `body_required`
        is `True` but no body was given. This can be useful for specifying
        complex request body defaults.
    """
    if not content_types:
        content_types = ['application/json']
    # Only JSON bodies are implemented; refuse anything else up front.
    if not all('json' in t for t in content_types):
        raise NotImplementedError("Only 'json' body supported.")

    def deco(func):
        """Return a decorated callable."""
        def wrapped(*args, **kwargs):
            """Validate/coerce request body and parameters."""
            try:
                # validate the request body per the schema (if applicable):
                try:
                    body = bottle.request.json
                except ValueError as exc:
                    # Malformed JSON becomes a 400 rather than a 500.
                    raise simpl_rest.HTTPError(
                        body=str(exc),
                        status=400,
                        exception=exc,
                        traceback=traceback.format_exc(),
                    )
                if body is None:
                    body = default_body
                if body_required and not body:
                    raise simpl_rest.HTTPError(
                        body='Request body cannot be empty.',
                        status=400,
                    )
                if body_schema:
                    try:
                        body = body_schema(body)
                    except volup.MultipleInvalid as exc:
                        raise MultiValidationError(exc.errors)
                # validate the query string per the schema (if application):
                query = bottle.request.query.dict  # pylint: disable=no-member
                if query_schema is not None:
                    try:
                        query = query_schema(query)
                    except volup.MultipleInvalid as exc:
                        raise MultiValidationError(exc.errors)
                if not query:
                    # If the query dict is empty, just set it to None.
                    query = None
                # Conditionally add 'body' or 'schema' to kwargs.
                if any([body_schema, body_required, default_body]):
                    kwargs['body'] = body
                if query_schema:
                    kwargs['query'] = query
                return func(
                    *args,
                    **kwargs
                )
            except MultiValidationError as exc:
                # Schema violations surface uniformly as HTTP 400.
                raise simpl_rest.HTTPError(
                    body=str(exc),
                    status=400,
                    exception=exc,
                    traceback=traceback.format_exc(),
                )
        return wrapped
    return deco
java | public void insertAll(List<E> entries) {
if(!initialized && !entries.isEmpty()) {
initialize(entries.get(0));
}
for(E entry : entries) {
insert(entry, false);
}
} |
def load_genotypes(self):
    """This really just initializes the file by opening it up.

    Chooses the gzip-compressed variant of the tped file when the global
    DataParser.compressed_pedigree flag is set, then runs filter_missing().
    """
    if DataParser.compressed_pedigree:
        # NOTE(review): gzip.open(..., 'rb') yields bytes while the plain
        # open() below yields str on Python 3 — confirm downstream readers
        # handle both.
        self.genotype_file = gzip.open("%s.gz" % self.tped_file, 'rb')
    else:
        self.genotype_file = open(self.tped_file)

    self.filter_missing()
/**
 * Average the numbers in {@code numberList} as doubles, delegating to the
 * shared {@code cal} helper with {@code DoubleStream::average}.
 */
public static <N extends Number> Number average(List<N> numberList) {
    return cal(numberList, DoubleStream::average);
}
def _set_least_batch_id(self, txn_signature):
    """Set the first batch id that doesn't have all results.

    Advances self._least_batch_id_wo_results forward only when every batch
    between the current least candidate and this transaction's batch has
    complete results.

    Args:
        txn_signature (str): The txn identifier of the transaction with
            results being set.
    """
    batch = self._batches_by_txn_id[txn_signature]

    least_index = self._index_of_batch(
        self._batches_by_id[self._least_batch_id_wo_results].batch)

    current_index = self._index_of_batch(batch)
    all_prior = False

    # Batches at or before the current least candidate cannot move it.
    if current_index <= least_index:
        return
    # Test to see if all batches from the least_batch to
    # the prior batch to the current batch have results.
    if all(
            all(t.header_signature in self._txn_results
                for t in b.transactions)
            for b in self._batches[least_index:current_index]):
        all_prior = True
    if not all_prior:
        return
    # Default to this batch in case everything after it is complete too.
    possible_least = self._batches[current_index].header_signature
    # Find the first batch from the current batch on, that doesn't have
    # all results.
    for b in self._batches[current_index:]:
        if not all(t.header_signature in self._txn_results
                   for t in b.transactions):
            possible_least = b.header_signature
            break
    self._least_batch_id_wo_results = possible_least
/**
 * Enable or disable every project-specific FindBugs configuration control to
 * match the "use project settings" selection state.
 */
protected void setProjectEnabled(boolean selection) {
    // chkEnableFindBugs.setEnabled(selection);
    // chkRunAtFullBuild.setEnabled(selection &&
    // chkEnableFindBugs.getSelection());
    if (enableProjectCheck != null) {
        // this link should always be enabled
        //workspaceSettingsLink.setEnabled(!selection);
    }
    detectorTab.setEnabled(selection);
    filterFilesTab.setEnabled(selection);
    reportConfigurationTab.setEnabled(selection);
    restoreDefaultsButton.setEnabled(selection);
    effortViewer.getCombo().setEnabled(selection);
}
/**
 * Serializes an ElasticSearch datastore definition to an
 * {@code <elasticsearch-datastore>} XML element.
 * <p>
 * Keystore path and password are only written when SSL is enabled;
 * passwords are stored in encoded form.
 *
 * @param datastore the datastore to serialize
 * @return the populated XML element
 */
@SuppressWarnings("deprecation")
public Element toElement(final ElasticSearchDatastore datastore) {
    final Element ds = getDocument().createElement("elasticsearch-datastore");
    ds.setAttribute("name", datastore.getName());
    if (!Strings.isNullOrEmpty(datastore.getDescription())) {
        ds.setAttribute("description", datastore.getDescription());
    }
    appendElement(ds, "hostname", datastore.getHostname());
    appendElement(ds, "port", datastore.getPort());
    appendElement(ds, "cluster-name", datastore.getClusterName());
    appendElement(ds, "index-name", datastore.getIndexName());
    appendElement(ds, "client-type", datastore.getClientType().name());
    appendElement(ds, "username", datastore.getUsername());
    appendElement(ds, "password", encodePassword(datastore.getPassword()));
    appendElement(ds, "ssl", datastore.getSsl());
    if (datastore.getSsl()) {
        appendElement(ds, "keystore-path", datastore.getKeystorePath());
        appendElement(ds, "keystore-password", encodePassword(datastore.getKeystorePassword()));
    }
    return ds;
}
/**
 * Adds a single test query (input word plus its expected output) to the
 * sample set.
 *
 * @param input the input word
 * @param expectedOutput the output expected for that word
 * @return {@code this}, for fluent chaining
 */
public SampleSetEQOracle<I, D> add(Word<I> input, D expectedOutput) {
    testQueries.add(new DefaultQuery<>(input, expectedOutput));
    return this;
}
def touch_multi(self, keys, ttl=0):
    """Touch multiple keys. Multi variant of :meth:`touch`

    :param keys: the keys to touch
    :type keys: :ref:`iterable<argtypes>`.
        ``keys`` can also be a dictionary with values being
        integers, in which case the value for each key will be used
        as the TTL instead of the global one (i.e. the one passed to
        this function)
    :param int ttl: The new expiration time

    :return: A :class:`~.MultiResult` object

    Update three keys to expire in 10 seconds ::

        cb.touch_multi(("key1", "key2", "key3"), ttl=10)

    Update three keys with different expiration times ::

        cb.touch_multi({"foo" : 1, "bar" : 5, "baz" : 10})

    .. seealso:: :meth:`touch`
    """
    # Delegate directly to the base implementation.
    return _Base.touch_multi(self, keys, ttl=ttl)
/**
 * Submits the removal of an entry that expired via max-idle, making sure
 * only one removal per key is in flight at a time.
 * <p>
 * A {@link CompletableFuture} is registered in the {@code expiring} map as
 * an in-progress marker; if another removal for the same key is already
 * pending, that pending future (or a completed-true future for a lifespan
 * removal) is returned instead of starting a second removal.
 *
 * @param key the key of the expired entry
 * @param value the value observed at expiration time
 * @param maxIdle the max-idle setting, used only for tracing
 * @param skipLocking whether the removal should skip locking
 * @return a future completing with the result of the removal
 */
CompletableFuture<Boolean> actualRemoveMaxIdleExpireEntry(K key, V value, long maxIdle, boolean skipLocking) {
    CompletableFuture<Boolean> completableFuture = new CompletableFuture<>();
    Object expiringObject = expiring.putIfAbsent(key, completableFuture);
    if (expiringObject == null) {
        if (trace) {
            log.tracef("Submitting expiration removal for key %s which had maxIdle of %s", toStr(key), maxIdle);
        }
        // Whatever the outcome, drop the in-progress marker when done.
        completableFuture.whenComplete((b, t) -> expiring.remove(key, completableFuture));
        try {
            AdvancedCache<K, V> cacheToUse = skipLocking ? cache.withFlags(Flag.SKIP_LOCKING) : cache;
            CompletableFuture<Boolean> expired = cacheToUse.removeMaxIdleExpired(key, value);
            expired.whenComplete((b, t) -> {
                if (t != null) {
                    completableFuture.completeExceptionally(t);
                } else {
                    completableFuture.complete(b);
                }
            });
            return completableFuture;
        } catch (Throwable t) {
            completableFuture.completeExceptionally(t);
            throw t;
        }
    } else if (expiringObject instanceof CompletableFuture) {
        // This means there was another thread that found it had expired via max idle
        return (CompletableFuture<Boolean>) expiringObject;
    } else {
        // If it wasn't a CompletableFuture we had a lifespan removal occurring so it will be removed for sure
        return CompletableFutures.completedTrue();
    }
}
/**
 * Locates all configuration sources declared by the given annotation
 * wrapper, resolving each against the annotation's resolver plus its
 * bootstrap and default property maps.
 *
 * @param configuration the configuration annotation wrapper; must not be null
 * @return the resolved sources; never empty -- an {@link UnfoundSource}
 *         placeholder is added when nothing resolves
 * @throws IllegalArgumentException if {@code configuration} is null
 */
protected List<ISource> locate(ConfigurationWrapper configuration) {
    if(configuration == null) {
        throw new IllegalArgumentException("A non-null Configuration annotation must be provided");
    }
    // create resolver from configuration annotation's resolver element
    ResolverWrapper resolverWrapper = configuration.resolver();
    PropertyResolver resolver = this.resolverFactory.createPropertyResolver(resolverWrapper);
    Map<Object,Object> bootstrapMap = this.resolverFactory.getBootstrapProperties(resolverWrapper);
    Map<Object,Object> defaultMap = this.resolverFactory.getDefaultProperties(resolverWrapper);
    // found sources
    List<ISource> foundSources = new ArrayList<ISource>(0);
    // create sources
    List<Source> sources = new ArrayList<Source>(Arrays.asList(configuration.sources()));
    // resolve sources as normal
    for(Source source : sources) {
        ISource found = this.resloveSource(source, resolver, bootstrapMap, defaultMap);
        if(found != null) {
            foundSources.add(found);
        }
    }
    // fix no sources found, a source SHOULD always be returned
    if(foundSources.isEmpty()) {
        foundSources.add(new UnfoundSource());
    }
    return foundSources;
}
def select_module(self, module_id):
    '''Select module(s) and give access to them.

    Parameters
    ----------
    module_id : None, str or iterable of str
        ``None`` selects all modules; a string selects a single module or
        a TX module group; an iterable selects exactly the listed modules.

    Raises
    ------
    ValueError
        If a given module ID is unknown.
    RuntimeError
        If another module handle is already active, or the handle cannot
        be resolved.
    '''
    if not isinstance(module_id, basestring) and isinstance(module_id, Iterable) and set(module_id) - set(self._modules):
        # BUG FIX: the original format string had no %s placeholder, so it
        # raised TypeError instead of the intended ValueError.
        raise ValueError('Module IDs invalid: %s' % ", ".join(set(module_id) - set(self._modules)))
    if isinstance(module_id, basestring) and module_id not in self._module_cfgs:
        raise ValueError('Module ID "%s" is not valid' % module_id)
    if self._current_module_handle is not None:
        raise RuntimeError('Module handle "%s" cannot be set because another module is active' % module_id)
    if module_id is None:
        self._selected_modules = self._modules.keys()
    elif not isinstance(module_id, basestring) and isinstance(module_id, Iterable):
        self._selected_modules = module_id
    elif module_id in self._modules:
        self._selected_modules = [module_id]
    elif module_id in self._tx_module_groups:
        self._selected_modules = self._tx_module_groups[module_id]
    else:
        # BUG FIX: the original constructed the RuntimeError without raising it,
        # silently continuing with an invalid handle.
        raise RuntimeError('Cannot open files. Module handle "%s" is not valid.' % self.current_module_handle)
    # FIFO readout
    self._selected_fifos = list(set([module_cfg['FIFO'] for (name, module_cfg) in self._module_cfgs.items() if name in self._selected_modules]))
    # Module filter functions dict for quick lookup
    self._readout_fifos = []
    self._filter = []
    self._converter = []
    for selected_module_id in self._selected_modules:
        module_cfg = self._module_cfgs[selected_module_id]
        self._readout_fifos.append(module_cfg['FIFO'])
        if 'tdc_channel' not in module_cfg:
            # BUG FIX: original read "tdc_filter = false" (lowercase), which
            # raised NameError whenever a module had no TDC configured.
            # NOTE(review): confirm logical_or() accepts a constant False
            # in place of a filter function.
            tdc_filter = False
            self._converter.append(None)
        elif module_cfg['tdc_channel'] is None:
            tdc_filter = is_tdc_word
            self._converter.append(convert_tdc_to_channel(channel=module_cfg['tdc_channel']))  # for the raw data analyzer
        else:
            tdc_filter = logical_and(is_tdc_word, is_tdc_from_channel(module_cfg['tdc_channel']))
            self._converter.append(convert_tdc_to_channel(channel=module_cfg['tdc_channel']))  # for the raw data analyzer
        if 'rx_channel' not in module_cfg:
            self._filter.append(logical_or(is_trigger_word, tdc_filter))
        elif module_cfg['rx_channel'] is None:
            self._filter.append(logical_or(is_trigger_word, logical_or(tdc_filter, is_fe_word)))
        else:
            self._filter.append(logical_or(is_trigger_word, logical_or(tdc_filter, logical_and(is_fe_word, is_data_from_channel(module_cfg['rx_channel'])))))
    # select readout channels and report sync status only from actively selected modules
    self._enabled_fe_channels = list(set([config['RX'] for (name, config) in self._module_cfgs.items() if name in self._selected_modules]))
    # enabling specific TX channels
    tx_channels = list(set([1 << config['tx_channel'] for (name, config) in self._module_cfgs.items() if name in self._selected_modules]))
    if tx_channels:
        self.dut['TX']['OUTPUT_ENABLE'] = reduce(lambda x, y: x | y, tx_channels)
    else:
        self.dut['TX']['OUTPUT_ENABLE'] = 0
    if not isinstance(module_id, basestring) and isinstance(module_id, Iterable):
        self._current_module_handle = None
    else:
        self._current_module_handle = module_id
    if module_id is not None and isinstance(module_id, basestring):
        current_thread().name = module_id
/**
 * Computes the serialized size of the given object without a field tag:
 * the protobuf-encoded payload size plus the varint that encodes that size.
 *
 * @param o the object to measure; {@code null} contributes size 0
 * @return the total encoded size in bytes
 * @throws RuntimeException wrapping any {@link IOException} from the codec
 */
public static int computeObjectSizeNoTag(Object o) {
    int size = 0;
    if (o == null) {
        return size;
    }
    Class cls = o.getClass();
    Codec target = ProtobufProxy.create(cls);
    try {
        size = target.size(o);
        // add the length-prefix varint on top of the payload size
        size = size + CodedOutputStream.computeRawVarint32Size(size);
        return size;
    } catch (IOException e) {
        throw new RuntimeException(e.getMessage(), e);
    }
}
def WriteEventBody(self, event):
    """Writes the body of an event object to the output.

    Events without a timestamp are skipped. Rows are inserted one at a
    time; the transaction is committed every 10000 inserts.

    Args:
        event (EventObject): event.
    """
    if not hasattr(event, 'timestamp'):
        return
    row = self._GetSanitizedEventValues(event)
    try:
        self._cursor.execute(self._INSERT_QUERY, row)
    except MySQLdb.Error as exception:
        # Best effort: log and continue with the next event.
        logger.warning(
            'Unable to insert into database with error: {0!s}.'.format(
                exception))
    self._count += 1
    # TODO: Experiment if committing the current transaction
    # every 10000 inserts is the optimal approach.
    if self._count % 10000 == 0:
        self._connection.commit()
    if self._set_status:
        self._set_status('Inserting event: {0:d}'.format(self._count))
/**
 * Validates the pseudo-header set of an HTTP/2 request.
 * <p>
 * A CONNECT request is valid only with {@code :authority} and without
 * {@code :path}/{@code :scheme} (this also marks the stream as a CONNECT
 * stream); any other request must carry {@code :method}, {@code :path}
 * and {@code :scheme}.
 *
 * @param pseudoHeaders the received pseudo-headers
 * @return {@code true} if the pseudo-header combination is valid
 */
private boolean isValidH2Request(HashMap<String, String> pseudoHeaders) {
    if (MethodValues.CONNECT.getName().equals(pseudoHeaders.get(HpackConstants.METHOD))) {
        if (pseudoHeaders.get(HpackConstants.PATH) == null && pseudoHeaders.get(HpackConstants.SCHEME) == null
            && pseudoHeaders.get(HpackConstants.AUTHORITY) != null) {
            this.isConnectStream = true;
            return true;
        }
        return false;
    }
    if (pseudoHeaders.get(HpackConstants.METHOD) != null && pseudoHeaders.get(HpackConstants.PATH) != null &&
        pseudoHeaders.get(HpackConstants.SCHEME) != null) {
        return true;
    }
    return false;
}
java | @Override
public Short unmarshal(String object) {
return Short.valueOf(Short.parseShort(object));
} |
/**
 * Replaces the value for the given key, delegating to the parent cache.
 *
 * @param key the cache key
 * @param value the new value
 * @return whatever the parent cache's replace returns (per its contract)
 */
public Object replace(CacheKey key, Object value)
{
    return parentCache.replace(key, value);
}
/**
 * Initializes the decompression string table with the 256 single-byte
 * root entries and resets the code width to 9 bits. The next free table
 * index starts at 258 -- entries 256 and 257 are skipped (presumably the
 * LZW clear and end-of-information codes; confirm against the codec).
 */
public void initializeStringTable() {
    stringTable = new byte[4096][];
    for (int i=0; i<256; i++) {
        stringTable[i] = new byte[1];
        stringTable[i][0] = (byte)i;
    }
    tableIndex = 258;
    bitsToGet = 9;
}
/**
 * Refreshes the cached mapping metadata: re-reads the {@code @Embedded}
 * and {@code @Entity} annotations and records the first field annotated
 * with {@code @Id} (if any) as the id field.
 */
public void update() {
    embeddedAn = (Embedded) getAnnotation(Embedded.class);
    entityAn = (Entity) getFirstAnnotation(Entity.class);
    // polymorphicAn = (Polymorphic) getAnnotation(Polymorphic.class);
    final List<MappedField> fields = getFieldsAnnotatedWith(Id.class);
    if (fields != null && !fields.isEmpty()) {
        idField = fields.get(0).getField();
    }
}
/**
 * Builds an {@link AnnualTimeZoneRule} from one or more VTIMEZONE RRULE
 * lines (RFC 5545 recurrence rules).
 * <p>
 * Supported shapes: a single rule with BYMONTHDAY/BYDAY combinations, or
 * up to 7 rules that together describe 7 consecutive days (possibly
 * spanning a month boundary) with the same BYDAY. Missing fields are
 * filled in from the DTSTART value. Returns {@code null} for any rule
 * shape that is not supported.
 *
 * @param tzname the time zone name for the rule
 * @param rawOffset the raw (standard) UTC offset in milliseconds
 * @param dstSavings the DST savings in milliseconds
 * @param start the DTSTART time in milliseconds
 * @param dates the RRULE strings (first entry is the primary rule)
 * @param fromOffset the offset used to convert {@code start} to local fields
 * @return the constructed rule, or {@code null} if unsupported/invalid
 */
private static TimeZoneRule createRuleByRRULE(String tzname,
        int rawOffset, int dstSavings, long start, List<String> dates, int fromOffset) {
    if (dates == null || dates.size() == 0) {
        return null;
    }
    // Parse the first rule
    String rrule = dates.get(0);
    long until[] = new long[1];
    int[] ruleFields = parseRRULE(rrule, until);
    if (ruleFields == null) {
        // Invalid RRULE
        return null;
    }
    int month = ruleFields[0];
    int dayOfWeek = ruleFields[1];
    int nthDayOfWeek = ruleFields[2];
    int dayOfMonth = ruleFields[3];
    if (dates.size() == 1) {
        // No more rules
        if (ruleFields.length > 4) {
            // Multiple BYMONTHDAY values
            if (ruleFields.length != 10 || month == -1 || dayOfWeek == 0) {
                // Only support the rule using 7 continuous days
                // BYMONTH and BYDAY must be set at the same time
                return null;
            }
            int firstDay = 31; // max possible number of dates in a month
            int days[] = new int[7];
            for (int i = 0; i < 7; i++) {
                days[i] = ruleFields[3 + i];
                // Resolve negative day numbers. A negative day number should
                // not be used in February, but if we see such case, we use 28
                // as the base.
                days[i] = days[i] > 0 ? days[i] : MONTHLENGTH[month] + days[i] + 1;
                firstDay = days[i] < firstDay ? days[i] : firstDay;
            }
            // Make sure days are continuous
            for (int i = 1; i < 7; i++) {
                boolean found = false;
                for (int j = 0; j < 7; j++) {
                    if (days[j] == firstDay + i) {
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    // days are not continuous
                    return null;
                }
            }
            // Use DOW_GEQ_DOM rule with firstDay as the start date
            dayOfMonth = firstDay;
        }
    } else {
        // Check if BYMONTH + BYMONTHDAY + BYDAY rule with multiple RRULE lines.
        // Otherwise, not supported.
        if (month == -1 || dayOfWeek == 0 || dayOfMonth == 0) {
            // This is not the case
            return null;
        }
        // Parse the rest of rules if number of rules is not exceeding 7.
        // We can only support 7 continuous days starting from a day of month.
        if (dates.size() > 7) {
            return null;
        }
        // Note: To check valid date range across multiple rule is a little
        // bit complicated. For now, this code is not doing strict range
        // checking across month boundary
        int earliestMonth = month;
        int daysCount = ruleFields.length - 3;
        int earliestDay = 31;
        for (int i = 0; i < daysCount; i++) {
            int dom = ruleFields[3 + i];
            dom = dom > 0 ? dom : MONTHLENGTH[month] + dom + 1;
            earliestDay = dom < earliestDay ? dom : earliestDay;
        }
        int anotherMonth = -1;
        for (int i = 1; i < dates.size(); i++) {
            rrule = dates.get(i);
            long[] unt = new long[1];
            int[] fields = parseRRULE(rrule, unt);
            // If UNTIL is newer than previous one, use the one
            if (unt[0] > until[0]) {
                until = unt;
            }
            // Check if BYMONTH + BYMONTHDAY + BYDAY rule
            if (fields[0] == -1 || fields[1] == 0 || fields[3] == 0) {
                return null;
            }
            // Count number of BYMONTHDAY
            int count = fields.length - 3;
            if (daysCount + count > 7) {
                // We cannot support BYMONTHDAY more than 7
                return null;
            }
            // Check if the same BYDAY is used. Otherwise, we cannot
            // support the rule
            if (fields[1] != dayOfWeek) {
                return null;
            }
            // Check if the month is same or right next to the primary month
            if (fields[0] != month) {
                if (anotherMonth == -1) {
                    int diff = fields[0] - month;
                    if (diff == -11 || diff == -1) {
                        // Previous month
                        anotherMonth = fields[0];
                        earliestMonth = anotherMonth;
                        // Reset earliest day
                        earliestDay = 31;
                    } else if (diff == 11 || diff == 1) {
                        // Next month
                        anotherMonth = fields[0];
                    } else {
                        // The day range cannot exceed more than 2 months
                        return null;
                    }
                } else if (fields[0] != month && fields[0] != anotherMonth) {
                    // The day range cannot exceed more than 2 months
                    return null;
                }
            }
            // If ealier month, go through days to find the earliest day
            if (fields[0] == earliestMonth) {
                for (int j = 0; j < count; j++) {
                    int dom = fields[3 + j];
                    dom = dom > 0 ? dom : MONTHLENGTH[fields[0]] + dom + 1;
                    earliestDay = dom < earliestDay ? dom : earliestDay;
                }
            }
            daysCount += count;
        }
        if (daysCount != 7) {
            // Number of BYMONTHDAY entries must be 7
            return null;
        }
        month = earliestMonth;
        dayOfMonth = earliestDay;
    }
    // Calculate start/end year and missing fields
    int[] dfields = Grego.timeToFields(start + fromOffset, null);
    int startYear = dfields[0];
    if (month == -1) {
        // If MYMONTH is not set, use the month of DTSTART
        month = dfields[1];
    }
    if (dayOfWeek == 0 && nthDayOfWeek == 0 && dayOfMonth == 0) {
        // If only YEARLY is set, use the day of DTSTART as BYMONTHDAY
        dayOfMonth = dfields[2];
    }
    int timeInDay = dfields[5];
    int endYear = AnnualTimeZoneRule.MAX_YEAR;
    if (until[0] != MIN_TIME) {
        Grego.timeToFields(until[0], dfields);
        endYear = dfields[0];
    }
    // Create the AnnualDateTimeRule
    DateTimeRule adtr = null;
    if (dayOfWeek == 0 && nthDayOfWeek == 0 && dayOfMonth != 0) {
        // Day in month rule, for example, 15th day in the month
        adtr = new DateTimeRule(month, dayOfMonth, timeInDay, DateTimeRule.WALL_TIME);
    } else if (dayOfWeek != 0 && nthDayOfWeek != 0 && dayOfMonth == 0) {
        // Nth day of week rule, for example, last Sunday
        adtr = new DateTimeRule(month, nthDayOfWeek, dayOfWeek, timeInDay, DateTimeRule.WALL_TIME);
    } else if (dayOfWeek != 0 && nthDayOfWeek == 0 && dayOfMonth != 0) {
        // First day of week after day of month rule, for example,
        // first Sunday after 15th day in the month
        adtr = new DateTimeRule(month, dayOfMonth, dayOfWeek, true, timeInDay, DateTimeRule.WALL_TIME);
    } else {
        // RRULE attributes are insufficient
        return null;
    }
    return new AnnualTimeZoneRule(tzname, rawOffset, dstSavings, adtr, startYear, endYear);
}
/**
 * Builds a redirect URI for the given file, choosing a source datanode
 * via {@code pickSrcDatanode} and delegating to the string-based overload.
 *
 * @param i the file to serve
 * @param ugi the requesting user's identity
 * @param nnproxy client protocol to the namenode
 * @param request the originating HTTP request
 * @return the URI pointing at the selected datanode
 * @throws IOException on namenode communication failure
 * @throws URISyntaxException if the constructed URI is malformed
 */
protected URI createUri(FileStatus i, UnixUserGroupInformation ugi,
        ClientProtocol nnproxy, HttpServletRequest request)
        throws IOException, URISyntaxException {
    return createUri(i.getPath().toString(),
            pickSrcDatanode(i, nnproxy), ugi, request);
}
/**
 * Widens this choice into a {@code Choice8} with an additional (never
 * populated) type parameter {@code H}, re-tagging each existing case via
 * the corresponding {@code Choice8} constructor.
 * <p>
 * NOTE(review): seven constructor references are passed to {@code match},
 * which matches a seven-way choice diverging to eight -- confirm the
 * enclosing type is the seven-ary variant.
 */
@Override
public <H> Choice8<A, B, C, D, E, F, G, H> diverge() {
    return match(Choice8::a, Choice8::b, Choice8::c, Choice8::d, Choice8::e, Choice8::f, Choice8::g);
}
/**
 * Returns the oldest build across all jobs, compared by timestamp.
 *
 * @return the earliest {@link Run}, or {@code null} if no job has builds
 */
@SuppressWarnings(UNUSED)
@CheckForNull
@Exported
public Run getFirstBuild() {
    Run retVal = null;
    for (Job job : getAllJobs()) {
        Run run = job.getFirstBuild();
        // keep the run with the earliest timestamp seen so far
        if (run != null && (retVal == null || run.getTimestamp().before(retVal.getTimestamp()))) {
            retVal = run;
        }
    }
    return retVal;
}
def delete(self, session, commit=True, soft=True):
    """
    Remove this row from the database.

    :param session: flask_sqlalchemy session object
    :param commit: whether to commit the session after the operation
    :param soft: when True, mark the row as removed by setting
        ``time_removed`` instead of physically deleting it
    """
    if not soft:
        # Hard delete: remove the row itself.
        session.delete(self)
    else:
        # Soft delete: record the removal timestamp, keep the row.
        self.time_removed = sqlalchemy.func.unix_timestamp()
    if commit:
        session.commit()
/**
 * Initializes this storage directory's metadata from disk: creates the
 * directory if needed, registers a {@link BlockMeta} for every file whose
 * name parses as a block id, and cleans up anything else found there
 * (stray subdirectories and unparsable files are deleted).
 *
 * @throws BlockAlreadyExistsException if a block id is registered twice
 * @throws IOException on directory creation failure
 * @throws WorkerOutOfSpaceException if block metadata cannot be added
 */
private void initializeMeta() throws BlockAlreadyExistsException, IOException,
    WorkerOutOfSpaceException {
    // Create the storage directory path
    boolean isDirectoryNewlyCreated = FileUtils.createStorageDirPath(mDirPath,
        ServerConfiguration.get(PropertyKey.WORKER_DATA_FOLDER_PERMISSIONS));
    if (isDirectoryNewlyCreated) {
        LOG.info("Folder {} was created!", mDirPath);
    }
    File dir = new File(mDirPath);
    File[] paths = dir.listFiles();
    if (paths == null) {
        return;
    }
    for (File path : paths) {
        if (!path.isFile()) {
            // Unexpected subdirectory: log and remove it.
            LOG.error("{} in StorageDir is not a file", path.getAbsolutePath());
            try {
                // TODO(calvin): Resolve this conflict in class names.
                org.apache.commons.io.FileUtils.deleteDirectory(path);
            } catch (IOException e) {
                LOG.error("can not delete directory {}", path.getAbsolutePath(), e);
            }
        } else {
            try {
                // File names are expected to be the numeric block id.
                long blockId = Long.parseLong(path.getName());
                addBlockMeta(new BlockMeta(blockId, path.length(), this));
            } catch (NumberFormatException e) {
                LOG.error("filename of {} in StorageDir can not be parsed into long",
                    path.getAbsolutePath(), e);
                if (path.delete()) {
                    LOG.warn("file {} has been deleted", path.getAbsolutePath());
                } else {
                    LOG.error("can not delete file {}", path.getAbsolutePath());
                }
            }
        }
    }
}
/**
 * Wraps the contents of the given buffer into a {@link RawMessage} and
 * stamps it with a header carrying the sending node id.
 * <p>
 * The buffer is flipped before reading and cleared afterwards, so it can
 * be reused by the caller.
 *
 * @param sendingNodeId the id of the node sending the message
 * @param buffer the buffer holding the payload (in write mode on entry)
 * @return the constructed message with its header set
 */
public static RawMessage toRawMessage(final short sendingNodeId, final ByteBuffer buffer) {
    buffer.flip();
    final RawMessage message = new RawMessage(buffer.limit());
    message.put(buffer, false);
    buffer.clear();
    final RawMessageHeader header = new RawMessageHeader(sendingNodeId, (short) 0, (short) message.length());
    message.header(header);
    return message;
}
def randomly_init_variable(tax_benefit_system, input_dataframe_by_entity, variable_name, max_value, condition = None, seed = None):
    """
    Initialise a variable with random values (from 0 to max_value).
    If a condition vector is provided, only set the value of persons or groups for which condition is True.

    Exemple:

    >>> from openfisca_survey_manager.input_dataframe_generator import make_input_dataframe_by_entity
    >>> from openfisca_country_template import CountryTaxBenefitSystem
    >>> tbs = CountryTaxBenefitSystem()
    >>> input_dataframe_by_entity = make_input_dataframe_by_entity(tbs, 400, 100)
    >>> randomly_init_variable(tbs, input_dataframe_by_entity, 'salary', max_value = 50000, condition = "household_role == 'first_parent'")  # Randomly set a salaire_net for all persons between 0 and 50000?
    >>> sorted(input_dataframe_by_entity['person'].columns.tolist())
    ['household_id', 'household_legacy_role', 'household_role', 'person_id', 'salary']
    >>> input_dataframe_by_entity['person'].salary.max() <= 50000
    True
    >>> len(input_dataframe_by_entity['person'].salary)
    400
    >>> randomly_init_variable(tbs, input_dataframe_by_entity, 'rent', max_value = 1000)
    >>> sorted(input_dataframe_by_entity['household'].columns.tolist())
    ['rent']
    >>> input_dataframe_by_entity['household'].rent.max() <= 1000
    True
    >>> input_dataframe_by_entity['household'].rent.max() >= 1
    True
    >>> len(input_dataframe_by_entity['household'].rent)
    100
    """
    variable = tax_benefit_system.variables[variable_name]
    entity = variable.entity
    if condition is None:
        condition = True
    else:
        # Evaluate the condition expression against the entity's dataframe.
        condition = input_dataframe_by_entity[entity.key].eval(condition).values
    if seed is None:
        seed = 42
    np.random.seed(seed)
    count = len(input_dataframe_by_entity[entity.key])
    # Multiplying by the boolean condition zeroes out the excluded rows.
    value = (np.random.rand(count) * max_value * condition).astype(variable.dtype)
    input_dataframe_by_entity[entity.key][variable_name] = value
/**
 * Sets a single property on the argument unit: reads its property bag,
 * updates the entry, and writes the bag back.
 *
 * @param argumentUnit the unit whose properties are modified
 * @param propertyName the property key
 * @param propertyValue the new value
 * @return the previous value of the property, or {@code null} if unset
 */
public static String setProperty(ArgumentUnit argumentUnit, String propertyName,
    String propertyValue)
{
    Properties properties = ArgumentUnitUtils.getProperties(argumentUnit);
    String result = (String) properties.setProperty(propertyName, propertyValue);
    ArgumentUnitUtils.setProperties(argumentUnit, properties);
    return result;
}
/**
 * Executes the query and returns the first column of the first row as a
 * string (via {@code StringHandler}).
 *
 * @param sql the SQL to execute
 * @param params positional query parameters
 * @return the single string result
 * @throws SQLException on database error
 */
public String queryString(String sql, Object... params) throws SQLException {
    return query(sql, new StringHandler(), params);
}
def setCenter(self, loc):
    """Move this region so it is centered on ``loc``.

    The offset from the current center to ``loc`` is applied to the
    top-left corner, preserving the region's size.
    """
    offset = self.getCenter().getOffset(loc)  # Calculate offset from current center
    return self.setLocation(self.getTopLeft().offset(offset))  # Move top left corner by the same offset
/**
 * Computes the distance between two vectors as one minus their weighted
 * Pearson correlation coefficient (0 for perfectly correlated vectors).
 *
 * @param v1 first vector
 * @param v2 second vector
 * @return {@code 1 - weightedPearson(v1, v2)}
 */
@Override
public double distance(NumberVector v1, NumberVector v2) {
    return 1 - PearsonCorrelation.weightedCoefficient(v1, v2, weights);
}
/**
 * Serializes an {@link InputStream} value by wrapping it in an
 * {@code IOFromInputStream} (run on the unmanaged task manager) and
 * delegating to the IO-readable serialization path.
 *
 * @param context the serialization context
 * @param in the stream to serialize
 * @param path the serialization path of the value
 * @param rules active serialization rules
 * @return a synchronization point completing when serialization is done
 */
@SuppressWarnings("resource")
protected ISynchronizationPoint<? extends Exception> serializeInputStreamValue(
    SerializationContext context, InputStream in, String path, List<SerializationRule> rules) {
    return serializeIOReadableValue(context, new IOFromInputStream(in, in.toString(), Threading.getUnmanagedTaskManager(), priority),
        path, rules);
}
/**
 * Removes one instance of the given component from the packing plan,
 * choosing the container by a scored round-robin policy: prefer
 * containers made up entirely of this component, then those with the
 * fewest instances, then the most homogeneous, then the highest id.
 * The next container id is derived from the container that lost the
 * instance.
 *
 * @param packingPlanBuilder the plan to modify
 * @param componentName the component to remove an instance of
 * @throws RuntimeException if no instance can be removed
 */
private void removeRRInstance(PackingPlanBuilder packingPlanBuilder,
        String componentName) throws RuntimeException {
    List<Scorer<Container>> scorers = new ArrayList<>();
    scorers.add(new HomogeneityScorer(componentName, true));  // all-same-component containers first
    scorers.add(new InstanceCountScorer());                   // then fewest instances
    scorers.add(new HomogeneityScorer(componentName, false)); // then most homogeneous
    scorers.add(new ContainerIdScorer(false));                // then highest container id
    this.containerId = nextContainerId(packingPlanBuilder.removeInstance(scorers, componentName));
}
def as_int(self, method):
    """
    This value as an int, rounded according to ``method``.

    :param method: rounding method
    :raises BasesValueError: on bad parameters
    :returns: a pair of the signed int value and the rounding relation
        of that value to the exact value (as produced by ``rounded``)
    :rtype: tuple of int * relation
    """
    # Round to 0 fractional digits, then rebuild the integer in base 10.
    (new_radix, relation) = self.rounded(0, method)
    value = Nats.convert_to_int(new_radix.integer_part, new_radix.base)
    return (value * self.sign, relation)
def gc(cn, ns=None, lo=None, iq=None, ico=None, pl=None):
    """
    This function is a wrapper for
    :meth:`~pywbem.WBEMConnection.GetClass`.

    Retrieve a class.

    Parameters:

      cn (:term:`string` or :class:`~pywbem.CIMClassName`):
        Name of the class to be retrieved (case independent).
        If specified as a `CIMClassName` object, its `host` attribute will be
        ignored.

      ns (:term:`string`):
        Name of the CIM namespace to be used (case independent).
        If `None`, defaults to the namespace of the `cn` parameter if
        specified as a `CIMClassName`, or to the default namespace of the
        connection.

      lo (:class:`py:bool`):
        LocalOnly flag: Exclude inherited properties.
        `None` will cause the server default of `True` to be used.

      iq (:class:`py:bool`):
        IncludeQualifiers flag: Include qualifiers.
        `None` will cause the server default of `True` to be used.

      ico (:class:`py:bool`):
        IncludeClassOrigin flag: Include class origin information for
        properties and methods in the retrieved class.
        `None` will cause the server default of `False` to be used.

      pl (:term:`string` or :term:`py:iterable` of :term:`string`):
        PropertyList: Names of properties to be included (if not otherwise
        excluded). An empty iterable indicates to include no properties.
        If `None`, all properties will be included.

    Returns:

      :class:`~pywbem.CIMClass`:
        The retrieved class.
    """
    # Note: this helper intentionally shadows the stdlib `gc` name; it is
    # part of an interactive shorthand API.
    return CONN.GetClass(cn, ns,
                         LocalOnly=lo,
                         IncludeQualifiers=iq,
                         IncludeClassOrigin=ico,
                         PropertyList=pl)
/**
 * Asynchronously creates a message receiver for the given entity path
 * using the default receive mode.
 *
 * @param messagingFactory the factory to create the receiver from
 * @param entityPath the path of the messaging entity
 * @return a future completing with the created receiver
 */
public static CompletableFuture<IMessageReceiver> createMessageReceiverFromEntityPathAsync(MessagingFactory messagingFactory, String entityPath) {
    return createMessageReceiverFromEntityPathAsync(messagingFactory, entityPath, DEFAULTRECEIVEMODE);
}
/**
 * Logs a warning summarizing every dependency that has known
 * vulnerabilities: one line per affected dependency listing its
 * identifiers and the vulnerability names. Nothing is logged when no
 * dependency is vulnerable.
 *
 * @param projectName the project name to mention in the warning; may be
 *                    null/empty for the generic message
 * @param dependencies the scanned dependencies
 */
public static void showSummary(String projectName, Dependency[] dependencies) {
    final StringBuilder summary = new StringBuilder();
    for (Dependency d : dependencies) {
        final String ids = d.getVulnerabilities(true).stream()
                .map(v -> v.getName())
                .collect(Collectors.joining(", "));
        if (ids.length() > 0) {
            summary.append(d.getFileName()).append(" (");
            summary.append(Stream.concat(d.getSoftwareIdentifiers().stream(), d.getVulnerableSoftwareIdentifiers().stream())
                    .map(i -> i.getValue())
                    .collect(Collectors.joining(", ")));
            summary.append(") : ").append(ids).append(NEW_LINE);
        }
    }
    if (summary.length() > 0) {
        if (projectName == null || projectName.isEmpty()) {
            LOGGER.warn("\n\nOne or more dependencies were identified with known vulnerabilities:\n\n{}\n\n"
                    + "See the dependency-check report for more details.\n\n",
                    summary.toString());
        } else {
            LOGGER.warn("\n\nOne or more dependencies were identified with known vulnerabilities in {}:\n\n{}\n\n"
                    + "See the dependency-check report for more details.\n\n",
                    projectName,
                    summary.toString());
        }
    }
}
def compatible(self, a, b):
    """Return `True` if type *a* is compatible with type *b*.

    Two types are compatible when either one is the other (or one of the
    other's descendants), i.e. their closures under ``descendants``
    overlap.
    """
    closure_a = set([a] + self.descendants(a))
    closure_b = set([b] + self.descendants(b))
    return not closure_a.isdisjoint(closure_b)
/**
 * Builds the AST node for a query with a WITH clause: visits the inner
 * query-no-with node and re-wraps its body, order-by and limit together
 * with the optional WITH clause.
 *
 * @param context the parse-tree context for the query
 * @return the assembled {@link Query} node
 */
@Override
public Node visitQuery(SqlBaseParser.QueryContext context)
{
    Query body = (Query) visit(context.queryNoWith());
    return new Query(
            getLocation(context),
            visitIfPresent(context.with(), With.class),
            body.getQueryBody(),
            body.getOrderBy(),
            body.getLimit());
}
/**
 * Shrinks all parallel per-key statistics arrays so that only entries
 * whose key is contained in the given set remain, preserving the
 * original relative order. The arrays are cloned, reallocated at the new
 * capacity and compacted in a single pass; {@code size} is updated to
 * the number of surviving entries. No-op when the structure is empty.
 *
 * @param keys the keys to retain
 */
@SuppressWarnings("unchecked")
public void reduceToKeys(Set<String> keys) {
    if (size > 0) {
        // Snapshot the current contents before reallocating.
        int sizeCopy = size;
        String[] keyListCopy = keyList.clone();
        int[] errorNumberCopy = errorNumber.clone();
        HashMap<String, Integer>[] errorListCopy = errorList.clone();
        int[] sourceNumberListCopy = sourceNumberList.clone();
        T1[] advancedValueSumListCopy = advancedValueSumList.clone();
        T1[] advancedValueMaxListCopy = advancedValueMaxList.clone();
        T1[] advancedValueMinListCopy = advancedValueMinList.clone();
        T1[] advancedValueSumOfSquaresListCopy = advancedValueSumOfSquaresList
            .clone();
        T2[] advancedValueSumOfLogsListCopy = advancedValueSumOfLogsList.clone();
        long[] advancedValueNListCopy = advancedValueNList.clone();
        // Reallocate at the retained capacity.
        keyList = new String[keys.size()];
        errorNumber = new int[keys.size()];
        errorList = new HashMap[keys.size()];
        sourceNumberList = new int[keys.size()];
        advancedValueSumList = operations.createVector1(keys.size());
        advancedValueMaxList = operations.createVector1(keys.size());
        advancedValueMinList = operations.createVector1(keys.size());
        advancedValueSumOfSquaresList = operations.createVector1(keys.size());
        advancedValueSumOfLogsList = operations.createVector2(keys.size());
        advancedValueNList = new long[keys.size()];
        size = 0;
        // Compact: copy back only the entries whose key survives.
        for (int i = 0; i < sizeCopy; i++) {
            if (keys.contains(keyListCopy[i])) {
                keyList[size] = keyListCopy[i];
                errorNumber[size] = errorNumberCopy[i];
                errorList[size] = errorListCopy[i];
                sourceNumberList[size] = sourceNumberListCopy[i];
                advancedValueSumList[size] = advancedValueSumListCopy[i];
                advancedValueMaxList[size] = advancedValueMaxListCopy[i];
                advancedValueMinList[size] = advancedValueMinListCopy[i];
                advancedValueSumOfSquaresList[size] = advancedValueSumOfSquaresListCopy[i];
                advancedValueSumOfLogsList[size] = advancedValueSumOfLogsListCopy[i];
                advancedValueNList[size] = advancedValueNListCopy[i];
                size++;
            }
        }
    }
}
/**
 * Reads the whole input into the parser data, line by line, so later
 * stages can peek ahead over the complete content.
 *
 * @param parserData the accumulator receiving the lines
 * @param br the reader to consume; read to EOF but not closed here
 * @throws IOException on read failure
 */
protected void readFileData(final ParserData parserData, final BufferedReader br) throws IOException {
    // Read in the entire file so we can peek ahead later on
    String line;
    while ((line = br.readLine()) != null) {
        parserData.addLine(line);
    }
}
/**
 * Marshalls an {@code UpdateFacetRequest} field by field into the given
 * protocol marshaller.
 *
 * @param updateFacetRequest the request to marshall; must not be null
 * @param protocolMarshaller the target marshaller
 * @throws SdkClientException if the request is null or marshalling fails
 */
public void marshall(UpdateFacetRequest updateFacetRequest, ProtocolMarshaller protocolMarshaller) {
    if (updateFacetRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(updateFacetRequest.getSchemaArn(), SCHEMAARN_BINDING);
        protocolMarshaller.marshall(updateFacetRequest.getName(), NAME_BINDING);
        protocolMarshaller.marshall(updateFacetRequest.getAttributeUpdates(), ATTRIBUTEUPDATES_BINDING);
        protocolMarshaller.marshall(updateFacetRequest.getObjectType(), OBJECTTYPE_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
def process_from_file(signor_data_file, signor_complexes_file=None):
    """Process Signor interaction data from CSV files.

    Parameters
    ----------
    signor_data_file : str
        Path to the Signor interaction data file in CSV format.
    signor_complexes_file : str
        Path to the Signor complexes data in CSV format. If unspecified,
        Signor complexes will not be expanded to their constitutents.

    Returns
    -------
    indra.sources.signor.SignorProcessor
        SignorProcessor containing Statements extracted from the Signor data.
    """
    # Get generator over the CSV file (semicolon-delimited, header skipped)
    data_iter = read_unicode_csv(signor_data_file, delimiter=';', skiprows=1)
    complexes_iter = None
    if signor_complexes_file:
        complexes_iter = read_unicode_csv(signor_complexes_file, delimiter=';',
                                          skiprows=1)
    else:
        logger.warning('Signor complex mapping file not provided, Statements '
                       'involving complexes will not be expanded to members.')
    return _processor_from_data(data_iter, complexes_iter)
def get_decrpyted_path(encrypted_path, surfix=default_surfix):
    """
    Find the original path of encrypted file or dir.

    (NOTE(review): the name misspells "decrypted"; kept as-is because it
    is part of the public API.)

    Example:

    - file: ``${home}/test-encrypted.txt`` -> ``${home}/test.txt``
    - dir: ``${home}/Documents-encrypted`` -> ``${home}/Documents``
    """
    # Strip the LAST occurrence of the suffix from the file name: reverse
    # both strings, remove the first occurrence, then reverse back.
    surfix_reversed = surfix[::-1]
    p = Path(encrypted_path).absolute()
    fname = p.fname
    fname_reversed = fname[::-1]
    new_fname = fname_reversed.replace(surfix_reversed, "", 1)[::-1]
    decrypted_p = p.change(new_fname=new_fname)
    return decrypted_p.abspath
def get_default_currency(self) -> Commodity:
    """Return the book's default currency, resolving and caching it on
    first access."""
    if not self.default_currency:
        # Cache the resolved currency for subsequent calls.
        self.default_currency = self.__get_default_currency()
    return self.default_currency
/**
 * Deletes the given regional disk by building a delete request for its
 * name and delegating to the request-based overload.
 *
 * @param disk the name (or full resource name) of the disk to delete
 * @return the operation tracking the deletion
 */
@BetaApi
public final Operation deleteRegionDisk(String disk) {
    DeleteRegionDiskHttpRequest request =
        DeleteRegionDiskHttpRequest.newBuilder().setDisk(disk).build();
    return deleteRegionDisk(request);
}
/**
 * Writes data into a freshly created temp file next to the target file,
 * fsyncing both contents and length before returning, so the temp file
 * can later be atomically renamed over the target.
 *
 * @param callback produces and flushes the data into the given stream
 * @param targetFile the final destination; its directory hosts the temp file
 * @param log logger (currently only validated, not written to)
 * @return the temp file containing the durable data
 * @throws IOException on any write or sync failure
 */
@Nonnull
private static File writeDataToTempFileOrDie(@Nonnull final OutputStreamCallback callback,
                                             @Nonnull final File targetFile,
                                             @Nonnull final Logger log) throws IOException {
    Preconditions.checkNotNull(callback, "callback argument is required!");
    Preconditions.checkNotNull(log, "log argument is required!");
    Preconditions.checkNotNull(targetFile, "targetFile argument is required!");
    FileOutputStream fileOut = null;
    FileChannel fileChannel = null;
    try {
        final String targetFinalName = targetFile.getName();
        final File targetDirectory = targetFile.getParentFile();
        // open temporary file
        final File tmpFile = File.createTempFile(targetFinalName, ".tmp", targetDirectory);
        fileOut = new FileOutputStream(tmpFile);
        fileChannel = fileOut.getChannel();
        // make sure to use an output stream that flows THROUGH the FileChannel, so that FileChannel.force(true)
        // can do what it's supposed to
        // write the data AND flush it
        callback.writeAndFlushData(Channels.newOutputStream(fileChannel));
        return tmpFile;
    } finally {
        try {
            // fsync to disk (both data AND length)
            if (fileChannel != null) {
                fileChannel.force(true);
            }
        } finally {
            // close the open file (if during an EXC,
            if (fileOut != null) {
                fileOut.close();
            }
        }
    }
}
/**
 * Parses a "yyyy-MM-dd" day and an "HH:mm:ss.SSS" time-of-day into epoch
 * milliseconds.
 * <p>
 * NOTE(review): parse failures are silently swallowed and the CURRENT
 * time is returned instead -- callers cannot distinguish a bad input
 * from "now". Confirm this fallback is intentional.
 *
 * @param day the date portion, e.g. "2020-01-31"
 * @param hours the time portion, e.g. "12:34:56.789"
 * @return the parsed instant in epoch millis, or the current time on
 *         parse failure
 */
private long parseTime(String day, String hours)
{
    DateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
    Date utcDate = new Date();
    try {
        utcDate = format.parse(day.concat(" " + hours));
    } catch (Exception e) {
    }
    return utcDate.getTime();
}
/**
 * Verifies that the storage account manager has been initialized,
 * rejecting any request that arrives before configuration is loaded.
 *
 * @throws StorageException if the manager is missing or uninitialized
 */
private void checkInitialized()
    throws StorageException {
    if (storageAccountManager == null || !storageAccountManager.isInitialized()) {
        String error = "DuraStore must be initialized with an XML file " +
                       "containing storage account information before any " +
                       "further requests can be fulfilled.";
        throw new StorageException(error);
    }
}
/**
 * Runs the closure with the given Groovy category class in scope,
 * delegating to {@code GroovyCategorySupport}. The {@code self} receiver
 * is unused (kept for the Groovy extension-method signature).
 *
 * @param self ignored receiver
 * @param categoryClass the category to apply
 * @param closure the code to run under the category
 * @return the closure's result
 */
public static <T> T use(Object self, Class categoryClass, Closure<T> closure) {
    return GroovyCategorySupport.use(categoryClass, closure);
}
/**
 * Computes the inner (dot) product of two vectors. When the lengths
 * differ, only the overlapping prefix is used.
 *
 * @param a first vector
 * @param b second vector
 * @return the sum of pairwise products over the common length
 */
public static double innerProduct(double[] a, double[] b) {
    double result = 0.0;
    int len = Math.min(a.length, b.length);
    for (int i = 0; i < len; i++) {
        result += a[i] * b[i];
    }
    return result;
}
/**
 * Adds the description of the method currently being processed
 * ({@code currentMember}) to the given content tree via the method writer.
 *
 * @param node the XML element that triggered this builder step
 *        (unused here; part of the builder callback signature)
 * @param methodsContentTree the tree to which the description is appended
 */
public void buildMethodDescription(XMLNode node, Content methodsContentTree) {
    methodWriter.addMemberDescription((MethodDoc) currentMember, methodsContentTree);
}
java | public boolean dependsOn (T elem1, T elem2)
{
DependencyNode<T> node1 = _nodes.get(elem1);
DependencyNode<T> node2 = _nodes.get(elem2);
List<DependencyNode<T>> nodesToCheck = new ArrayList<DependencyNode<T>>();
List<DependencyNode<T>> nodesAlreadyChecked = new ArrayList<DependencyNode<T>>();
nodesToCheck.addAll(node1.parents);
// We prevent circular dependencies when we add dependencies. Otherwise, this'd be
// potentially non-terminating.
while (!nodesToCheck.isEmpty()) {
// We take it off the end since we don't care about order and this is faster.
DependencyNode<T> checkNode = nodesToCheck.remove(nodesToCheck.size() - 1);
if (nodesAlreadyChecked.contains(checkNode)) {
// We've seen him before, no need to check again.
continue;
} else if (checkNode == node2) {
// We've found our dependency
return true;
} else {
nodesAlreadyChecked.add(checkNode);
nodesToCheck.addAll(checkNode.parents);
}
}
return false;
} |
def available(name):
    '''
    Returns ``True`` if the specified service is available, otherwise returns
    ``False``.

    The given name is first resolved to its full FMRI with the ``svcs``
    command, which lets users pass simpler service names.

    CLI Example:

    .. code-block:: bash

        salt '*' service.available net-snmp
    '''
    lookup_cmd = '/usr/bin/svcs -H -o FMRI {0}'.format(name)
    fmri = __salt__['cmd.run'](lookup_cmd, python_shell=False)
    return fmri in get_all()
/**
 * Notifies every registered MaterializationListener that the real subject
 * has been materialized, passing this proxy and {@code _realSubject}.
 * Safe to call with no listeners registered ({@code _listeners == null}).
 */
protected void afterMaterialization()
{
    if (_listeners != null)
    {
        MaterializationListener listener;
        // Listeners may remove themselves during the afterMaterialization
        // callback, so iterate the vector from back to front: removals behind
        // the cursor cannot shift the indices still to be visited.
        for (int idx = _listeners.size() - 1; idx >= 0; idx--)
        {
            listener = (MaterializationListener) _listeners.get(idx);
            listener.afterMaterialization(this, _realSubject);
        }
    }
}
/**
 * Reads the given string property, applying the identity mapping (no
 * conversion) to the raw configured value.
 *
 * @param property the property to read
 * @return the property's string value as stored
 * @throws ConfigurationException propagated from {@code tryGet} on failure
 */
public String get(Property<String> property) throws ConfigurationException {
    return tryGet(property, s -> s);
}
def quote_for_pydot(string):
    """
    Enclose a string (or int) in ``"``-chars for use in a pydot/DOT file.

    An ``int`` argument is converted to its decimal string form first, and
    any ``"``-chars matched by ``QUOTE_RE`` are backslash-escaped so the
    result remains a single quoted token.
    """
    text = str(string) if isinstance(string, int) else string
    return u'"{}"'.format(QUOTE_RE.sub(r'\\"', text))
def list_records(self, domain, limit=None, offset=None):
    """
    Returns a list of all records configured for the specified domain.

    Delegates directly to ``domain.list_records``; ``limit`` and ``offset``
    are presumably pagination controls and are passed through unchanged —
    their exact semantics are defined by the domain object's API.
    """
    return domain.list_records(limit=limit, offset=offset)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.