language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
---|---|
java | private ObjectNode createResponseSuccess(String jsonRpc, Object id, JsonNode result) {
ObjectNode response = mapper.createObjectNode();
response.put(JSONRPC, jsonRpc);
if (Integer.class.isInstance(id)) {
response.put(ID, Integer.class.cast(id).intValue());
} else if (Long.class.isInstance(id)) {
response.put(ID, Long.class.cast(id).longValue());
} else if (Float.class.isInstance(id)) {
response.put(ID, Float.class.cast(id).floatValue());
} else if (Double.class.isInstance(id)) {
response.put(ID, Double.class.cast(id).doubleValue());
} else if (BigDecimal.class.isInstance(id)) {
response.put(ID, BigDecimal.class.cast(id));
} else {
response.put(ID, String.class.cast(id));
}
response.set(RESULT, result);
return response;
} |
def fastgapfill(model_extended, core, solver, weights=None, epsilon=1e-5):
    """Run FastGapFill gap-filling algorithm by calling
    :func:`psamm.fastcore.fastcore`.

    FastGapFill will try to find a minimum subset of reactions that includes
    the core reactions and it also has no blocked reactions.

    Return the set of reactions in the minimum subset. An extended model that
    includes artificial transport and exchange reactions can be generated by
    calling :func:`.create_extended_model`.

    Args:
        model_extended: :class:`psamm.metabolicmodel.MetabolicModel`.
        core: reactions in the original metabolic model.
        weights: a weight dictionary for reactions in the model, or None
            for unit weights.
        solver: linear programming library to use.
        epsilon: float number, threshold for Fastcore algorithm.
    """
    # BUG FIX: the original used a mutable default argument (weights={}),
    # which is shared across calls; use a None sentinel instead.
    if weights is None:
        weights = {}

    # Run Fastcore and print the induced reaction set
    logger.info('Calculating Fastcore induced set on model')
    induced = fastcore(
        model_extended, core, epsilon=epsilon, weights=weights, solver=solver)
    logger.debug('Result: |A| = {}, A = {}'.format(len(induced), induced))
    added_reactions = induced - core
    logger.debug('Extended: |E| = {}, E = {}'.format(
        len(added_reactions), added_reactions))
    return induced
def compute_tab_title(self, vte):
    """Abbreviate and cut vte terminal title when necessary
    """
    title = vte.get_window_title() or _("Terminal")
    try:
        cwd = vte.get_current_directory()
        if self.abbreviate and title.endswith(cwd):
            # Shorten every path component except the last to its first
            # character (e.g. /home/user/src -> /h/u/src).
            components = cwd.split('/')
            abbreviated = '/'.join(
                [part[:1] for part in components[:-1]] + [components[-1]])
            title = title[:len(title) - len(cwd)] + abbreviated
    except OSError:
        pass
    return TabNameUtils.shorten(title, self.settings)
java | private void reportError(StorageDirectory sd) {
if (storage instanceof NNStorage) {
// pass null, since we handle the disable here
((NNStorage)storage).reportErrorsOnDirectory(sd, null);
} else {
LOG.error("Failed direcory: " + sd.getCurrentDir());
}
} |
/**
 * Appends the usage text for the given command to {@code out}.
 *
 * @param commandName name (or alias) of the command to describe
 * @param out buffer the usage text is appended to
 * @param indent prefix prepended to emitted lines
 */
public final void usage(String commandName, StringBuilder out, String indent) {
    String description = getCommandDescription(commandName);
    JCommander jc = commander.findCommandByAlias(commandName);
    // NOTE(review): jc is dereferenced below without a null check --
    // confirm findCommandByAlias cannot return null for names reaching here.
    if (description != null) {
        out.append(indent).append(description);
        out.append("\n");
    }
    jc.getUsageFormatter().usage(out, indent);
}
/**
 * Loads a {@code TagsComponent} implementation via reflection.
 *
 * <p>Tries the full implementation first, then the lite implementation,
 * and finally falls back to a no-op component when neither class is on the
 * classpath. Each failed lookup is logged at FINE level.
 *
 * @param classLoader class loader used for the lookup; may be null
 */
@DefaultVisibilityForTesting
static TagsComponent loadTagsComponent(@Nullable ClassLoader classLoader) {
    try {
        // Call Class.forName with literal string name of the class to help shading tools.
        return Provider.createInstance(
            Class.forName(
                "io.opencensus.impl.tags.TagsComponentImpl", /*initialize=*/ true, classLoader),
            TagsComponent.class);
    } catch (ClassNotFoundException e) {
        logger.log(
            Level.FINE,
            "Couldn't load full implementation for TagsComponent, now trying to load lite "
                + "implementation.",
            e);
    }
    try {
        // Call Class.forName with literal string name of the class to help shading tools.
        return Provider.createInstance(
            Class.forName(
                "io.opencensus.impllite.tags.TagsComponentImplLite",
                /*initialize=*/ true,
                classLoader),
            TagsComponent.class);
    } catch (ClassNotFoundException e) {
        logger.log(
            Level.FINE,
            "Couldn't load lite implementation for TagsComponent, now using "
                + "default implementation for TagsComponent.",
            e);
    }
    // Neither implementation is available: degrade to a no-op component.
    return NoopTags.newNoopTagsComponent();
}
/**
 * Adds a sub-document {@code insert} mutation to this builder.
 *
 * <p>Delegates to the async builder and returns this builder for chaining.
 *
 * @param path sub-document path to insert at
 * @param fragment value to insert
 * @param optionsBuilder per-mutation options
 */
public <T> MutateInBuilder insert(String path, T fragment, SubdocOptionsBuilder optionsBuilder) {
    asyncBuilder.insert(path, fragment, optionsBuilder);
    return this;
}
// NetBeans GUI-builder generated layout code (see GEN-BEGIN marker below).
// Do not hand-edit; regenerate from the form editor instead.
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
    jLabel1 = new javax.swing.JLabel();
    jTextField1 = new javax.swing.JTextField();
    jLabel2 = new javax.swing.JLabel();
    jTextField2 = new javax.swing.JTextField();
    jLabel3 = new javax.swing.JLabel();
    jTextField3 = new javax.swing.JTextField();
    jLabel4 = new javax.swing.JLabel();
    jTextField4 = new javax.swing.JTextField();
    jCalendarButton1 = new JCalendarButton();
    jTimeButton1 = new JTimeButton();
    jCalendarButton2 = new JCalendarButton();
    jTimeButton2 = new JTimeButton();

    setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);

    jLabel1.setText("Name:");
    jLabel2.setText("Date:");

    // Focus listeners re-parse the text fields when editing finishes.
    jTextField2.addFocusListener(new java.awt.event.FocusAdapter() {
        public void focusLost(java.awt.event.FocusEvent evt) {
            dateFocusLost(evt);
        }
    });

    jLabel3.setText("Time:");

    jTextField3.addFocusListener(new java.awt.event.FocusAdapter() {
        public void focusLost(java.awt.event.FocusEvent evt) {
            timeFocusLost(evt);
        }
    });

    jLabel4.setText("Date and Time:");

    jTextField4.addFocusListener(new java.awt.event.FocusAdapter() {
        public void focusLost(java.awt.event.FocusEvent evt) {
            dateTimeFocusLost(evt);
        }
    });

    // Popup buttons push their selection back via property-change events.
    jCalendarButton1.addPropertyChangeListener(new java.beans.PropertyChangeListener() {
        public void propertyChange(java.beans.PropertyChangeEvent evt) {
            dateOnlyPopupChanged(evt);
        }
    });

    jTimeButton1.addPropertyChangeListener(new java.beans.PropertyChangeListener() {
        public void propertyChange(java.beans.PropertyChangeEvent evt) {
            timeOnlyPopupChanged(evt);
        }
    });

    jCalendarButton2.addPropertyChangeListener(new java.beans.PropertyChangeListener() {
        public void propertyChange(java.beans.PropertyChangeEvent evt) {
            datePopupChanged(evt);
        }
    });

    jTimeButton2.addPropertyChangeListener(new java.beans.PropertyChangeListener() {
        public void propertyChange(java.beans.PropertyChangeEvent evt) {
            timePopupChanged(evt);
        }
    });

    javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
    getContentPane().setLayout(layout);
    layout.setHorizontalGroup(
        layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(layout.createSequentialGroup()
            .addGap(26, 26, 26)
            .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addComponent(jLabel2)
                .addComponent(jLabel1)
                .addComponent(jLabel3)
                .addComponent(jLabel4))
            .addGap(107, 107, 107)
            .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
                .addComponent(jTextField1, javax.swing.GroupLayout.DEFAULT_SIZE, 226, Short.MAX_VALUE)
                .addComponent(jTextField4)
                .addComponent(jTextField3)
                .addComponent(jTextField2, javax.swing.GroupLayout.DEFAULT_SIZE, 130, Short.MAX_VALUE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addComponent(jCalendarButton1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addComponent(jTimeButton1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addGroup(layout.createSequentialGroup()
                    .addComponent(jCalendarButton2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                    .addComponent(jTimeButton2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)))
            .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
    );
    layout.setVerticalGroup(
        layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(layout.createSequentialGroup()
            .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addGroup(layout.createSequentialGroup()
                    .addGap(33, 33, 33)
                    .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                        .addComponent(jLabel1)
                        .addComponent(jTextField1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                    .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                        .addComponent(jLabel2)
                        .addComponent(jTextField2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                    .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                        .addComponent(jLabel3)
                        .addComponent(jTextField3, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addComponent(jTimeButton1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                    .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                        .addComponent(jLabel4)
                        .addComponent(jTextField4, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addComponent(jCalendarButton2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)))
                .addGroup(layout.createSequentialGroup()
                    .addGap(60, 60, 60)
                    .addComponent(jCalendarButton1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addGap(30, 30, 30)
                    .addComponent(jTimeButton2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)))
            .addContainerGap(53, Short.MAX_VALUE))
    );

    pack();
}
java | @Override
protected void visitFunctionNode(FunctionNode node) {
// Cannot simplify nonplugin functions.
// TODO(brndn): we can actually simplify checkNotNull.
if (node.getSoyFunction() instanceof BuiltinFunction) {
return;
}
if (node.getSoyFunction() instanceof LoggingFunction) {
return;
}
// Default to fallback implementation.
visitExprNode(node);
} |
def is_series(data):
    """
    Checks whether the supplied data is of Series type.

    Recognizes both pandas and dask Series. dask is only consulted when it
    has already been imported elsewhere (present in ``sys.modules``), so
    this never triggers a fresh dask import.
    """
    dd = None
    if 'dask' in sys.modules:
        try:
            import dask.dataframe as dd
        except ImportError:
            # BUG FIX: dask can be installed without the dataframe extra;
            # the unconditional import used to raise ImportError here.
            dd = None
    return ((pd is not None and isinstance(data, pd.Series)) or
            (dd is not None and isinstance(data, dd.Series)))
/**
 * Builds the default JWT claims from the shared {@code jwtConfig}: issuer,
 * audience, expiry window, a fresh token id, the issue time, a 2-minute
 * not-before clock-skew allowance, and the config version claim.
 */
public static JwtClaims getDefaultJwtClaims() {
    JwtClaims claims = new JwtClaims();
    claims.setIssuer(jwtConfig.getIssuer());
    claims.setAudience(jwtConfig.getAudience());
    claims.setExpirationTimeMinutesInTheFuture(jwtConfig.getExpiredInMinutes());
    claims.setGeneratedJwtId(); // a unique identifier for the token
    claims.setIssuedAtToNow(); // when the token was issued/created (now)
    claims.setNotBeforeMinutesInThePast(2); // time before which the token is not yet valid (2 minutes ago)
    claims.setClaim("version", jwtConfig.getVersion());
    return claims;
}
/**
 * Overwrites the points stored in set {@code which} with the supplied points.
 *
 * <p>Each point is stored as two consecutive ints (x, y) flattened across
 * fixed-size blocks.
 *
 * @param which index of the set to overwrite
 * @param points replacement points; must match the set's length exactly
 * @throws IllegalArgumentException if the sizes differ
 */
public void writeOverSet(int which, List<Point2D_I32> points) {
    BlockIndexLength set = sets.get(which);
    if( set.length != points.size() )
        throw new IllegalArgumentException("points and set don't have the same length");

    for (int i = 0; i < set.length; i++) {
        int index = set.start + i*2;
        int blockIndex = set.block + index/blockLength;
        index %= blockLength;
        // NOTE(review): writing x at index and y at index+1 assumes a point
        // never straddles a block boundary (blockLength and set.start even)
        // -- confirm this invariant is maintained by the writer.
        Point2D_I32 p = points.get(i);
        int block[] = blocks.get( blockIndex );
        block[index] = p.x;
        block[index+1] = p.y;
    }
}
def rescale(curves, values):
    """
    Multiply the losses in each curve of kind (losses, poes) by the
    corresponding value.

    :param curves: an array of shape (A, 2, C)
    :param values: an array of shape (A,)
    """
    num_assets, _, num_levels = curves.shape
    assert num_assets == len(values), (num_assets, len(values))
    array = numpy.zeros((num_assets, num_levels), loss_poe_dt)
    # broadcast each asset's value over its loss levels
    array['loss'] = curves[:, 0] * numpy.reshape(values, (num_assets, 1))
    array['poe'] = curves[:, 1]
    return array
def _fill_text(self, text, width, indent):
    """Wraps text like HelpFormatter, but doesn't squash lines

    This makes it easier to do lists and paragraphs.
    """
    def fill(chunk):
        # delegate the actual wrapping to the stock formatter
        return super(WrappedTextHelpFormatter, self)._fill_text(chunk, width, indent)

    filled = []
    for part in text.split('\n\n'):
        if part.startswith('* '):
            # Bulleted list: wrap each bullet line individually so the
            # list structure survives.
            filled.append('\n'.join(fill(line) for line in part.split('\n')))
        else:
            filled.append(fill(part))
    return '\n\n'.join(filled)
/**
 * Sets the language and propagates it into the composite i18n id,
 * creating the id lazily when absent.
 */
@Override
public final void setLang(final Languages pLang) {
    this.lang = pLang;
    if (this.itsId == null) {
        this.itsId = new IdI18nCurrency();
    }
    this.itsId.setLang(this.lang);
}
/**
 * Returns true when exactly one of the two expressions is a parameter
 * placeholder and the other a constant (in either order), and the pair
 * compares equal when both are treated as constant value expressions.
 */
private static boolean valueConstantsMatch(AbstractExpression e1, AbstractExpression e2) {
    return (e1 instanceof ParameterValueExpression && e2 instanceof ConstantValueExpression ||
            e1 instanceof ConstantValueExpression && e2 instanceof ParameterValueExpression) &&
            equalsAsCVE(e1, e2);
}
/**
 * Looks up a custom pickler for the given class: exact type first, then
 * any registered pickler whose key is a supertype or interface of it.
 *
 * @return the matching pickler, or null when none is registered
 */
protected IObjectPickler getCustomPickler(Class<?> t) {
    IObjectPickler pickler = customPicklers.get(t);
    if(pickler!=null) {
        return pickler; // exact match
    }

    // check if there's a custom pickler registered for an interface or abstract base class
    // that this object implements or inherits from.
    // NOTE(review): map iteration order decides which pickler wins when
    // several registered bases match -- confirm that is acceptable.
    for(Entry<Class<?>, IObjectPickler> x: customPicklers.entrySet()) {
        if(x.getKey().isAssignableFrom(t)) {
            return x.getValue();
        }
    }

    return null;
}
/**
 * Executes the given HTTP request with a freshly built client that follows
 * redirects.
 *
 * <p>NOTE(review): a new HttpClient is constructed per call; on a hot path
 * consider a shared, reusable client for connection pooling.
 *
 * @param request request to execute; must not be null
 * @return the raw HTTP response
 * @throws IOException on transport failure
 */
public static HttpResponse execute(HttpRequestBase request) throws IOException {
    Assert.notNull(request, "Missing request!");
    HttpClient client = HttpClientBuilder.create().setRedirectStrategy(new DefaultRedirectStrategy()).build();
    return client.execute(request);
}
def prev_this_next(it, sentinel=None):
    """Utility to return (prev, this, next) tuples from an iterator

    The first tuple carries ``sentinel`` in the prev slot and the last
    tuple carries ``sentinel`` in the next slot.
    """
    before, current, after = tee(it, 3)
    next(after, None)  # shift the "next" view forward by one element
    padded_before = chain([sentinel], before)
    padded_after = chain(after, [sentinel])
    return zip(padded_before, current, padded_after)
/**
 * Returns the entry mapped to {@code key}, or null when absent.
 *
 * <p>A null key hashes to 0; chain candidates match on reference equality
 * or {@code equals}, mirroring HashMap semantics.
 */
final Entry<K, V> getEntry(Object key) {
    int hash = (key == null) ? 0 : hash(key);
    for (Entry<K, V> e = table[indexFor(hash, table.length)]; e != null; e = e.next) {
        if (e.hash == hash && ((e.key) == key || (key != null && key.equals(e.key)))) return e;
    }
    return null;
}
def _init_request_logging(self, app):
    """
    Sets up request logging unless ``APPINSIGHTS_DISABLE_REQUEST_LOGGING``
    is set in the Flask config.

    Args:
        app (flask.Flask). the Flask application for which to initialize the extension.
    """
    enabled = not app.config.get(CONF_DISABLE_REQUEST_LOGGING, False)
    if not enabled:
        return

    # Wrap the WSGI app so every request is reported through the
    # Application Insights telemetry channel.
    self._requests_middleware = WSGIApplication(
        self._key, app.wsgi_app, telemetry_channel=self._channel)
    app.wsgi_app = self._requests_middleware
def WalkChildren(elem):
    """
    Walk the XML tree of children below elem, returning each in order.

    Yields every descendant node of ``elem`` in depth-first pre-order.
    """
    # Iterative pre-order traversal; also avoids re-binding the function
    # argument name inside the loop as the recursive original did.
    pending = list(reversed(elem.childNodes))
    while pending:
        node = pending.pop()
        yield node
        pending.extend(reversed(node.childNodes))
/**
 * Sets an integer header on the message, replacing any existing values.
 *
 * @deprecated Use {@code message.headers().setInt(name, value)} directly.
 */
@Deprecated
public static void setIntHeader(HttpMessage message, String name, int value) {
    message.headers().setInt(name, value);
}
def prox_max(X, step, thresh=0):
    """Projection onto numbers below `thresh`

    Clamps entries of ``X`` from above at the step-scaled threshold
    ``_step_gamma(step, thresh)``.

    Note: ``X`` is modified in place and also returned.
    """
    thresh_ = _step_gamma(step, thresh)
    above = X - thresh_ > 0
    X[above] = thresh_
    return X
/**
 * Sets the token offset. Null bounds are silently ignored; an inverted
 * range is rejected.
 *
 * @throws IllegalArgumentException if start is after end
 */
final public void setOffset(Integer start, Integer end) {
    if ((start == null) || (end == null)) {
        // do nothing
    } else if (start > end) {
        throw new IllegalArgumentException("Start offset after end offset");
    } else {
        tokenOffset = new MtasOffset(start, end);
    }
}
def _prepare_find(cls, *args, **kw):
    """Execute a find and return the resulting queryset using combined plain and parametric query generation.

    Additionally, performs argument case normalization, refer to the `_prepare_query` method's docstring.
    """
    cls, collection, query, options = cls._prepare_query(
        cls.FIND_MAPPING,
        cls.FIND_OPTIONS,
        *args,
        **kw
    )

    # 'await' became a reserved word in Python 3.7; reject the old option
    # name outright in favour of 'wait'.
    if 'await' in options:
        raise TypeError("Await is hard-deprecated as reserved keyword in Python 3.7, use wait instead.")

    # tail/wait are shorthand for an explicit cursor_type; mixing them with
    # cursor_type would be ambiguous.
    if 'cursor_type' in options and {'tail', 'wait'} & set(options):
        raise TypeError("Can not combine cursor_type and tail/wait arguments.")

    elif options.pop('tail', False):
        options['cursor_type'] = CursorType.TAILABLE_AWAIT if options.pop('wait', True) else CursorType.TAILABLE

    elif 'wait' in options:
        raise TypeError("Wait option only applies to tailing cursors.")

    # Fold max_time_ms into the query modifiers document.
    modifiers = options.get('modifiers', dict())
    if 'max_time_ms' in options:
        modifiers['$maxTimeMS'] = options.pop('max_time_ms')
    if modifiers:
        options['modifiers'] = modifiers

    return cls, collection, query, options
python | def _getDevMajorMinor(self, devpath):
"""Return major and minor device number for block device path devpath.
@param devpath: Full path for block device.
@return: Tuple (major, minor).
"""
fstat = os.stat(devpath)
if stat.S_ISBLK(fstat.st_mode):
return(os.major(fstat.st_rdev), os.minor(fstat.st_rdev))
else:
raise ValueError("The file %s is not a valid block device." % devpath) |
def render_none(self, context, result):
    """Render empty responses."""
    # Emit an empty body and drop Content-Length so the framework can
    # recompute or omit it; returning True marks the result as handled.
    context.response.body = b''
    del context.response.content_length
    return True
/**
 * Asserts that two JSON arrays are equal under the given compare mode,
 * delegating with an empty message prefix.
 *
 * @throws JSONException on a parsing or comparison error
 */
public static void assertEquals(JSONArray expected, JSONArray actual, JSONCompareMode compareMode)
        throws JSONException {
    assertEquals("", expected, actual, compareMode);
}
/**
 * Adds the given connection events to this request (builder style),
 * lazily creating the backing list on first use.
 *
 * @return this request for call chaining
 */
public ModifyVpcEndpointConnectionNotificationRequest withConnectionEvents(String... connectionEvents) {
    if (this.connectionEvents == null) {
        setConnectionEvents(new com.amazonaws.internal.SdkInternalList<String>(connectionEvents.length));
    }
    for (String ele : connectionEvents) {
        this.connectionEvents.add(ele);
    }
    return this;
}
/**
 * Hash-field lookup with a numeric field name: converts the field to its
 * decimal string form and delegates to the String overload.
 */
@Override
public String hget(String key, long field) {
    return this.hget(key, Long.toString(field));
}
def tensors_blocked_by_false(ops):
    """ Follows a set of ops assuming their value is False and find blocked Switch paths.

    This is used to prune away parts of the model graph that are only used during the training
    phase (like dropout, batch norm, etc.).
    """
    blocked = []

    # Depth-first walk over consumer ops: a False value flowing into a
    # Switch blocks its "true" output branch.
    def recurse(op):
        if op.type == "Switch":
            blocked.append(op.outputs[1]) # the true path is blocked since we assume the ops we trace are False
        else:
            # NOTE(review): no visited set -- assumes the traced subgraph is
            # acyclic; diamond-shaped fan-out is revisited (harmless but
            # potentially slow on large graphs). Confirm acceptable.
            for out in op.outputs:
                for c in out.consumers():
                    recurse(c)
    for op in ops:
        recurse(op)
    return blocked
def fetch_twitter_lists_for_user_ids_generator(twitter_app_key,
                                               twitter_app_secret,
                                               user_id_list):
    """
    Collects at most 500 Twitter lists for each user from an input list of Twitter user ids.

    Inputs: - twitter_app_key: What is says on the tin.
            - twitter_app_secret: Ditto.
            - user_id_list: A python list of Twitter user ids.

    Yields: - user_twitter_id: A Twitter user id.
            - twitter_lists_list: A python list containing Twitter lists in dictionary (json) format,
                                  or None when the request failed.
    """
    ####################################################################################################################
    # Log into my application.
    ####################################################################################################################
    twitter = login(twitter_app_key,
                    twitter_app_secret)

    ####################################################################################################################
    # For each user, gather at most 500 Twitter lists.
    ####################################################################################################################
    get_list_memberships_counter = 0
    get_list_memberships_time_window_start = time.perf_counter()
    for user_twitter_id in user_id_list:
        # Make safe twitter request.
        try:
            twitter_lists_list, get_list_memberships_counter, get_list_memberships_time_window_start\
                = safe_twitter_request_handler(twitter_api_func=twitter.get_list_memberships,
                                               call_rate_limit=15,
                                               call_counter=get_list_memberships_counter,
                                               time_window_start=get_list_memberships_time_window_start,
                                               max_retries=5,
                                               wait_period=2,
                                               user_id=user_twitter_id,
                                               count=500,
                                               cursor=-1)
            # If the call is successful, yield the list of Twitter lists.
            yield user_twitter_id, twitter_lists_list
        except (twython.TwythonError, URLError, BadStatusLine):
            # If the call is unsuccessful, we do not have any Twitter lists to store.
            # (The three duplicated except blocks were collapsed into one.)
            yield user_twitter_id, None
def timestamp_to_str(t, datetime_format=DATETIME_FORMAT, *, inverse=False):
    """
    Given a POSIX timestamp (integer) ``t``,
    format it as a datetime string in the given format.

    If ``inverse``, then do the inverse, that is, assume ``t`` is
    a datetime string in the given format and return its corresponding
    timestamp.

    If ``datetime_format`` is None, then return ``t`` as a string
    (if not ``inverse``) or as an integer (if ``inverse``) directly.
    """
    if not inverse:
        if datetime_format is None:
            result = str(t)
        else:
            result = dt.datetime.fromtimestamp(t).strftime(datetime_format)
    else:
        # BUG FIX: the original tested ``format is None`` -- the *builtin*
        # ``format``, which is never None -- so the documented passthrough
        # never triggered on the inverse path and strptime(t, None) crashed.
        if datetime_format is None:
            result = int(t)
        else:
            result = dt.datetime.strptime(t, datetime_format).timestamp()
    return result
/**
 * Verifies an access token against the ESI auth endpoint and returns just
 * the response body, discarding the HTTP metadata.
 *
 * @throws ApiException if the API call fails
 */
public EsiVerifyResponse getVerify(String userAgent, String xUserAgent, String datasource, String token,
        String authorization) throws ApiException {
    ApiResponse<EsiVerifyResponse> resp = getVerifyWithHttpInfo(userAgent, xUserAgent, datasource, token,
            authorization);
    return resp.getData();
}
def repr_return(func):
    """
    This is a decorator to give the return value a pretty print repr

    Looks up a pretty-printer for the return value's exact type in the
    module-level ``repr_map``; plain strings pass through untouched.
    """
    def repr_return_decorator(*args, **kwargs):
        ret = func(*args, **kwargs)
        # Strings are already printable -- return as-is.
        # NOTE(review): ``basestring`` exists only on Python 2; this module
        # appears to target Python 2 -- confirm before porting.
        if isinstance(ret, basestring):
            return ret
        # Exact type match only; subclasses fall through to the warning.
        if type(ret) in repr_map:
            return repr_map[type(ret)](ret)
        print('=' * 80 + '\n' +
              ' FAILED TO GET REPR RETURN for type (' +
              str(type(ret)) + '\n' + '=' * 80)
        return ret
    return repr_return_decorator
/**
 * Logs a successful map-task-attempt completion record to the job history.
 *
 * <p>No-op when history logging is disabled. Validates that the attempt
 * belongs to the job this logger was created for.
 *
 * @throws RuntimeException if the attempt's job id does not match this logger's job
 */
public void logMapTaskFinished(TaskAttemptID taskAttemptId,
                               long finishTime,
                               String hostName,
                               String taskType,
                               String stateString,
                               Counters counter) {
    if (disableHistory) {
        return;
    }
    JobID id = taskAttemptId.getJobID();
    if (!this.jobId.equals(id)) {
        throw new RuntimeException("JobId from task: " + id +
                                   " does not match expected: " + jobId);
    }
    if (null != writers) {
        // Keys and values are positionally paired.
        log(writers, RecordTypes.MapAttempt,
            new Keys[]{ Keys.TASK_TYPE, Keys.TASKID,
                        Keys.TASK_ATTEMPT_ID, Keys.TASK_STATUS,
                        Keys.FINISH_TIME, Keys.HOSTNAME,
                        Keys.STATE_STRING, Keys.COUNTERS},
            new String[]{taskType,
                         taskAttemptId.getTaskID().toString(),
                         taskAttemptId.toString(),
                         Values.SUCCESS.name(),
                         String.valueOf(finishTime), hostName,
                         stateString,
                         counter.makeEscapedCompactString()});
    }
}
/**
 * Lazily creates and returns the shared {@code PercentKeyPressHandler}
 * singleton using double-checked locking.
 *
 * <p>NOTE(review): double-checked locking is only correct under the Java
 * memory model if {@code HandlerFactory.percentKeyPressHandler} is declared
 * {@code volatile}; the field declaration is outside this view -- confirm.
 */
public static final KeyPressHandler getPercentKeyPressHandler() { // NOPMD it's thread save!
    if (HandlerFactory.percentKeyPressHandler == null) {
        synchronized (PercentKeyPressHandler.class) {
            if (HandlerFactory.percentKeyPressHandler == null) {
                HandlerFactory.percentKeyPressHandler = new PercentKeyPressHandler();
            }
        }
    }
    return HandlerFactory.percentKeyPressHandler;
}
def format_heading(self, heading):
    """
    This translates any heading of "options" or "Options" into
    "SCons Options."  Unfortunately, we have to do this here,
    because those titles are hard-coded in the optparse calls.
    """
    title = "SCons Options" if heading == 'Options' else heading
    return optparse.IndentedHelpFormatter.format_heading(self, title)
/**
 * Returns true when this descriptor lies strictly below {@code other} in
 * the data-path hierarchy, i.e. other's path is a proper prefix of this
 * descriptor's path.
 */
public boolean isDescendant(DataDescriptor other) {
    if (other == null)
        return false;
    if (type == TYPE_INVALID || other.getType() == TYPE_INVALID)
        return false;
    String[] otherPath = other.getPath();
    // a proper ancestor must have a strictly shorter path
    if (otherPath.length >= dataPath.length)
        return false;
    for (int i = 0; i < otherPath.length; i++) {
        if (!dataPath[i].equals(otherPath[i]))
            return false;
    }
    return true;
}
def download(self):
    """
    MLBAM dataset download

    Downloads every day in ``self.days`` using a pool of worker processes.
    """
    # BUG FIX: the pool was never closed or joined, leaking worker
    # processes; ensure they are reaped even when a download fails.
    pool = Pool()
    try:
        pool.map(self._download, self.days)
    finally:
        pool.close()
        pool.join()
def save_named_query(self, alias, querystring, afterwards=None):
    """
    add an alias for a query string.

    These are stored in the notmuch database and can be used as part of
    more complex queries using the syntax "query:alias".
    See :manpage:`notmuch-search-terms(7)` for more info.

    :param alias: name of shortcut
    :type alias: str
    :param querystring: value, i.e., the full query string
    :type querystring: str
    :param afterwards: callback to trigger after adding the alias
    :type afterwards: callable or None
    :raises DatabaseROError: if the database is opened read-only
    """
    if self.ro:
        raise DatabaseROError()
    # Deferred write: the queue consumer performs the actual setconfig.
    self.writequeue.append(('setconfig', afterwards, 'query.' + alias,
                            querystring))
def endBy1(p, sep):
    '''`endBy1(p, sep) parses one or more occurrences of `p`, separated and
    ended by `sep`. Returns a list of values returned by `p`.'''
    # Delegates to `separated`: minimum 1 occurrence, no upper bound, and
    # end=True requires a trailing `sep` after the last `p`.
    return separated(p, sep, 1, maxt=float('inf'), end=True)
def AsDict(self, dt=True):
    """
    A dict representation of this User instance.

    The return value uses the same key names as the JSON representation.

    Args:
        dt (bool): If True, return dates as python datetime objects. If
            False, return dates as ISO strings.

    Return:
        A dict representing this User instance
    """
    data = {}
    if self.name:
        data['name'] = self.name
        # NOTE(review): mlkshk_url is only emitted when name is set --
        # confirm this nesting is intentional (source formatting was
        # ambiguous) and not an indentation slip.
        data['mlkshk_url'] = self.mlkshk_url
    if self.profile_image_url:
        data['profile_image_url'] = self.profile_image_url
    if self.id:
        data['id'] = self.id
    if self.about:
        data['about'] = self.about
    if self.website:
        data['website'] = self.website
    if self.shakes:
        # serialize nested Shake objects with the same date mode
        data['shakes'] = [shk.AsDict(dt=dt) for shk in self.shakes]
    data['shake_count'] = self.shake_count
    return data
def compose_info(root_dir, files, hash_fn, aleph_record, urn_nbn=None):
    """
    Compose `info` XML file.

    Info example::
        <?xml version="1.0" encoding="UTF-8" standalone="yes" ?>
        <info>
            <created>2014-07-31T10:58:53</created>
            <metadataversion>1.0</metadataversion>
            <packageid>c88f5a50-7b34-11e2-b930-005056827e51</packageid>
            <mainmets>mets.xml</mainmets>
            <titleid type="ccnb">cnb001852189</titleid>
            <titleid type="isbn">978-80-85979-89-6</titleid>
            <collection>edeposit</collection>
            <institution>nakladatelství Altar</institution>
            <creator>ABA001</creator>
            <size>1530226</size>
            <itemlist itemtotal="1">
                <item>\data\Denik_zajatce_Sramek_CZ_v30f-font.epub</item>
            </itemlist>
            <checksum type="MD5" checksum="ce076548eaade33888005de5d4634a0d">
                \MD5.md5
            </checksum>
        </info>

    Args:
        root_dir (str): Absolute path to the root directory.
        files (list): Absolute paths to all ebook and metadata files.
        hash_fn (str): Absolute path to the MD5 file.
        aleph_record (str): String with Aleph record with metadata.
        urn_nbn (str, default None): Optional URN:NBN identifier.

    Returns:
        str: XML string.
    """
    # compute hash for hashfile
    with open(hash_fn) as f:
        hash_file_md5 = hashlib.md5(f.read()).hexdigest()

    schema_location = "http://www.ndk.cz/standardy-digitalizace/info11.xsd"
    document = odict[
        "info": odict[
            "@xmlns:xsi": "http://www.w3.org/2001/XMLSchema-instance",
            "@xsi:noNamespaceSchemaLocation": schema_location,
            "created": time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime()),
            "metadataversion": "1.0",
            "packageid": _path_to_id(root_dir),
            # not used in SIP
            # "mainmets": _get_localized_fn(metadata_fn, root_dir),
            "titleid": None,
            "collection": "edeposit",
            "institution": None,
            "creator": None,
            "size": _calc_dir_size(root_dir) / 1024,  # size in kiB
            "itemlist": odict[
                "@itemtotal": "2",
                "item": map(
                    lambda x: _get_localized_fn(x, root_dir),
                    files
                )
            ],
            "checksum": odict[
                "@type": "MD5",
                "@checksum": hash_file_md5,
                "#text": _get_localized_fn(hash_fn, root_dir)
            ],
        ]
    ]

    # get informations from MARC record
    record = MARCXMLRecord(aleph_record)

    # get publisher info
    # BUG FIX: only decode the publisher when it is present; the original
    # called unicode(record.get_publisher(), "utf-8") unconditionally,
    # which fails when the record has no publisher.
    publisher = record.get_publisher(None)
    if publisher:
        document["info"]["institution"] = remove_hairs(
            unicode(publisher, "utf-8")
        )

    # get <creator> info
    creator = record.getDataRecords("910", "a", False)
    alt_creator = record.getDataRecords("040", "d", False)
    document["info"]["creator"] = creator[0] if creator else alt_creator[-1]

    # collect informations for <titleid> tags
    isbns = record.get_ISBNs()

    ccnb = record.getDataRecords("015", "a", False)
    ccnb = ccnb[0] if ccnb else None

    if any([isbns, ccnb, urn_nbn]):  # TODO: issn
        document["info"]["titleid"] = []

        for isbn in isbns:
            document["info"]["titleid"].append({
                "@type": "isbn",
                "#text": isbn
            })

        if ccnb:
            document["info"]["titleid"].append({
                "@type": "ccnb",
                "#text": ccnb
            })

        if urn_nbn:
            document["info"]["titleid"].append({
                "@type": "urnnbn",
                "#text": urn_nbn
            })

    # TODO: later
    # if issn:
    #     document["info"]["titleid"].append({
    #         "@type": "issn",
    #         "#text": issn
    #     })

    # remove unset options
    # BUG FIX: the original iterated over key names and tested
    # `key is None` (never true for string keys) and deleted from the
    # top-level `document`; test the VALUES and delete from
    # document["info"] instead so the None placeholders are dropped.
    unset_keys = [
        key
        for key in document["info"]
        if document["info"][key] is None
    ]

    for key in unset_keys:
        del document["info"][key]

    xml_document = xmltodict.unparse(document, pretty=True)

    return xml_document.encode("utf-8")
/**
 * Emits the combiner output: wraps {@code combinedMessage} as the Right
 * variant of the reused output tuple and forwards it to the collector.
 */
public final void sendCombinedMessage(Message combinedMessage) {
    outValue.f1 = Either.Right(combinedMessage);
    out.collect(outValue);
}
def _versions_from_changelog(changelog):
    """
    Return all released versions from given ``changelog``, sorted.

    :param dict changelog:
        A changelog dict as returned by ``releases.util.parse_changelog``.

    :returns: A sorted list of `semantic_version.Version` objects.
    """
    # Only keys that look like bugfix releases (per BUGFIX_RELEASE_RE)
    # are treated as released versions.
    versions = [Version(x) for x in changelog if BUGFIX_RELEASE_RE.match(x)]
    return sorted(versions)
java | private static void roundAndAdd(Collection<Integer> result, double value)
{
int roundedValue = (int) Math.round(value);
if (!result.contains(roundedValue))
{
result.add(roundedValue);
}
} |
/**
 * Generates a PKCE code verifier: {@code entropyBytes} random bytes,
 * base64url-encoded without padding.
 *
 * @param entropySource source of randomness (should be cryptographically strong)
 * @param entropyBytes number of random bytes to draw
 */
public static String generateRandomCodeVerifier(SecureRandom entropySource, int entropyBytes) {
    byte[] randomBytes = new byte[entropyBytes];
    entropySource.nextBytes(randomBytes);
    return Base64.getUrlEncoder().withoutPadding().encodeToString(randomBytes);
}
/**
 * Loads breakpoint-related settings from the configuration, applying
 * defaults: no drop confirmation, simple button mode, in-scope-only off.
 * Always-on-top is tri-state (null means unset).
 */
@Override
protected void parse() {
    confirmDropMessage = getBoolean(PARAM_CONFIRM_DROP_MESSAGE_KEY, false);
    buttonMode = getInt(PARAM_UI_BUTTON_MODE, BUTTON_MODE_SIMPLE);
    alwaysOnTop = getConfig().getBoolean(PARAM_BRK_ALWAYS_ON_TOP, null);
    inScopeOnly = getBoolean(PARAM_BRK_IN_SCOPE_ONLY, false);
}
/**
 * Adds the mapping only when the key is not already present; returns this
 * builder for chaining.
 */
public MapBuilder<KEY, VALUE> putIfAbsent(KEY key, VALUE value) {
    getMap().putIfAbsent(key, value);
    return this;
}
/**
 * Updates a field-level-encryption profile: runs the standard
 * pre-execution hooks, then delegates to the generated executor.
 */
@Override
public UpdateFieldLevelEncryptionProfileResult updateFieldLevelEncryptionProfile(UpdateFieldLevelEncryptionProfileRequest request) {
    request = beforeClientExecution(request);
    return executeUpdateFieldLevelEncryptionProfile(request);
}
java | public static byte[] toByteArray(final long l)
{
final byte[] bytes = new byte[8];
bytes[0] = (byte) ((l >>> 56) & 0xff);
bytes[1] = (byte) ((l >>> 48) & 0xff);
bytes[2] = (byte) ((l >>> 40) & 0xff);
bytes[3] = (byte) ((l >>> 32) & 0xff);
bytes[4] = (byte) ((l >>> 24) & 0xff);
bytes[5] = (byte) ((l >>> 16) & 0xff);
bytes[6] = (byte) ((l >>> 8) & 0xff);
bytes[7] = (byte) ((l >>> 0) & 0xff);
return bytes;
} |
python | def listStoredSms(self, status=Sms.STATUS_ALL, memory=None, delete=False):
""" Returns SMS messages currently stored on the device/SIM card.
The messages are read from the memory set by the "memory" parameter.
:param status: Filter messages based on this read status; must be 0-4 (see Sms class)
:type status: int
:param memory: The memory type to read from. If None, use the current default SMS read memory
:type memory: str or None
:param delete: If True, delete returned messages from the device/SIM card
:type delete: bool
:return: A list of Sms objects containing the messages read
:rtype: list
"""
self._setSmsMemory(readDelete=memory)
messages = []
delMessages = set()
if self._smsTextMode:
cmglRegex= re.compile(r'^\+CMGL: (\d+),"([^"]+)","([^"]+)",[^,]*,"([^"]+)"$')
for key, val in dictItemsIter(Sms.TEXT_MODE_STATUS_MAP):
if status == val:
statusStr = key
break
else:
raise ValueError('Invalid status value: {0}'.format(status))
result = self.write('AT+CMGL="{0}"'.format(statusStr))
msgLines = []
msgIndex = msgStatus = number = msgTime = None
for line in result:
cmglMatch = cmglRegex.match(line)
if cmglMatch:
# New message; save old one if applicable
if msgIndex != None and len(msgLines) > 0:
msgText = '\n'.join(msgLines)
msgLines = []
messages.append(ReceivedSms(self, Sms.TEXT_MODE_STATUS_MAP[msgStatus], number, parseTextModeTimeStr(msgTime), msgText))
delMessages.add(int(msgIndex))
msgIndex, msgStatus, number, msgTime = cmglMatch.groups()
msgLines = []
else:
if line != 'OK':
msgLines.append(line)
if msgIndex != None and len(msgLines) > 0:
msgText = '\n'.join(msgLines)
msgLines = []
messages.append(ReceivedSms(self, Sms.TEXT_MODE_STATUS_MAP[msgStatus], number, parseTextModeTimeStr(msgTime), msgText))
delMessages.add(int(msgIndex))
else:
cmglRegex = re.compile(r'^\+CMGL:\s*(\d+),\s*(\d+),.*$')
readPdu = False
result = self.write('AT+CMGL={0}'.format(status))
for line in result:
if not readPdu:
cmglMatch = cmglRegex.match(line)
if cmglMatch:
msgIndex = int(cmglMatch.group(1))
msgStat = int(cmglMatch.group(2))
readPdu = True
else:
try:
smsDict = decodeSmsPdu(line)
except EncodingError:
self.log.debug('Discarding line from +CMGL response: %s', line)
else:
if smsDict['type'] == 'SMS-DELIVER':
sms = ReceivedSms(self, int(msgStat), smsDict['number'], smsDict['time'], smsDict['text'], smsDict['smsc'])
elif smsDict['type'] == 'SMS-STATUS-REPORT':
sms = StatusReport(self, int(msgStat), smsDict['reference'], smsDict['number'], smsDict['time'], smsDict['discharge'], smsDict['status'])
else:
raise CommandError('Invalid PDU type for readStoredSms(): {0}'.format(smsDict['type']))
messages.append(sms)
delMessages.add(msgIndex)
readPdu = False
if delete:
if status == Sms.STATUS_ALL:
# Delete all messages
self.deleteMultipleStoredSms()
else:
for msgIndex in delMessages:
self.deleteStoredSms(msgIndex)
return messages |
java | protected int getNextIndex(final int currentIndex) {
if (!isPaused()) {
return currentIndex + 1;
}
int ret = currentIndex;
while (isPaused()) {
sendStepPausing(ret + 1);
try {
Thread.sleep(getPauseSpan());
} catch (InterruptedException e) {
LOG.warn("thread.sleep.error", e);
}
TestScript testScript = current.getTestScript();
if (testScript.isScriptFileChanged()) {
LOG.info("script.file.changed");
current.setTestScript(
dao.load(testScript.getScriptFile(), testScript.getSheetName(), false));
}
// コンソールから有効なコマンド入力があるまでループします。
if (cmd == null || cmd.key == null) {
continue;
}
final int cmdRet = cmd.execute(ret, current.getTestScript(), appCtx);
if (cmdRet < 0) {
LOG.info("cmd.error");
} else {
ret = cmdRet;
if (current.getTestScript().getTestStep(ret) == null) {
LOG.info("test.step.end");
break;
}
LOG.info("empty");
LOG.info("test.step.next.prev");
for (int i = ret - 1; i <= ret + 1; i++) {
TestStep nextStep = current.getTestScript().getTestStep(i);
if (nextStep == null) {
continue;
}
String nextMark = i == ret + 1 ? " <- 次に実行" : "";
LOG.info("test.step.next", new Object[] { nextStep.getNo(),
nextStep.getItemName(), nextStep.getLocator(), nextMark });
}
LOG.info("empty");
}
//
if (cmd.key.release && cmdRet >= 0) {
cmd = null;
break;
} else {
cmd = null;
}
}
return ret;
} |
python | def sink(wrapped):
"""Creates an SPL operator with a single input port.
A SPL operator with a single input port and no output ports.
For each tuple on the input port the decorated function
is called passing the contents of the tuple.
.. deprecated:: 1.8
Recommended to use :py:class:`@spl.for_each <for_each>` instead.
"""
if not inspect.isfunction(wrapped):
raise TypeError('A function is required')
return _wrapforsplop(_OperatorType.Sink, wrapped, 'position', False) |
python | def _uri_split(uri):
"""Splits up an URI or IRI."""
scheme, netloc, path, query, fragment = _safe_urlsplit(uri)
auth = None
port = None
if '@' in netloc:
auth, netloc = netloc.split('@', 1)
if netloc.startswith('['):
host, port_part = netloc[1:].split(']', 1)
if port_part.startswith(':'):
port = port_part[1:]
elif ':' in netloc:
host, port = netloc.split(':', 1)
else:
host = netloc
return scheme, auth, host, port, path, query, fragment |
java | public void reconnect(final JobID jobId) {
Preconditions.checkNotNull(jobId, "JobID must not be null.");
final Tuple2<LeaderRetrievalService, JobManagerLeaderListener> jobLeaderService = jobLeaderServices.get(jobId);
if (jobLeaderService != null) {
jobLeaderService.f1.reconnect();
} else {
LOG.info("Cannot reconnect to job {} because it is not registered.", jobId);
}
} |
java | protected void createInitialAuthenticationRequestValidationCheckAction(final Flow flow) {
val action = createActionState(flow, CasWebflowConstants.STATE_ID_INITIAL_AUTHN_REQUEST_VALIDATION_CHECK,
CasWebflowConstants.ACTION_ID_INITIAL_AUTHN_REQUEST_VALIDATION);
createTransitionForState(action, CasWebflowConstants.TRANSITION_ID_AUTHENTICATION_FAILURE, CasWebflowConstants.STATE_ID_HANDLE_AUTHN_FAILURE);
createTransitionForState(action, CasWebflowConstants.TRANSITION_ID_ERROR, CasWebflowConstants.STATE_ID_INIT_LOGIN_FORM);
createTransitionForState(action, CasWebflowConstants.TRANSITION_ID_SUCCESS, CasWebflowConstants.STATE_ID_TICKET_GRANTING_TICKET_CHECK);
createTransitionForState(action, CasWebflowConstants.TRANSITION_ID_SUCCESS_WITH_WARNINGS, CasWebflowConstants.STATE_ID_SHOW_AUTHN_WARNING_MSGS);
} |
java | @Override
public void initializeParts() {
super.initializeParts();
node = new ListView<>();
node.getStyleClass().add("simple-listview-control");
fieldLabel = new Label(field.labelProperty().getValue());
node.setItems(field.getItems());
node.getSelectionModel().setSelectionMode(SelectionMode.MULTIPLE);
for (int i = 0; i < field.getItems().size(); i++) {
if (field.getSelection().contains(field.getItems().get(i))) {
node.getSelectionModel().select(i);
} else {
node.getSelectionModel().clearSelection(i);
}
}
} |
python | def String(length=None, **kwargs):
"""A string valued property with max. `length`."""
return Property(
length=length,
types=stringy_types,
convert=to_string,
**kwargs
) |
java | @Override
public Collection<V> put(K key, Collection<V> value) {
throw new UnsupportedOperationException();
} |
java | public ResourceBundle getResourceBundle(FacesContext ctx, String name) throws FacesException, NullPointerException
{
Application application = getMyfacesApplicationInstance(ctx);
if (application != null)
{
return application.getResourceBundle(ctx, name);
}
throw new UnsupportedOperationException();
} |
java | private void overrideViolationMessageIfNecessary(List<Violation> violations) {
if (violationMessage != null && violations != null) {
for (Violation violation : violations) {
violation.setMessage(violationMessage);
}
}
} |
java | protected Double getBilinearInterpolationValue(GriddedTile griddedTile,
TImage image, Double[][] leftLastColumns, Double[][] topLeftRows,
Double[][] topRows, int y, int x, float widthRatio,
float heightRatio, float destTop, float destLeft, float srcTop,
float srcLeft) {
// Determine which source pixel to use
float xSource = getXSource(x, destLeft, srcLeft, widthRatio);
float ySource = getYSource(y, destTop, srcTop, heightRatio);
CoverageDataSourcePixel sourcePixelX = getXSourceMinAndMax(xSource);
CoverageDataSourcePixel sourcePixelY = getYSourceMinAndMax(ySource);
Double[][] values = new Double[2][2];
populateValues(griddedTile, image, leftLastColumns, topLeftRows,
topRows, sourcePixelX, sourcePixelY, values);
Double value = null;
if (values != null) {
value = getBilinearInterpolationValue(sourcePixelX, sourcePixelY,
values);
}
return value;
} |
java | private int getNewReconnectIvl()
{
// The new interval is the current interval + random value.
int interval = currentReconnectIvl + (Utils.randomInt() % options.reconnectIvl);
// Only change the current reconnect interval if the maximum reconnect
// interval was set and if it's larger than the reconnect interval.
if (options.reconnectIvlMax > 0 && options.reconnectIvlMax > options.reconnectIvl) {
// Calculate the next interval
currentReconnectIvl = Math.min(currentReconnectIvl * 2, options.reconnectIvlMax);
}
return interval;
} |
python | def delete_group(self, group):
""" Group was deleted. """
try:
lgroup = self._get_group(group.name)
delete(lgroup, database=self._database)
except ObjectDoesNotExist:
# it doesn't matter if it doesn't exist
pass |
java | public <CTX> HtmlSanitizer.Policy build(
HtmlStreamEventReceiver out,
@Nullable HtmlChangeListener<? super CTX> listener,
@Nullable CTX context) {
return toFactory().apply(out, listener, context);
} |
java | public static Map<InetAddress, UUID> loadHostIds()
{
Map<InetAddress, UUID> hostIdMap = new HashMap<InetAddress, UUID>();
for (UntypedResultSet.Row row : executeInternal("SELECT peer, host_id FROM system." + PEERS_CF))
{
InetAddress peer = row.getInetAddress("peer");
if (row.has("host_id"))
{
hostIdMap.put(peer, row.getUUID("host_id"));
}
}
return hostIdMap;
} |
python | def applyTuple(self, tuple, right, env):
"""Apply a tuple to something else."""
if len(right) != 1:
raise exceptions.EvaluationError('Tuple (%r) can only be applied to one argument, got %r' % (self.left, self.right))
right = right[0]
return tuple(right) |
java | public DynamicType.Builder<T> merge(Collection<? extends ModifierContributor.ForType> modifierContributors) {
throw new UnsupportedOperationException("Cannot change modifiers of decorated type: " + instrumentedType);
} |
python | async def make_default_options_response(self) -> Response:
"""This is the default route function for OPTIONS requests."""
methods = _request_ctx_stack.top.url_adapter.allowed_methods()
return self.response_class('', headers={'Allow': ', '.join(methods)}) |
java | public long getFreeSpace() {
try {
StructStatVfs sb = Libcore.os.statvfs(path);
return sb.f_bfree * sb.f_bsize; // free block count * block size in bytes.
} catch (ErrnoException errnoException) {
return 0;
}
} |
java | @Pure
public static Element getElementFromPath(Node document, boolean caseSensitive, String... path) {
assert document != null : AssertMessages.notNullParameter(0);
return getElementFromPath(document, caseSensitive, 0, path);
} |
java | private EConvResult transConv(byte[] in, Ptr inPtr, int inStop, byte[] out, Ptr outPtr, int outStop, int flags, Ptr resultPositionPtr) {
// null check
if (elements[0].lastResult == EConvResult.AfterOutput) elements[0].lastResult = EConvResult.SourceBufferEmpty;
for (int i = numTranscoders - 1; 0 <= i; i--) {
switch (elements[i].lastResult) {
case InvalidByteSequence:
case IncompleteInput:
case UndefinedConversion:
case AfterOutput:
case Finished:
return transConvNeedReport(in, inPtr, inStop, out, outPtr, outStop, flags, resultPositionPtr, i + 1, i);
case DestinationBufferFull:
case SourceBufferEmpty:
break;
default:
throw new InternalException("unexpected transcode last result");
}
}
/* /^[sd]+$/ is confirmed. but actually /^s*d*$/. */
if (elements[numTranscoders - 1].lastResult == EConvResult.DestinationBufferFull && (flags & AFTER_OUTPUT) != 0) {
EConvResult res = transConv(NULL_STRING, Ptr.NULL, 0, out, outPtr, outStop, (flags & ~AFTER_OUTPUT) | PARTIAL_INPUT, resultPositionPtr);
return res.isSourceBufferEmpty() ? EConvResult.AfterOutput : res;
}
return transConvNeedReport(in, inPtr, inStop, out, outPtr, outStop, flags, resultPositionPtr, 0, -1);
} |
java | public <A extends Annotation> A findAnnotationOnBean(Class<A> annotationType) {
return applicationContext.findAnnotationOnBean(beanName, annotationType);
} |
python | def bothify(self, text='## ??', letters=string.ascii_letters):
"""
Replaces all placeholders with random numbers and letters.
:param text: string to be parsed
:returns: string with all numerical and letter placeholders filled in
"""
return self.lexify(self.numerify(text), letters=letters) |
python | def from_json_str(cls, json_str):
"""Convert json string representation into class instance.
Args:
json_str: json representation as string.
Returns:
New instance of the class with data loaded from json string.
"""
return cls.from_json(json.loads(json_str, cls=JsonDecoder)) |
python | def iteritems(self):
"""Present the email headers"""
for n,v in self.msgobj.__dict__["_headers"]:
yield n.lower(), v
return |
java | public static String getGeometryTypeNameFromConstraint(String constraint, int numericPrecision) {
int geometryTypeCode = GeometryTypeFromConstraint.geometryTypeFromConstraint(constraint, numericPrecision);
return SFSUtilities.getGeometryTypeNameFromCode(geometryTypeCode);
} |
java | @Override
public void groupingValue(String collateName) {
// collationName is ignored for now
if (groupBy == null) {
groupBy = new ArrayList<>(ARRAY_INITIAL_LENGTH);
}
groupBy.add(resolveAlias(propertyPath));
} |
java | private void checkTenantApps(Tenant tenant) {
m_logger.info(" Tenant: {}", tenant.getName());
try {
Iterator<DRow> rowIter =
DBService.instance(tenant).getAllRows(SchemaService.APPS_STORE_NAME).iterator();
if (!rowIter.hasNext()) {
m_logger.info(" <no applications>");
}
while (rowIter.hasNext()) {
DRow row = rowIter.next();
ApplicationDefinition appDef = loadAppRow(tenant, getColumnMap(row.getAllColumns(1024).iterator()));
if (appDef != null) {
String appName = appDef.getAppName();
String ssName = getStorageServiceOption(appDef);
m_logger.info(" Application '{}': StorageService={}; keyspace={}",
new Object[]{appName, ssName, tenant.getName()});
if (DoradusServer.instance().findStorageService(ssName) == null) {
m_logger.warn(" >>>Application '{}' uses storage service '{}' which has not been " +
"initialized; application will not be accessible via this server",
appDef.getAppName(), ssName);
}
}
}
} catch (Exception e) {
m_logger.warn("Could not check tenant '" + tenant.getName() +
"'. Applications may be unavailable.", e);
}
} |
python | def dnld_gaf(species_txt, prt=sys.stdout, loading_bar=True):
"""Download GAF file if necessary."""
return dnld_gafs([species_txt], prt, loading_bar)[0] |
python | def parse(self, target):
""" Parse nested rulesets
and save it in cache.
"""
if isinstance(target, ContentNode):
if target.name:
self.parent = target
self.name.parse(self)
self.name += target.name
target.ruleset.append(self)
self.root.cache['rset'][str(self.name).split()[0]].add(self)
super(Ruleset, self).parse(target) |
python | def site(self, **params):
"""Stream site
Accepted params found at:
https://dev.twitter.com/docs/api/1.1/get/site
"""
url = 'https://sitestream.twitter.com/%s/site.json' \
% self.streamer.api_version
self.streamer._request(url, params=params) |
java | public boolean userHasLock(String username) {
Objects.requireNonNull(username, Required.USERNAME.toString());
boolean lock = false;
Config config = Application.getInstance(Config.class);
Cache cache = Application.getInstance(CacheProvider.class).getCache(CacheName.AUTH);
AtomicInteger counter = cache.getCounter(username);
if (counter != null && counter.get() > config.getAuthenticationLock()) {
lock = true;
}
return lock;
} |
java | public TextMessageBuilder addQuickLocationReply(String locationMessage) {
if (this.quickReplies == null) {
this.quickReplies = new ArrayList<QuickReply>();
}
this.quickReplies.add(new QuickReply(locationMessage));
return this;
} |
python | def validate(self):
""" validate: Makes sure audio is valid
Args: None
Returns: boolean indicating if audio is valid
"""
from .files import AudioFile
try:
assert self.kind == content_kinds.AUDIO, "Assumption Failed: Node should be audio"
assert self.questions == [], "Assumption Failed: Audio should not have questions"
assert len(self.files) > 0, "Assumption Failed: Audio should have at least one file"
assert any(filter(lambda f: isinstance(f, AudioFile), self.files)), "Assumption Failed: Audio should have at least one audio file"
return super(AudioNode, self).validate()
except AssertionError as ae:
raise InvalidNodeException("Invalid node ({}): {} - {}".format(ae.args[0], self.title, self.__dict__)) |
python | def _group_chunks_by_entities(self, chunks, entities):
"""Groups chunks by entities retrieved from NL API Entity Analysis.
Args:
chunks (:obj:`budou.chunk.ChunkList`): List of chunks to be processed.
entities (:obj:`list` of :obj:`dict`): List of entities.
Returns:
A chunk list. (:obj:`budou.chunk.ChunkList`)
"""
for entity in entities:
chunks_to_concat = chunks.get_overlaps(
entity['beginOffset'], len(entity['content']))
if not chunks_to_concat:
continue
new_chunk_word = u''.join([chunk.word for chunk in chunks_to_concat])
new_chunk = Chunk(new_chunk_word)
chunks.swap(chunks_to_concat, new_chunk)
return chunks |
python | def _namify_arguments(mapping):
"""
Ensure that a mapping of names to parameters has the parameters set to the
correct name.
"""
result = []
for name, parameter in mapping.iteritems():
parameter.name = name
result.append(parameter)
return result |
python | def gradings(gradingScheme):
''' Determine the list of gradings in this scheme as rendered string.
TODO: Use nice little icons instead of (p) / (f) marking.
'''
result = []
for grading in gradingScheme.gradings.all():
if grading.means_passed:
result.append(str(grading) + " (pass)")
else:
result.append(str(grading) + " (fail)")
return ' - '.join(result) |
java | @Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
if ((arguments==null) || (arguments.length!=1)) {
return null;
}
BitcoinTransaction bitcoinTransaction;
if (arguments[0].get() instanceof HiveBitcoinTransaction) { // this happens if the table is in the original file format
bitcoinTransaction = BitcoinUDFUtil.convertToBitcoinTransaction((HiveBitcoinTransaction)arguments[0].get());
} else { // this happens if the table has been imported into a more optimized analytics format, such as ORC. However, usually we expect that the first case will be used mostly (the hash is generated during extraction from the input format)
// check if all bitcointransaction fields are available <struct<version:int,incounter:binary,outcounter:binary,listofinputs:array<struct<prevtransactionhash:binary,previoustxoutindex:bigint,txinscriptlength:binary,txinscript:binary,seqno:bigint>>,listofoutputs:array<struct<value:bigint,txoutscriptlength:binary,txoutscript:binary>>,locktime:int>
Object originalObject=arguments[0].get();
StructField versionSF=soi.getStructFieldRef("version");
StructField incounterSF=soi.getStructFieldRef("incounter");
StructField outcounterSF=soi.getStructFieldRef("outcounter");
StructField listofinputsSF=soi.getStructFieldRef("listofinputs");
StructField listofoutputsSF=soi.getStructFieldRef("listofoutputs");
StructField locktimeSF=soi.getStructFieldRef("locktime");
boolean inputsNull = (incounterSF==null) || (listofinputsSF==null);
boolean outputsNull = (outcounterSF==null) || (listofoutputsSF==null);
boolean otherAttributeNull = (versionSF==null) || (locktimeSF==null);
if (inputsNull || outputsNull || otherAttributeNull) {
LOG.warn("Structure does not correspond to BitcoinTransaction");
return null;
}
int version = wioi.get(soi.getStructFieldData(originalObject,versionSF));
byte[] inCounter = wboi.getPrimitiveJavaObject(soi.getStructFieldData(originalObject,incounterSF));
byte[] outCounter = wboi.getPrimitiveJavaObject(soi.getStructFieldData(originalObject,outcounterSF));
int locktime = wioi.get(soi.getStructFieldData(originalObject,locktimeSF));
Object listofinputsObject = soi.getStructFieldData(originalObject,listofinputsSF);
ListObjectInspector loiInputs=(ListObjectInspector)listofinputsSF.getFieldObjectInspector();
List<BitcoinTransactionInput> listOfInputsArray = readListOfInputsFromTable(loiInputs,listofinputsObject);
Object listofoutputsObject = soi.getStructFieldData(originalObject,listofoutputsSF);
ListObjectInspector loiOutputs=(ListObjectInspector)listofoutputsSF.getFieldObjectInspector();
List<BitcoinTransactionOutput> listOfOutputsArray = readListOfOutputsFromTable(loiOutputs,listofoutputsObject);
bitcoinTransaction = new BitcoinTransaction(version,inCounter,listOfInputsArray,outCounter,listOfOutputsArray,locktime);
}
byte[] transactionHash=null;
try {
transactionHash = BitcoinUtil.getTransactionHash(bitcoinTransaction);
} catch (IOException ioe) {
LOG.error(ioe);
throw new HiveException(ioe.toString());
}
return new BytesWritable(transactionHash);
} |
python | def calculate_lvgd_voltage_current_stats(nw):
"""
LV Voltage and Current Statistics for an arbitrary network
Note
----
Aggregated Load Areas are excluded.
Parameters
----------
nw: :any:`list` of NetworkDing0
The MV grid(s) to be studied
Returns
-------
pandas.DataFrame
nodes_df : Dataframe containing voltage, respectively current, statis
for every critical node, resp. every critical station, in every LV grid
in nw.
pandas.DataFrame
lines_df : Dataframe containing current statistics for every critical
line, in every LV grid in nw.
"""
##############################
# close circuit breakers
nw.control_circuit_breakers(mode='close')
##############################
nodes_idx = 0
nodes_dict = {}
branches_idx = 0
branches_dict = {}
for mv_district in nw.mv_grid_districts():
for LA in mv_district.lv_load_areas():
if not LA.is_aggregated:
for lv_district in LA.lv_grid_districts():
# nodes voltage
crit_nodes = get_critical_voltage_at_nodes(lv_district.lv_grid)
for node in crit_nodes:
nodes_idx += 1
nodes_dict[nodes_idx] = {
'MV_grid_id': mv_district.mv_grid.id_db,
'LV_grid_id': lv_district.lv_grid.id_db,
'LA_id': LA.id_db,
'node id': node['node'].__repr__(),
'v_diff_0': node['v_diff'][0],
'v_diff_1': node['v_diff'][1],
's_max_0': 'NA',
's_max_1': 'NA',
'V nominal': lv_district.lv_grid.v_level,
}
# branches currents
critical_branches, critical_stations = get_critical_line_loading(lv_district.lv_grid)
for branch in critical_branches:
branches_idx += 1
branches_dict[branches_idx] = {
'MV_grid_id': mv_district.mv_grid.id_db,
'LV_grid_id': lv_district.lv_grid.id_db,
'LA_id': LA.id_db,
'branch id': branch['branch'].__repr__(),
's_max_0': branch['s_max'][0],
's_max_1': branch['s_max'][1],
}
# stations
for node in critical_stations:
nodes_idx += 1
nodes_dict[nodes_idx] = {
'MV_grid_id': mv_district.mv_grid.id_db,
'LV_grid_id': lv_district.lv_grid.id_db,
'LA_id': LA.id_db,
'node id': node['station'].__repr__(),
's_max_0': node['s_max'][0],
's_max_1': node['s_max'][1],
'v_diff_0': 'NA',
'v_diff_1': 'NA',
}
nodes_df = pd.DataFrame.from_dict(nodes_dict, orient='index')
branches_df = pd.DataFrame.from_dict(branches_dict, orient='index')
if not nodes_df.empty:
nodes_df = nodes_df.set_index('node id')
nodes_df = nodes_df.fillna(0)
nodes_df = nodes_df[sorted(nodes_df.columns.tolist())]
nodes_df.sort_index(inplace=True)
if not branches_df.empty:
branches_df = branches_df.set_index('branch id')
branches_df = branches_df.fillna(0)
branches_df = branches_df[sorted(branches_df.columns.tolist())]
branches_df.sort_index(inplace=True)
return nodes_df, branches_df |
python | def _parse_custom_mpi_options(custom_mpi_options):
# type: (str) -> Tuple[argparse.Namespace, List[str]]
"""Parse custom MPI options provided by user. Known options default value will be overridden
and unknown options would be identified separately."""
parser = argparse.ArgumentParser()
parser.add_argument('--NCCL_DEBUG', default="INFO", type=str)
return parser.parse_known_args(custom_mpi_options.split()) |
python | def parabola(xy, amplitude, x0, y0, sx, sy, theta):
"""Evaluate a 2D parabola given by:
f(x,y) = f_0 - (1/2) * \delta^T * R * \Sigma * R^T * \delta
where
\delta = [(x - x_0), (y - y_0)]
and R is the matrix for a 2D rotation by angle \theta and \Sigma
is the covariance matrix:
\Sigma = [[1/\sigma_x^2, 0 ],
[0 , 1/\sigma_y^2]]
Parameters
----------
xy : tuple
Tuple containing x and y arrays for the values at which the
parabola will be evaluated.
amplitude : float
Constant offset value.
x0 : float
Centroid in x coordinate.
y0 : float
Centroid in y coordinate.
sx : float
Standard deviation along first axis (x-axis when theta=0).
sy : float
Standard deviation along second axis (y-axis when theta=0).
theta : float
Rotation angle in radians.
Returns
-------
vals : `~numpy.ndarray`
Values of the parabola evaluated at the points defined in the
`xy` input tuple.
"""
x = xy[0]
y = xy[1]
cth = np.cos(theta)
sth = np.sin(theta)
a = (cth ** 2) / (2 * sx ** 2) + (sth ** 2) / (2 * sy ** 2)
b = -(np.sin(2 * theta)) / (4 * sx ** 2) + (np.sin(2 * theta)) / (
4 * sy ** 2)
c = (sth ** 2) / (2 * sx ** 2) + (cth ** 2) / (2 * sy ** 2)
vals = amplitude - (a * ((x - x0) ** 2) +
2 * b * (x - x0) * (y - y0) +
c * ((y - y0) ** 2))
return vals |
java | static DateTime determineRotationPeriodAnchor(@Nullable DateTime lastAnchor, Period period) {
final Period normalized = period.normalizedStandard();
int years = normalized.getYears();
int months = normalized.getMonths();
int weeks = normalized.getWeeks();
int days = normalized.getDays();
int hours = normalized.getHours();
int minutes = normalized.getMinutes();
int seconds = normalized.getSeconds();
if (years == 0 && months == 0 && weeks == 0 && days == 0 && hours == 0 && minutes == 0 && seconds == 0) {
throw new IllegalArgumentException("Invalid rotation period specified");
}
// find the largest non-zero stride in the period. that's our anchor type. statement order matters here!
DateTimeFieldType largestStrideType = null;
if (seconds > 0) largestStrideType = secondOfMinute();
if (minutes > 0) largestStrideType = minuteOfHour();
if (hours > 0) largestStrideType = hourOfDay();
if (days > 0) largestStrideType = dayOfMonth();
if (weeks > 0) largestStrideType = weekOfWeekyear();
if (months > 0) largestStrideType = monthOfYear();
if (years > 0) largestStrideType = year();
if (largestStrideType == null) {
throw new IllegalArgumentException("Could not determine rotation stride length.");
}
final DateTime anchorTime = MoreObjects.firstNonNull(lastAnchor, Tools.nowUTC());
final DateTimeField field = largestStrideType.getField(anchorTime.getChronology());
// use normalized here to make sure we actually have the largestStride type available! see https://github.com/Graylog2/graylog2-server/issues/836
int periodValue = normalized.get(largestStrideType.getDurationType());
final long fieldValue = field.roundFloor(anchorTime.getMillis());
final int fieldValueInUnit = field.get(fieldValue);
if (periodValue == 0) {
// https://github.com/Graylog2/graylog2-server/issues/836
log.warn("Determining stride length failed because of a 0 period. Defaulting back to 1 period to avoid crashing, but this is a bug!");
periodValue = 1;
}
final long difference = fieldValueInUnit % periodValue;
final long newValue = field.add(fieldValue, -1 * difference);
return new DateTime(newValue, DateTimeZone.UTC);
} |
python | def get_box_folder_location():
"""
Try to locate the Box folder.
Returns:
(str) Full path to the current Box folder
"""
box_prefs_path = ('Library/Application Support/Box/Box Sync/'
'sync_root_folder.txt')
box_home = None
box_prefs = os.path.join(os.environ['HOME'], box_prefs_path)
try:
with open(box_prefs, 'r') as sync_path:
data = sync_path.read()
box_home = data
except IOError:
error("Unable to find your Box prefs =(")
return box_home |
python | def inference(self, kern, X, Z, likelihood, Y, indexD, output_dim, Y_metadata=None, Lm=None, dL_dKmm=None, Kuu_sigma=None):
"""
The first phase of inference:
Compute: log-likelihood, dL_dKmm
Cached intermediate results: Kmm, KmmInv,
"""
input_dim = Z.shape[0]
uncertain_inputs = isinstance(X, VariationalPosterior)
beta = 1./likelihood.variance
if len(beta)==1:
beta = np.zeros(output_dim)+beta
beta_exp = np.zeros(indexD.shape[0])
for d in range(output_dim):
beta_exp[indexD==d] = beta[d]
psi0, psi1, psi2 = self.gatherPsiStat(kern, X, Z, Y, beta, uncertain_inputs)
psi2_sum = (beta_exp[:,None,None]*psi2).sum(0)/output_dim
#======================================================================
# Compute Common Components
#======================================================================
Kmm = kern.K(Z).copy()
if Kuu_sigma is not None:
diag.add(Kmm, Kuu_sigma)
else:
diag.add(Kmm, self.const_jitter)
Lm = jitchol(Kmm)
logL = 0.
dL_dthetaL = np.zeros(output_dim)
dL_dKmm = np.zeros_like(Kmm)
dL_dpsi0 = np.zeros_like(psi0)
dL_dpsi1 = np.zeros_like(psi1)
dL_dpsi2 = np.zeros_like(psi2)
wv = np.empty((Kmm.shape[0],output_dim))
for d in range(output_dim):
idx_d = indexD==d
Y_d = Y[idx_d]
N_d = Y_d.shape[0]
beta_d = beta[d]
psi2_d = psi2[idx_d].sum(0)*beta_d
psi1Y = Y_d.T.dot(psi1[idx_d])*beta_d
psi0_d = psi0[idx_d].sum()*beta_d
YRY_d = np.square(Y_d).sum()*beta_d
LmInvPsi2LmInvT = backsub_both_sides(Lm, psi2_d, 'right')
Lambda = np.eye(Kmm.shape[0])+LmInvPsi2LmInvT
LL = jitchol(Lambda)
LmLL = Lm.dot(LL)
b = dtrtrs(LmLL, psi1Y.T)[0].T
bbt = np.square(b).sum()
v = dtrtrs(LmLL, b.T, trans=1)[0].T
LLinvPsi1TYYTPsi1LLinvT = tdot(b.T)
tmp = -backsub_both_sides(LL, LLinvPsi1TYYTPsi1LLinvT)
dL_dpsi2R = backsub_both_sides(Lm, tmp+np.eye(input_dim))/2
logL_R = -N_d*np.log(beta_d)
logL += -((N_d*log_2_pi+logL_R+psi0_d-np.trace(LmInvPsi2LmInvT))+YRY_d- bbt)/2.
dL_dKmm += dL_dpsi2R - backsub_both_sides(Lm, LmInvPsi2LmInvT)/2
dL_dthetaL[d:d+1] = (YRY_d*beta_d + beta_d*psi0_d - N_d*beta_d)/2. - beta_d*(dL_dpsi2R*psi2_d).sum() - beta_d*np.trace(LLinvPsi1TYYTPsi1LLinvT)
dL_dpsi0[idx_d] = -beta_d/2.
dL_dpsi1[idx_d] = beta_d*np.dot(Y_d,v)
dL_dpsi2[idx_d] = beta_d*dL_dpsi2R
wv[:,d] = v
LmInvPsi2LmInvT = backsub_both_sides(Lm, psi2_sum, 'right')
Lambda = np.eye(Kmm.shape[0])+LmInvPsi2LmInvT
LL = jitchol(Lambda)
LmLL = Lm.dot(LL)
logdet_L = 2.*np.sum(np.log(np.diag(LL)))
dL_dpsi2R_common = dpotri(LmLL)[0]/-2.
dL_dpsi2 += dL_dpsi2R_common[None,:,:]*beta_exp[:,None,None]
for d in range(output_dim):
dL_dthetaL[d] += (dL_dpsi2R_common*psi2[indexD==d].sum(0)).sum()*-beta[d]*beta[d]
dL_dKmm += dL_dpsi2R_common*output_dim
logL += -output_dim*logdet_L/2.
#======================================================================
# Compute dL_dKmm
#======================================================================
# dL_dKmm = dL_dpsi2R - output_dim* backsub_both_sides(Lm, LmInvPsi2LmInvT)/2 #LmInv.T.dot(LmInvPsi2LmInvT).dot(LmInv)/2.
#======================================================================
# Compute the Posterior distribution of inducing points p(u|Y)
#======================================================================
LLInvLmT = dtrtrs(LL, Lm.T)[0]
cov = tdot(LLInvLmT.T)
wd_inv = backsub_both_sides(Lm, np.eye(input_dim)- backsub_both_sides(LL, np.identity(input_dim), transpose='left'), transpose='left')
post = Posterior(woodbury_inv=wd_inv, woodbury_vector=wv, K=Kmm, mean=None, cov=cov, K_chol=Lm)
#======================================================================
# Compute dL_dthetaL for uncertian input and non-heter noise
#======================================================================
# for d in range(output_dim):
# dL_dthetaL[d:d+1] += - beta[d]*beta[d]*(dL_dpsi2R[None,:,:] * psi2[indexD==d]/output_dim).sum()
# dL_dthetaL += - (dL_dpsi2R[None,:,:] * psi2_sum*D beta*(dL_dpsi2R*psi2).sum()
#======================================================================
# Compute dL_dpsi
#======================================================================
if not uncertain_inputs:
dL_dpsi1 += (psi1[:,None,:]*dL_dpsi2).sum(2)*2.
if uncertain_inputs:
grad_dict = {'dL_dKmm': dL_dKmm,
'dL_dpsi0':dL_dpsi0,
'dL_dpsi1':dL_dpsi1,
'dL_dpsi2':dL_dpsi2,
'dL_dthetaL':dL_dthetaL}
else:
grad_dict = {'dL_dKmm': dL_dKmm,
'dL_dKdiag':dL_dpsi0,
'dL_dKnm':dL_dpsi1,
'dL_dthetaL':dL_dthetaL}
return post, logL, grad_dict |
python | def check_labels_file_header(filename):
"""Validate that filename corresponds to labels for the MNIST dataset."""
with tf.gfile.Open(filename, 'rb') as f:
magic = read32(f)
read32(f) # num_items, unused
if magic != 2049:
raise ValueError('Invalid magic number %d in MNIST file %s' % (magic,
f.name)) |
python | def export(self, image_path, tmptar=None):
'''export will export an image, sudo must be used.
Parameters
==========
image_path: full path to image
tmptar: if defined, use custom temporary path for tar export
'''
from spython.utils import check_install
check_install()
if tmptar is None:
tmptar = "/%s/tmptar.tar" %(tempfile.mkdtemp())
cmd = ['singularity', 'image.export', '-f', tmptar, image_path]
output = self.run_command(cmd, sudo=False)
return tmptar |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.