language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
---|---|
java | public Date getLastModified(String filename)
throws IOException, ServerException {
if (filename == null) {
throw new IllegalArgumentException("Required argument missing");
}
Command cmd = new Command("MDTM", filename);
Reply reply = null;
try {
reply = controlChannel.execute(cmd);
} catch (FTPReplyParseException rpe) {
throw ServerException.embedFTPReplyParseException(rpe);
} catch (UnexpectedReplyCodeException urce) {
throw ServerException.embedUnexpectedReplyCodeException(
urce,
"Server refused changing transfer mode");
}
if (dateFormat == null) {
dateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
dateFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
}
try {
return dateFormat.parse(reply.getMessage());
} catch (ParseException e) {
throw ServerException.embedFTPReplyParseException(
new FTPReplyParseException(
0,
"Invalid file modification time reply: " + reply));
}
} |
def plot_contours(self, grid, filled=True, ax=None, labels=None,
                  subplots_kw=dict(), **kwargs):
    """
    Plot equipotentials contours. Computes the potential energy on a grid
    (specified by the array `grid`).

    .. warning:: Right now the grid input must be arrays and must already
        be in the unit system of the potential. Quantity support is coming...

    Parameters
    ----------
    grid : tuple
        Coordinate grids or slice value for each dimension. Should be a
        tuple of 1D arrays or numbers.
    filled : bool (optional)
        Use :func:`~matplotlib.pyplot.contourf` instead of
        :func:`~matplotlib.pyplot.contour`. Default is ``True``.
    ax : matplotlib.Axes (optional)
    labels : iterable (optional)
        List of axis labels.
    subplots_kw : dict
        kwargs passed to matplotlib's subplots() function if an axes object
        is not specified.
    kwargs : dict
        kwargs passed to either contourf() or plot().

    Returns
    -------
    fig : `~matplotlib.Figure`
    """
    # NOTE(review): `subplots_kw=dict()` is a mutable default argument; it is
    # only read here, never mutated, so this is currently benign.
    import matplotlib.pyplot as plt
    from matplotlib import cm
    # figure out which elements are iterable, which are numeric
    _grids = []
    _slices = []
    for ii, g in enumerate(grid):
        if isiterable(g):
            _grids.append((ii, g))
        else:
            _slices.append((ii, g))
    # figure out the dimensionality
    ndim = len(_grids)
    # if ndim > 2, don't know how to handle this!
    if ndim > 2:
        raise ValueError("ndim > 2: you can only make contours on a 2D grid. For other "
                         "dimensions, you have to specify values to slice.")
    if ax is None:
        # default figsize
        fig, ax = plt.subplots(1, 1, **subplots_kw)
    else:
        fig = ax.figure
    if ndim == 1:
        # 1D curve: evaluate along the single iterable axis; the sliced
        # axes are held fixed at their scalar values.
        x1 = _grids[0][1]
        r = np.zeros((len(_grids) + len(_slices), len(x1)))
        r[_grids[0][0]] = x1
        for ii, slc in _slices:
            r[ii] = slc
        # assumes self.units['length'] is a unit object multiplying the
        # coordinate array -- TODO confirm against the potential class
        Z = self.energy(r*self.units['length']).value
        ax.plot(x1, Z, **kwargs)
        if labels is not None:
            ax.set_xlabel(labels[0])
            ax.set_ylabel("potential")
    else:
        # 2D contours: evaluate on the flattened meshgrid, then restore the
        # 2D shape for contouring.
        x1, x2 = np.meshgrid(_grids[0][1], _grids[1][1])
        shp = x1.shape
        x1, x2 = x1.ravel(), x2.ravel()
        r = np.zeros((len(_grids) + len(_slices), len(x1)))
        r[_grids[0][0]] = x1
        r[_grids[1][0]] = x2
        for ii, slc in _slices:
            r[ii] = slc
        Z = self.energy(r*self.units['length']).value
        # make default colormap not suck
        cmap = kwargs.pop('cmap', cm.Blues)
        if filled:
            cs = ax.contourf(x1.reshape(shp), x2.reshape(shp), Z.reshape(shp),
                             cmap=cmap, **kwargs)
        else:
            cs = ax.contour(x1.reshape(shp), x2.reshape(shp), Z.reshape(shp),
                            cmap=cmap, **kwargs)
        if labels is not None:
            ax.set_xlabel(labels[0])
            ax.set_ylabel(labels[1])
    return fig
java | public synchronized boolean canBeConfigured(HttpContext httpContext) {
return canBeConfigured(httpContext, servletModels)
&& canBeConfigured(httpContext, filterModels.values())
&& canBeConfigured(httpContext, eventListenerModels.values())
&& canBeConfigured(httpContext, errorPageModels.values())
&& canBeConfigured(httpContext, loginConfigModels.values());
} |
/**
 * Renders the RL2 raster coverage as a PNG image clipped to the given
 * geometry, optionally reprojecting the geometry first.
 *
 * @param geom the clipping geometry
 * @param geomEpsg EPSG code of the geometry, or null if it is already in
 *            the raster's srid
 * @param width output image width in pixels
 * @param height output image height in pixels
 * @return the PNG bytes, or null if the query returned no row
 * @throws Exception on database errors
 */
public byte[] getRL2Image( Geometry geom, String geomEpsg, int width, int height ) throws Exception {
    String sql;
    String rasterName = getName();
    // NOTE(review): the SQL is assembled by string concatenation. The WKT
    // comes from geom.toText() and width/height are ints, but rasterName and
    // geomEpsg are interpolated verbatim -- confirm they are trusted values
    // (a parameterized query would be safer if the DB layer supports it).
    if (geomEpsg != null) {
        // Reproject the clipping geometry into the raster's srid.
        sql = "select GetMapImageFromRaster('" + rasterName + "', ST_Transform(ST_GeomFromText('" + geom.toText() + "', "
            + geomEpsg + "), " + srid + ") , " + width + " , " + height
            + ", 'default', 'image/png', '#ffffff', 0, 80, 1 )";
    } else {
        sql = "select GetMapImageFromRaster('" + rasterName + "', ST_GeomFromText('" + geom.toText() + "') , " + width + " , "
            + height + ", 'default', 'image/png', '#ffffff', 0, 80, 1 )";
    }
    return database.execOnConnection(mConn -> {
        try (IHMStatement stmt = mConn.createStatement()) {
            IHMResultSet resultSet = stmt.executeQuery(sql);
            if (resultSet.next()) {
                byte[] bytes = resultSet.getBytes(1);
                return bytes;
            }
        }
        return null;
    });
}
/**
 * Downloads this user's avatar image via a GET to the avatar endpoint.
 *
 * NOTE(review): the caller is responsible for closing the returned stream;
 * the underlying API response stays open until it is consumed/closed.
 *
 * @return stream of the avatar image bytes
 */
public InputStream getAvatar() {
    URL url = USER_AVATAR_TEMPLATE.build(this.getAPI().getBaseURL(), this.getID());
    BoxAPIRequest request = new BoxAPIRequest(this.getAPI(), url, "GET");
    BoxAPIResponse response = request.send();
    return response.getBody();
}
/**
 * Closes the current array or object scope and writes its closing bracket.
 *
 * @param empty scope constant for an empty array/object
 * @param nonempty scope constant for a non-empty array/object
 * @param closeBracket "]" or "}"
 * @return this writer, for call chaining
 * @throws IOException if the sink cannot be written
 * @throws IllegalStateException if the current scope is neither of the two
 *         expected constants, or a property name was written without a value
 */
private JsonWriter close(int empty, int nonempty, String closeBracket) throws IOException {
    int context = peekScope();
    if (context != nonempty && context != empty) {
        throw new IllegalStateException("Nesting problem.");
    }
    if (deferredName != null) {
        throw new IllegalStateException("Dangling name: " + deferredName);
    }
    stackSize--;
    pathNames[stackSize] = null; // Free the last path name so that it can be garbage collected!
    // Bump the element index of the enclosing scope now that this one closed.
    pathIndices[stackSize - 1]++;
    // Only a non-empty scope needs a newline before its closing bracket.
    if (context == nonempty) {
        newline();
    }
    sink.writeUtf8(closeBracket);
    return this;
}
java | public DefaultTableColumn addPropertyColumn(Object headerValue, String property, Class type) {
return addColumn(headerValue, property, new PropertyModel(rowModel, property, type));
} |
/**
 * Compares two strings under the given comparison modifier.
 * Thin alias for {@code isEqual(s1, s2, modifier)}, kept for API
 * compatibility; all comparison semantics live in that method.
 *
 * @param s1 first string (nullability depends on isEqual)
 * @param s2 second string
 * @param modifier comparison modifier flags
 * @return true when isEqual considers the strings equal
 */
public static boolean eq(String s1, String s2, int modifier) {
    return isEqual(s1, s2, modifier);
}
def get_collection(self, collection, filter=None, fields=None,
                   page_size=None):
    """Fetch a collection from the asset service at the given endpoint.

    Optional query parameters are forwarded only when truthy:
    - filter: GEL-style filter expression, e.g. "name=Vesuvius"
    - fields: comma-delimited field list, e.g. "uri,description"
    - page_size: page size, e.g. "100" (the service default)
    """
    query = {}
    for name, value in (('filter', filter),
                        ('fields', fields),
                        ('pageSize', page_size)):
        if value:
            query[name] = value
    endpoint = self.uri + '/v1' + collection
    return self.service._get(endpoint, params=query)
def get_effective_domain_id(request):
    """Gets the id of the default domain.

    Returns None when the request's default domain is DEFAULT_DOMAIN,
    signalling that no explicit domain id needs to be sent; otherwise
    returns the domain's id.
    """
    domain = get_default_domain(request)
    if domain.get('name') == DEFAULT_DOMAIN:
        return None
    return domain.get('id')
java | public static @Nonnull Entry<String,JsonElement> field(String key, JsonElement value) {
Entry<String, JsonElement> entry = new Entry<String,JsonElement>() {
@Override
public String getKey() {
return key;
}
@Override
public JsonElement getValue() {
return value;
}
@Override
public JsonElement setValue(JsonElement value) {
throw new UnsupportedOperationException("entries are immutable");
}};
return entry;
} |
/**
 * Custom serialization: writes the default fields, the table capacity,
 * then every key/value pair, terminated by a single null marker.
 *
 * NOTE: the wire format here must stay in sync with the matching
 * readObject implementation.
 *
 * @param out the serialization stream
 * @throws IOException if the stream cannot be written
 */
private void writeObject(ObjectOutputStream out) throws IOException
{
    out.defaultWriteObject();
    // Persist the capacity so the table can be rebuilt at the same size.
    out.writeInt(table.length);
    // Have to use null-terminated list because size might shrink
    // during iteration
    for (Iterator iter = entrySet().iterator(); iter.hasNext();)
    {
        Map.Entry entry = (Map.Entry) iter.next();
        out.writeObject(entry.getKey());
        out.writeObject(entry.getValue());
    }
    // End-of-entries sentinel.
    out.writeObject(null);
}
def get_unaligned_start_coord(self):
    """Return [filestart, innerstart] of the first unaligned line, or None.

    .. warning:: not implemented -- currently always raises
        NotImplementedError before reaching the intended logic.
    """
    # Bug fix: this previously wrote to stderr and called sys.exit(), which
    # raises SystemExit with status 0 -- silently terminating the whole
    # process with a "success" exit code on an error path. Raising
    # NotImplementedError surfaces the failure to callers instead.
    raise NotImplementedError("get_unaligned_start_coord is not implemented")
    # Intended implementation (currently unreachable):
    if len(self._unaligned) == 0: return None
    return [self._lines[self._unaligned[0]-1]['filestart'], self._lines[self._unaligned[0]-1]['innerstart']]
/**
 * Groups the elements of the iterable using the given list of closures,
 * delegating to the varargs {@code groupBy(Iterable, Object[])} overload.
 *
 * @param self the iterable to group
 * @param closures the grouping closures, applied in order
 * @return the (possibly nested) grouping map produced by the delegate
 */
public static Map groupBy(Iterable self, List<Closure> closures) {
    return groupBy(self, closures.toArray());
}
/**
 * Deserializes bytes 20-23 of the PDU header word.
 * The connection ID occupies the two high-order bytes; the two low-order
 * bytes are reserved and are validated as such.
 *
 * @param line the 32-bit word holding bytes 20-23
 * @throws InternetSCSIException if the reserved bytes are non-zero
 */
@Override
protected void deserializeBytes20to23 (final int line) throws InternetSCSIException {
    // High-order two bytes carry the connection ID (CID).
    connectionID = (line & Constants.FIRST_TWO_BYTES_MASK) >>> Constants.TWO_BYTES_SHIFT;
    // Low-order two bytes must be zero (reserved).
    Utils.isReserved(line & Constants.LAST_TWO_BYTES_MASK);
}
/**
 * Returns a new list built from this list and {@code right} via the
 * two-list constructor (concatenation semantics defined by that
 * constructor); this instance is not visibly modified here.
 *
 * @param right the list to combine with this one
 * @return the combined list
 */
public SerialArrayList<U> add(SerialArrayList<U> right) {
    return new SerialArrayList<U>(factory, this, right);
}
def _encrypted_data_keys_hash(hasher, encrypted_data_keys):
    """Generates the expected hash for the provided encrypted data keys.

    :param hasher: Existing hasher to use
    :type hasher: cryptography.hazmat.primitives.hashes.Hash
    :param iterable encrypted_data_keys: Encrypted data keys to hash
    :returns: Concatenated, sorted, list of all hashes
    :rtype: bytes
    """
    digests = []
    for data_key in encrypted_data_keys:
        # Each key is hashed independently on a fresh copy of the hasher.
        key_hasher = hasher.copy()
        key_hasher.update(serialize_encrypted_data_key(data_key))
        digests.append(key_hasher.finalize())
    # Sort so the result is order-independent with respect to the input.
    digests.sort()
    return b"".join(digests)
/**
 * Registers the protocol list for ALPN negotiation on the given engine via
 * the IBM JSSE extension (invoked reflectively through {@code ibmAlpnPut}).
 * Failures are intentionally swallowed (traced only) so that a missing or
 * misbehaving ALPN extension never breaks the connection setup.
 *
 * @param engine the SSL engine to configure
 * @param useAlpn when true, offer h2 and h1; when false, offer nothing
 */
protected void registerIbmAlpn(SSLEngine engine, boolean useAlpn) {
    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
        Tr.debug(tc, "registerIbmAlpn entry " + engine);
    }
    try {
        // invoke ALPNJSSEExt.put(engine, String[] protocols)
        String[] protocols;
        if (useAlpn) {
            protocols = new String[] { h2, h1 };
        } else {
            // don't pass any protocols; alpn not used
            protocols = new String[] {};
        }
        // Static reflective call: first arg null because the method is static.
        ibmAlpnPut.invoke(null, engine, protocols);
    } catch (InvocationTargetException ie) {
        // Unwrap to log the real cause thrown by the extension.
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "registerIbmAlpn exception: " + ie.getTargetException());
        }
    } catch (Exception e) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "registerIbmAlpn exception: " + e);
        }
    }
}
java | public Node nextNode() {
if (currentNode == null)
return null;
Node result = getFirstChild(currentNode);
if (result != null) {
currentNode = result;
return result;
}
result = getNextSibling(currentNode);
if (result != null) {
currentNode = result;
return result;
}
// return parent's 1st sibling.
Node parent = getParentNode(currentNode);
while (parent != null) {
result = getNextSibling(parent);
if (result != null) {
currentNode = result;
return result;
} else {
parent = getParentNode(parent);
}
}
// end , return null
return null;
} |
java | public void setFileContents(final String key, final byte[] contents) {
MockFileItem fileItem = new MockFileItem();
fileItem.set(contents);
files.put(key, new FileItem[]{fileItem});
} |
/**
 * Forwards the write command to the remote primary owner and asynchronously
 * awaits a single valid response, using the cache's synchronous RPC options.
 *
 * @param target address of the primary owner
 * @param command the data write command to forward
 * @return completion stage that completes with the valid response
 */
protected CompletionStage<ValidResponse> singleWriteOnRemotePrimary(Address target, DataWriteCommand command) {
    return rpcManager.invokeCommand(target, command, SingleResponseCollector.validOnly(), rpcManager.getSyncRpcOptions());
}
python | def _make_valid_bounds(self, test_bounds):
"""
Private method: process input bounds into a form acceptable by scipy.optimize,
and check the validity of said bounds.
:param test_bounds: minimum and maximum weight of an asset
:type test_bounds: tuple
:raises ValueError: if ``test_bounds`` is not a tuple of length two.
:raises ValueError: if the lower bound is too high
:return: a tuple of bounds, e.g ((0, 1), (0, 1), (0, 1) ...)
:rtype: tuple of tuples
"""
if len(test_bounds) != 2 or not isinstance(test_bounds, tuple):
raise ValueError(
"test_bounds must be a tuple of (lower bound, upper bound)"
)
if test_bounds[0] is not None:
if test_bounds[0] * self.n_assets > 1:
raise ValueError("Lower bound is too high")
return (test_bounds,) * self.n_assets |
/**
 * Parses one working-hours record and adds the resulting time range to the
 * calendar ranges. The record value appears to be a '|'-delimited list
 * whose first token ("s" or not) decides which following tokens hold the
 * start and end times -- TODO confirm against the file format spec.
 *
 * @param ranges target date-range collection
 * @param hoursRecord record whose value encodes a start/end time pair
 */
private void addHours(ProjectCalendarDateRanges ranges, Record hoursRecord)
{
    if (hoursRecord.getValue() != null)
    {
        String[] wh = hoursRecord.getValue().split("\\|");
        try
        {
            String startText;
            String endText;
            // Token layout depends on whether the record leads with "s".
            if (wh[0].equals("s"))
            {
                startText = wh[1];
                endText = wh[3];
            }
            else
            {
                startText = wh[3];
                endText = wh[1];
            }
            // for end time treat midnight as midnight next day
            if (endText.equals("00:00"))
            {
                endText = "24:00";
            }
            Date start = m_calendarTimeFormat.parse(startText);
            Date end = m_calendarTimeFormat.parse(endText);
            ranges.addRange(new DateRange(start, end));
        }
        catch (ParseException e)
        {
            // silently ignore date parse exceptions
            // (deliberate best-effort: malformed records are skipped)
        }
    }
}
/**
 * Runs a time-series lookup from the CLI and prints the results to stdout.
 *
 * @param tsdb the TSDB instance to query
 * @param use_data_table when true, scan the full data table instead of the
 *            meta table (can be very slow)
 * @param args CLI arguments; args[0] is presumably the subcommand, args[1]
 *            may be a metric name (no '='), and the remaining entries are
 *            tag=value pairs
 * @return 0 as the CLI exit code
 * @throws Exception if the meta table is missing or the lookup fails
 */
private static int lookup(final TSDB tsdb,
                          final boolean use_data_table,
                          final String[] args) throws Exception {
    if (!use_data_table) {
        // Fail fast when the meta table the lookup depends on is absent.
        tsdb.getClient().ensureTableExists(
            tsdb.getConfig().getString(
                "tsd.storage.hbase.meta_table")).joinUninterruptibly();
    }
    final SearchQuery query = new SearchQuery();
    query.setType(SearchType.LOOKUP);
    int index = 1;
    // A first argument without '=' is the metric; everything else is tags.
    if (!args[index].contains("=")) {
        query.setMetric(args[index++]);
    }
    final List<Pair<String, String>> tags =
        new ArrayList<Pair<String, String>>(args.length - index);
    for (; index < args.length; index++) {
        Tags.parse(tags, args[index]);
    }
    query.setTags(tags);
    if (use_data_table) {
        query.setUseMeta(false);
        LOG.warn("NOTE: Scanning the full data table may take a long time");
    }
    final TimeSeriesLookup lookup = new TimeSeriesLookup(tsdb, query);
    lookup.setToStdout(true);
    lookup.lookup();
    return 0;
}
java | @Override
public void render(final String word, BufferedImage image) {
Graphics2D g = image.createGraphics();
RenderingHints hints = new RenderingHints(
RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_ON);
hints.add(new RenderingHints(RenderingHints.KEY_RENDERING,
RenderingHints.VALUE_RENDER_QUALITY));
g.setRenderingHints(hints);
AttributedString as = new AttributedString(word);
as.addAttribute(TextAttribute.FONT, getRandomFont());
FontRenderContext frc = g.getFontRenderContext();
AttributedCharacterIterator aci = as.getIterator();
TextLayout tl = new TextLayout(aci, frc);
int xBaseline = (int) Math.round(image.getWidth() * XOFFSET);
int yBaseline = image.getHeight() - (int) Math.round(image.getHeight() * YOFFSET);
Shape shape = tl.getOutline(AffineTransform.getTranslateInstance(xBaseline, yBaseline));
g.setColor(getRandomColor());
g.setStroke(new BasicStroke(_strokeWidth));
g.draw(shape);
} |
/**
 * Processes the field attribute on an element: resolves the field name and
 * label (evaluating each as an expression when it contains '#' or '$'),
 * decorates matching input elements, then replaces the element in the DOM
 * with a generated Bootstrap field node.
 *
 * @param arguments the Thymeleaf processing arguments
 * @param element the element carrying the attribute
 * @param attributeName the attribute being processed (holds the field name)
 * @return ProcessorResult.OK
 */
@Override
public final ProcessorResult processAttribute(final Arguments arguments,
        final Element element, final String attributeName) {
    NestableNode parent = element.getParent();
    final String fieldNameExpr = element.getAttributeValue(attributeName);
    final String fieldName;
    // Only evaluate as an expression when expression markers are present.
    if (fieldNameExpr.contains("#") || fieldNameExpr.contains("$")) {
        fieldName = parse(arguments, fieldNameExpr);
    } else {
        fieldName = fieldNameExpr;
    }
    // Decorate only element types registered as valid form-field nodes.
    if (validFieldNodes.contains(element.getNormalizedName())) {
        addAttributesToInputElement(element, fieldName);
    }
    element.removeAttribute(attributeName);
    String labelExpr = element.getAttributeValue(BS_LABEL);
    if (labelExpr == null) {
        // No explicit label attribute: derive one from the field name.
        labelExpr = createLabelExpression(fieldName);
    }
    final String label;
    if (labelExpr.contains("#") || labelExpr.contains("$")) {
        label = parse(arguments, labelExpr);
    } else {
        label = labelExpr;
    }
    element.removeAttribute(BS_LABEL);
    // Reorganize the DOM
    // The generated field wrapper replaces the original element in place.
    final Node newNode =
        createBootstrapField(fieldName, label, element, showError());
    parent.insertAfter(element, newNode);
    parent.removeChild(element);
    return ProcessorResult.OK;
}
def create(self, handle=None, handle_type=None, **args):
    """
    Creates an ontology based on a handle

    Handle is one of the following
    - `FILENAME.json` : creates an ontology from an obographs json file
    - `obo:ONTID` : E.g. obo:pato - creates an ontology from obolibrary PURL (requires owltools)
    - `ONTID` : E.g. 'pato' - creates an ontology from a remote SPARQL query

    Arguments
    ---------
    handle : str
        specifies how to retrieve the ontology info
    """
    if handle is None:
        # NOTE(review): looks like leftover debug instrumentation -- the
        # counter is only incremented and logged here; confirm nothing else
        # reads self.test before removing.
        self.test = self.test+1
        logging.info("T: "+str(self.test))
        # Lazily build and cache a single module-level default ontology.
        global default_ontology
        if default_ontology is None:
            logging.info("Creating new instance of default ontology")
            default_ontology = create_ontology(default_ontology_handle)
        logging.info("Using default_ontology")
        return default_ontology
    # handle_type is accepted but not used here -- TODO confirm intended.
    return create_ontology(handle, **args)
def update_args(self, override_args):
    """Update the argument used to invoke the application

    Note that this will also update the dictionary of input
    and output files.

    Parameters
    -----------
    override_args : dict
        dictionary passed to the links
    """
    self.args = extract_arguments(override_args, self.args)
    # Recompute the input/output file mapping from the merged arguments.
    self._map_arguments(self.args)
    # When a scratch directory is configured, stage files through it and
    # propagate the stage manager to every child link.
    scratch_dir = self.args.get('scratch', None)
    if is_not_null(scratch_dir):
        self._file_stage = FileStageManager(scratch_dir, '.')
    for link in self._links.values():
        # NOTE(review): reaches into the link's private _set_file_stage --
        # presumably these classes are designed as friends; confirm.
        link._set_file_stage(self._file_stage)
    self._latch_file_info()
/**
 * Convenience overload: matches the message to the session using the
 * cookies taken from the message's own request header.
 *
 * @param message the HTTP message to process
 * @param session the session to match against
 */
public static void processMessageToMatchSession(HttpMessage message, HttpSession session) {
    processMessageToMatchSession(message, message.getRequestHeader().getHttpCookies(), session);
}
java | public static void readXLSXRuleFile(String xlsxFileName, HashMap<Integer, ContextRule> rules,
HashMap<String, TypeDefinition> conceptFeaturesMap,
HashMap<String, String> featureDefaultValueMap,
HashMap<String, String> valueFeatureNameMap) {
try {
FileInputStream inputStream = new FileInputStream(new File(xlsxFileName));
Workbook workbook = new XSSFWorkbook(inputStream);
Sheet firstSheet = workbook.getSheetAt(0);
Iterator<Row> iterator = firstSheet.iterator();
int id = 0;
while (iterator.hasNext()) {
Row nextRow = iterator.next();
Iterator<Cell> cellIterator = nextRow.cellIterator();
ArrayList<String> cells = new ArrayList<>();
while (cellIterator.hasNext()) {
Cell cell = cellIterator.next();
switch (cell.getCellTypeEnum()) {
case NUMERIC:
cells.add(cell.getNumericCellValue() + "");
break;
default:
cells.add(cell.getStringCellValue());
break;
}
}
if (cells.size() > 0 && cells.get(0).trim().length() > 0)
parseCells(cells, id, rules, conceptFeaturesMap, featureDefaultValueMap, valueFeatureNameMap);
id++;
}
} catch (IOException e) {
e.printStackTrace();
}
} |
/**
 * Resolves the timestamp format for a member, applying precedence:
 * member-level timestampFormat, then shape-level timestampFormat, then a
 * location-based default (header/query string), then a protocol default.
 * Explicit formats are rejected inside collections via failIfInCollection.
 *
 * @param c2jMemberDefinition the member whose format is being resolved
 * @param allC2jShapes all shapes in the model, for shape-level lookup
 * @param protocolString the service protocol name
 * @param parentShape the shape containing the member
 * @return the resolved timestamp format string
 * @throws RuntimeException if the protocol has no default format
 */
protected String getDefaultTimeFormatIfNull(Member c2jMemberDefinition, Map<String, Shape> allC2jShapes, String protocolString, Shape parentShape) {
    // 1. Member-level format wins.
    String timestampFormat = c2jMemberDefinition.getTimestampFormat();
    if (!StringUtils.isNullOrEmpty(timestampFormat)) {
        failIfInCollection(c2jMemberDefinition, parentShape);
        return TimestampFormat.fromValue(timestampFormat).getFormat();
    }
    // 2. Then the format declared on the member's shape.
    String shapeName = c2jMemberDefinition.getShape();
    Shape shape = allC2jShapes.get(shapeName);
    if (!StringUtils.isNullOrEmpty(shape.getTimestampFormat())) {
        failIfInCollection(c2jMemberDefinition, parentShape);
        return TimestampFormat.fromValue(shape.getTimestampFormat()).getFormat();
    }
    // 3. Then a default keyed off where the member is marshalled.
    String location = c2jMemberDefinition.getLocation();
    if (Location.HEADER.toString().equals(location)) {
        return defaultHeaderTimestamp();
    }
    if (Location.QUERY_STRING.toString().equals(location)) {
        return TimestampFormat.ISO_8601.getFormat();
    }
    // 4. Finally, the protocol-wide default.
    Protocol protocol = Protocol.fromValue(protocolString);
    switch (protocol) {
        case REST_XML:
        case QUERY:
        case EC2:
        case API_GATEWAY:
            return TimestampFormat.ISO_8601.getFormat();
        case ION:
        case REST_JSON:
        case AWS_JSON:
            return TimestampFormat.UNIX_TIMESTAMP.getFormat();
        case CBOR:
            return TimestampFormat.UNIX_TIMESTAMP_IN_MILLIS.getFormat();
    }
    throw new RuntimeException("Cannot determine timestamp format for protocol " + protocol);
}
/**
 * Replies the property that indicates whether this path is empty, i.e.
 * contains no drawable element. The property is created lazily on first
 * call and is bound so it recomputes whenever the inner types or inner
 * points change.
 *
 * @return the isEmpty property
 */
public BooleanProperty isEmptyProperty() {
    if (this.isEmpty == null) {
        this.isEmpty = new SimpleBooleanProperty(this, MathFXAttributeNames.IS_EMPTY);
        this.isEmpty.bind(Bindings.createBooleanBinding(() -> {
            // Empty unless at least one path element is drawable.
            final PathIterator2ai<PathElement2ifx> pi = getPathIterator();
            while (pi.hasNext()) {
                final PathElement2ifx pe = pi.next();
                if (pe.isDrawable()) {
                    return false;
                }
            }
            return true;
        }, innerTypesProperty(), innerPointsProperty()));
    }
    return this.isEmpty;
}
/**
 * Creates an organizational unit through the security manager.
 *
 * @param cms the current CMS context
 * @param ouFqn fully qualified name of the new organizational unit
 * @param description description of the new unit
 * @param flags flags for the new unit
 * @param resourceName name of the unit's first associated resource; may be
 *            null only for web-user units
 * @return the created organizational unit
 * @throws CmsException if the resource cannot be read or creation fails
 */
public CmsOrganizationalUnit createOrganizationalUnit(
    CmsObject cms,
    String ouFqn,
    String description,
    int flags,
    String resourceName) throws CmsException {

    CmsResource resource = null;
    if (((flags & CmsOrganizationalUnit.FLAG_WEBUSERS) == 0) || (resourceName != null)) {
        // only normal OUs have to have at least one resource
        resource = cms.readResource(resourceName);
    }
    return m_securityManager.createOrganizationalUnit(cms.getRequestContext(), ouFqn, description, flags, resource);
}
def stop_timer(self, request_len, reply_len, server_time=None,
               exception=False):
    """
    This is a low-level method that is called by pywbem at the end of an
    operation. It completes the measurement for that operation by capturing
    the needed data, and updates the statistics data, if statistics is
    enabled for the connection.

    Parameters:

      request_len (:term:`integer`)
        Size of the HTTP body of the CIM-XML request message, in Bytes.

      reply_len (:term:`integer`)
        Size of the HTTP body of the CIM-XML response message, in Bytes.

      server_time (:term:`number`)
        Time in seconds that the server optionally returns to the
        client in the HTTP response defining the time from when the
        server received the request to when it started sending the
        response. If `None`, there is no time from the server.

      exception (:class:`py:bool`)
        Boolean that specifies whether an exception was raised while
        processing the operation.

    Returns:

      float: The elapsed time for the operation that just ended, or
      `None` if the statistics container holding this object is not
      enabled.
    """
    if not self.container.enabled:
        return None
    # stop the timer
    if self._start_time is None:
        raise RuntimeError('stop_timer() called without preceding '
                           'start_timer()')
    dt = time.time() - self._start_time
    self._start_time = None
    self._count += 1
    self._time_sum += dt
    self._request_len_sum += request_len
    self._reply_len_sum += reply_len
    if exception:
        self._exception_count += 1
    if dt > self._time_max:
        self._time_max = dt
    if dt < self._time_min:
        self._time_min = dt
    if server_time:
        self._server_time_stored = True
        self._server_time_sum += server_time
        # Bug fix: the extremes were previously updated by comparing the
        # client-side elapsed time (dt) against the server-time bounds, so
        # the recorded server-time max/min tracked the wrong quantity.
        if server_time > self._server_time_max:
            self._server_time_max = server_time
        if server_time < self._server_time_min:
            self._server_time_min = server_time
    if request_len > self._request_len_max:
        self._request_len_max = request_len
    if request_len < self._request_len_min:
        self._request_len_min = request_len
    if reply_len > self._reply_len_max:
        self._reply_len_max = reply_len
    if reply_len < self._reply_len_min:
        self._reply_len_min = reply_len
    return dt
def create_remoteckan(cls, site_url, user_agent=None, user_agent_config_yaml=None, user_agent_lookup=None,
                      session=None, **kwargs):
    # type: (str, Optional[str], Optional[str], Optional[str], requests.Session, Any) -> ckanapi.RemoteCKAN
    """
    Create remote CKAN instance from configuration

    Args:
        site_url (str): Site url.
        user_agent (Optional[str]): User agent string. HDXPythonLibrary/X.X.X- is prefixed.
        user_agent_config_yaml (Optional[str]): Path to YAML user agent configuration. Ignored if user_agent supplied. Defaults to ~/.useragent.yml.
        user_agent_lookup (Optional[str]): Lookup key for YAML. Ignored if user_agent supplied.
        session (requests.Session): requests Session object to use. Defaults to calling hdx.utilities.session.get_session()

    Returns:
        ckanapi.RemoteCKAN: Remote CKAN instance
    """
    if not session:
        # Build a session with retry on idempotent (plus POST/PUT/DELETE)
        # methods, and take the user agent from its headers.
        session = get_session(user_agent, user_agent_config_yaml, user_agent_lookup, prefix=Configuration.prefix,
                              method_whitelist=frozenset(['HEAD', 'TRACE', 'GET', 'POST', 'PUT',
                                                          'OPTIONS', 'DELETE']), **kwargs)
        ua = session.headers['User-Agent']
    else:
        # Caller supplied a session: prefer an explicit 'full_agent' kwarg,
        # otherwise resolve the user agent from the configuration.
        ua = kwargs.get('full_agent')
        if not ua:
            ua = UserAgent.get(user_agent, user_agent_config_yaml, user_agent_lookup, prefix=Configuration.prefix,
                               **kwargs)
    return ckanapi.RemoteCKAN(site_url, user_agent=ua, session=session)
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
    """
    See :meth:`superclass method
    <.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
    for spec of input and result values.
    """
    # Every requested stddev type must be one this GMPE defines.
    assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
               for stddev_type in stddev_types)
    # Coefficients are looked up per intensity measure type.
    C = self.COEFFS[imt]
    mean = self._get_mean(
        C, rup.mag, rup.rake, rup.dip, dists.rrup, dists.rjb
    )
    stddevs = self._get_stddevs(C, rup.mag, stddev_types, dists.rrup.size)
    return mean, stddevs
java | public ListDevicesResult withDevices(DeviceSummary... devices) {
if (this.devices == null) {
setDevices(new java.util.ArrayList<DeviceSummary>(devices.length));
}
for (DeviceSummary ele : devices) {
this.devices.add(ele);
}
return this;
} |
/**
 * Setter for the timex2 feature (generated JCas boilerplate): after the
 * standard feature-presence check, writes the FSArray reference for
 * {@code v} into the CAS at this annotation's address.
 *
 * @param v the FSArray to store in the timex2 feature
 */
public void setTimex2(FSArray v) {
    if (Document_Type.featOkTst && ((Document_Type)jcasType).casFeat_timex2 == null)
        jcasType.jcas.throwFeatMissing("timex2", "de.julielab.jules.types.ace.Document");
    jcasType.ll_cas.ll_setRefValue(addr, ((Document_Type)jcasType).casFeatCode_timex2, jcasType.ll_cas.ll_getFSRef(v));
}
def fetch_token(self, **kwargs):
    """Fetch a new token using the supplied code.

    :param str code: A previously obtained auth code.
    """
    # Supply the configured client secret unless the caller overrode it.
    kwargs.setdefault('client_secret', self.client_secret)
    return self.session.fetch_token(token_url, **kwargs)
/**
 * Gets the specified pool, honoring the given options. Blocks the calling
 * thread until the async service call completes.
 *
 * @param poolId id of the pool to fetch
 * @param poolGetOptions additional options for the operation
 * @return the pool returned by the service
 */
public CloudPool get(String poolId, PoolGetOptions poolGetOptions) {
    return getWithServiceResponseAsync(poolId, poolGetOptions).toBlocking().single().body();
}
def _set_config_mode(self, v, load=False):
    """
    Setter method for config_mode, mapped from YANG variable /interface/fc_port/config_mode (interface-fc-config-mode-type)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_config_mode is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_config_mode() directly.

    YANG Description: Port Mode Configuration
    """
    # Unwrap values that carry their underlying YANG type.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # (generated) Validate/coerce v against the restricted enumeration
        # {auto, nport, eport, fport}; raises on anything else.
        t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'auto': {'value': 1}, u'nport': {'value': 3}, u'eport': {'value': 2}, u'fport': {'value': 4}},), default=unicode("auto"), is_leaf=True, yang_name="config-mode", rest_name="config-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure port as F/E/N/Auto', u'cli-incomplete-no': None, u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='interface-fc-config-mode-type', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """config_mode must be of a type compatible with interface-fc-config-mode-type""",
            'defined-type': "brocade-interface:interface-fc-config-mode-type",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'auto': {'value': 1}, u'nport': {'value': 3}, u'eport': {'value': 2}, u'fport': {'value': 4}},), default=unicode("auto"), is_leaf=True, yang_name="config-mode", rest_name="config-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure port as F/E/N/Auto', u'cli-incomplete-no': None, u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='interface-fc-config-mode-type', is_config=True)""",
        })
    self.__config_mode = t
    # Notify the backend, when one is attached, that the value changed.
    if hasattr(self, '_set'):
        self._set()
java | public static String rtrimWildcardTokens(String input) {
String[] tokens = tokenizePathAsArray(input);
StringBuilder sb = new StringBuilder();
for (int i = 0; i < tokens.length; i++) {
if (hasWildcards(tokens[i])) {
break;
}
if (i > 0 && sb.charAt(sb.length() - 1) != File.separatorChar) {
sb.append(File.separatorChar);
}
sb.append(tokens[i]);
}
return sb.toString();
} |
python | def _parse_g_dir(repo, gdirpath):
"""parses a repo directory two-levels deep"""
for f in repo.get_contents(gdirpath):
if f.type == "dir":
for sf in repo.get_contents(f.path):
yield sf
else:
yield f |
/**
 * Shuts this component down in order: stops accepting new entries,
 * completes the cursor so no consumer waits on a new batch, closes the
 * garbage-collection consumer if present, then blocks until every consumer
 * runner has finished its in-flight batch and terminated.
 */
public void shutdown() {
    // Prevent new entries from being added ...
    this.addEntries.set(false);
    // Mark the cursor as being finished; this will stop all consumers from waiting for a batch ...
    this.cursor.complete();
    // Each of the consumer threads will complete the batch they're working on, but will then terminate ...
    // Stop the garbage collection thread (if running) ...
    if (this.gcConsumer != null) this.gcConsumer.close();
    // Now, block until all the runners have completed ...
    for (ConsumerRunner runner : new HashSet<>(consumers)) { // use a copy of the runners; they're removed when they close
        runner.waitForCompletion();
    }
    assert consumers.isEmpty();
}
/**
 * Generates the bytecode for an EJB proxy class that exposes the given
 * methods by delegating each one to the bean implementation, routing
 * through around-invoke interceptors when the method's metadata requires
 * it. For generic interfaces, bridge methods are emitted as plain methods.
 *
 * @param proxyClassName fully qualified name of the proxy class to generate
 * @param proxyInterface interface the proxy implements; null for a
 *            WebService endpoint proxy
 * @param proxyMethods reflected methods the proxy must expose
 * @param methodInfos per-method EJB metadata, indexed by method id
 * @param ejbClassName fully qualified name of the bean implementation class
 * @param beanName bean name, used for error reporting
 * @return the generated class file bytes
 * @throws EJBConfigurationException if a business method name starts with
 *             "ejb" (forbidden by the EJB spec)
 */
static byte[] generateClassBytes(String proxyClassName,
                                 Class proxyInterface,
                                 Method[] proxyMethods,
                                 EJBMethodInfoImpl[] methodInfos,
                                 String ejbClassName,
                                 String beanName)
    throws EJBConfigurationException
{
    // ASM uses 'internal' java class names (like JNI) where '/' is
    // used instead of '.', so convert the parameters to 'internal' format.
    String internalClassName = convertClassName(proxyClassName);
    String internalInterfaceName = convertClassName(proxyInterface);
    String internalEJBClassName = convertClassName(ejbClassName);
    String internalParentName = "com/ibm/ejs/container/WSEJBProxy";
    String[] internalImplements = (internalInterfaceName != null)
        ? (new String[] { internalInterfaceName }) : null;

    if (TraceComponent.isAnyTracingEnabled())
    {
        if (tc.isEntryEnabled())
            Tr.entry(tc, "generateClassBytes");
        if (tc.isDebugEnabled())
        {
            Tr.debug(tc, INDENT + "className = " + internalClassName);
            Tr.debug(tc, INDENT + "interface = " + internalInterfaceName);
            Tr.debug(tc, INDENT + "parent = " + internalParentName);
            Tr.debug(tc, INDENT + "ejb = " + internalEJBClassName);
        }
    }

    // Create the ASM Class Writer to write out a proxy
    ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_MAXS); //F743-11995

    // Define the proxy Class object
    cw.visit(V1_2, ACC_PUBLIC + ACC_SUPER,
             internalClassName,
             null,
             internalParentName,
             internalImplements);

    // Define the source code file and debug settings
    String sourceFileName = proxyClassName.substring(proxyClassName.lastIndexOf(".") + 1) + ".java";
    cw.visitSource(sourceFileName, null);

    // Add the public no parameter proxy constructor
    addCtor(cw, internalParentName);

    // Add all of the methods to the proxy, based on the reflected
    // Method objects from the interface.
    int methodId = -1;
    for (int i = 0; i < proxyMethods.length; ++i)
    {
        Method method = proxyMethods[i];
        String implMethodName = method.getName();

        // Business methods must not start with "ejb", per spec.
        if (implMethodName.startsWith("ejb"))
        {
            String interfaceName = ((proxyInterface != null) ? proxyInterface.getName()
                : "WebService Endpoint");

            // Log the error and throw meaningful exception.
            Tr.error(tc, "JIT_INVALID_MTHD_PREFIX_CNTR5010E",
                     new Object[] { beanName,
                                    interfaceName,
                                    implMethodName });
            throw new EJBConfigurationException("EJB business method " + implMethodName +
                                               " on interface " + interfaceName +
                                               " must not start with 'ejb'.");
        }

        // Determine the Method Id that will be hard coded into the
        // proxy to allow preInvoke to quickly find the correct method.
        methodId = getMethodId(method, proxyMethods, ++methodId);

        // Determine if interceptors are called from methodinfo.
        EJBMethodInfoImpl methodInfo = methodInfos[methodId];
        boolean aroundInvoke =
            (methodInfo.getAroundInterceptorProxies() != null); // F743-17763.1

        // When 'generics' are used, like for the Provider interface, the
        // signature of the method on the bean implemenation will be different
        // from that of the interface. Since WebServices will be calling using
        // the signature of the implementation, generate the methods using
        // the ejb method (taken from methodInfo). The 'interface' method
        // would exist on the ejb, but as a 'bridge' method, and may not be
        // needed on the proxy. d540438
        Method ejbMethod = methodInfo.getMethod();
        if (aroundInvoke)
        {
            addEJBInterceptorMethod(cw,
                                    internalClassName,
                                    internalEJBClassName,
                                    ejbMethod);
        }
        else
        {
            addEJBMethod(cw,
                         internalClassName,
                         internalEJBClassName,
                         ejbMethod);
        }

        // If the Prxoy implements an interface, then the 'bridge' method
        // is also needed (if there is one). This method will not be added
        // as a 'bridge' method, but just as a normal method, because although
        // the customer may have used generics to define the EJB, generics
        // are NOT usd when defining the Proxy. d540438
        if (proxyInterface != null)
        {
            Method bridgeMethod = methodInfo.getBridgeMethod();
            if (bridgeMethod != null)
            {
                if (aroundInvoke)
                {
                    addEJBInterceptorMethod(cw,
                                            internalClassName,
                                            internalEJBClassName,
                                            bridgeMethod);
                }
                else
                {
                    addEJBMethod(cw,
                                 internalClassName,
                                 internalEJBClassName,
                                 bridgeMethod);
                }
            } // end if bridgeMethod
        } // end if proxyInterface
    } // end proxyMethods loop

    // Mark the end of the generated proxy class
    cw.visitEnd();

    // Dump the class bytes out to a byte array.
    byte[] classBytes = cw.toByteArray();

    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
        writeToClassFile(internalClassName, classBytes);
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        Tr.exit(tc, "generateClassBytes: " + classBytes.length + " bytes");

    return classBytes;
}
def object_emitter(target, source, env, parent_emitter):
    """Set up the precompiled-header (PCH) dependency for an object file.

    Delegates to ``parent_emitter`` first, then registers a dependency on
    the environment's PCH — unless the object being built is the PCH's own
    object file, which would create a cycle on the .pch.
    """
    validate_vars(env)
    parent_emitter(target, source, env)
    if 'PCH' in env:
        pch = env['PCH']
        # Skip the dependency when the target (e.g. 'Source1.obj') is the
        # object produced alongside the pre-compiled header
        # ('Source1.pch').  That happens when the same source is given to
        # both env.PCH() and env.Program(); adding the explicit dependency
        # would cycle on the .pch file itself.  See SCons issue #2505:
        # http://scons.tigris.org/issues/show_bug.cgi?id=2505
        pch_object = SCons.Util.splitext(str(pch))[0] + '.obj'
        if str(target[0]) != pch_object:
            env.Depends(target, pch)
    return (target, source)
def tag_and_push(context):
    """Tag the git repository with the new version number and push tags.

    Creates a GPG-signed tag when a signing key is available, otherwise an
    annotated tag.  Both git commands honour ``context.dry_run``.
    """
    if probe.has_signing_key(context):
        tag_option = '--sign'
    else:
        tag_option = '--annotate'
    tag_command = TAG_TEMPLATE % (
        tag_option, context.new_version, context.new_version)
    shell.dry_run(tag_command, context.dry_run)
    shell.dry_run('git push --tags', context.dry_run)
java | @Override
public void removeByUuid_C(String uuid, long companyId) {
for (CommercePriceList commercePriceList : findByUuid_C(uuid,
companyId, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null)) {
remove(commercePriceList);
}
} |
def as_index(keys, axis=semantics.axis_default, base=False, stable=True, lex_as_struct=False):
    """Cast a keys object to an Index object.

    The preferred semantics is that keys is a sequence of key objects,
    except when keys is an instance of tuple, in which case the zipped
    elements of the tuple are the key objects.

    Parameters
    ----------
    keys : indexable object or Index or tuple
        The keys to build an index over.
    axis : int or None
        The axis which enumerates the keys.  If None, the keys array is
        flattened; if 0, the first axis enumerates the keys.
    base : bool
        If True, the most basic index possible is constructed.  This avoids
        an indirect sort; if it isn't required, this has better performance.
    stable : bool
        Whether a stable sort is used when constructing the index.
    lex_as_struct : bool
        If True, a tuple of keys is packed into a structured array instead
        of being wrapped in a LexIndex.

    Returns
    -------
    Index
        An index object over the given keys.

    Raises
    ------
    TypeError
        If the given object cannot be converted to an array of keys.
    """
    if isinstance(keys, Index):
        if type(keys) is BaseIndex and not base:
            # Need to upcast to an indirectly sorted index type.
            keys = keys.keys
        else:
            return keys  # already an index of the requested kind
    if isinstance(keys, tuple):
        if lex_as_struct:
            keys = as_struct_array(*keys)
        else:
            return LexIndex(keys, stable)
    try:
        keys = np.asarray(keys)
    except Exception as e:
        # A bare 'except:' here would also swallow KeyboardInterrupt and
        # SystemExit; catch Exception and chain the original cause.
        raise TypeError('Given object does not form a valid set of keys') from e
    if axis is None:
        keys = keys.flatten()
    if keys.ndim == 1:
        if base:
            return BaseIndex(keys)
        return Index(keys, stable=stable)
    return ObjectIndex(keys, axis, stable=stable)
def _find_by_nsp(self, browser, criteria, tag, constraints):
    """Locate elements via an iOS NSPredicate string, then filter them.

    Runs the predicate search on the browser and narrows the matches by
    the given tag and constraints.
    """
    matches = browser.find_elements_by_ios_predicate(criteria)
    return self._filter_elements(matches, tag, constraints)
def encode(self, b64=False, always_bytes=True):
    """Encode the packet for transmission.

    Binary payloads are emitted either as raw bytes or, when ``b64`` is
    True, as a 'b'-prefixed base64 text frame.  Non-binary payloads are
    serialized as text (JSON for dicts/lists).
    """
    if self.binary:
        if b64:
            # Base64 text framing: 'b' marker + type + base64 payload.
            encoded_packet = 'b' + six.text_type(self.packet_type)
            encoded_packet += base64.b64encode(self.data).decode('utf-8')
        else:
            # Raw binary framing: single type byte + payload bytes.
            encoded_packet = six.int2byte(self.packet_type)
            encoded_packet += self.data
    else:
        encoded_packet = six.text_type(self.packet_type)
        if isinstance(self.data, six.string_types):
            encoded_packet += self.data
        elif isinstance(self.data, dict) or isinstance(self.data, list):
            encoded_packet += self.json.dumps(self.data,
                                              separators=(',', ':'))
        elif self.data is not None:
            encoded_packet += str(self.data)
    if always_bytes and not isinstance(encoded_packet, binary_types):
        encoded_packet = encoded_packet.encode('utf-8')
    return encoded_packet
/**
 * Invokes the visit method on the given visitor that matches the runtime
 * type of {@code argument}, dispatching via reflection.
 *
 * @param visitor the visitor whose visit method should be called; required
 * @param argument the argument to dispatch on; may be {@code null}
 * @return the value returned by the matched visit method, or {@code null}
 *         when no suitable method is found
 */
public Object invokeVisit(Object visitor, Object argument) {
    Assert.notNull(visitor, "The visitor to visit is required");
    // Perform call back on the visitor through reflection.
    Method method = getMethod(visitor.getClass(), argument);
    if (method == null) {
        // No matching visit method: log a warning and return null rather
        // than failing the whole visit.
        if (logger.isWarnEnabled()) {
            logger.warn("No method found by reflection for visitor class ["
                    + visitor.getClass().getName()
                    + "] and argument of type ["
                    + (argument != null ? argument.getClass().getName()
                            : "") + "]");
        }
        return null;
    }
    try {
        Object[] args = null;
        if (argument != null) {
            args = new Object[] { argument };
        }
        // Relax accessibility for non-public visit methods before invoking.
        if (!Modifier.isPublic(method.getModifiers())
                && !method.isAccessible()) {
            method.setAccessible(true);
        }
        return method.invoke(visitor, args);
    } catch (Exception ex) {
        // handleReflectionException is expected to rethrow an unchecked
        // exception, so the throw below should be unreachable.
        ReflectionUtils.handleReflectionException(ex);
        throw new IllegalStateException("Should never get here");
    }
}
/**
 * Reads the visit entries matching the given filter from the database.
 *
 * @param dbc the current database context
 * @param poolName the name of the connection pool to use; when empty, the
 *        connection is taken from the database context instead
 * @param filter the filter restricting which visit entries are read
 * @return the list of matching visit entries
 * @throws CmsDataAccessException if the SQL query fails
 */
public List<CmsVisitEntry> readVisits(CmsDbContext dbc, String poolName, CmsVisitEntryFilter filter)
throws CmsDataAccessException {
    List<CmsVisitEntry> entries = new ArrayList<CmsVisitEntry>();
    Connection conn = null;
    PreparedStatement stmt = null;
    ResultSet res = null;
    try {
        // Prefer the named pool when one is given, otherwise use the
        // context's default connection.
        if (CmsStringUtil.isNotEmpty(poolName)) {
            conn = m_sqlManager.getConnection(poolName);
        } else {
            conn = m_sqlManager.getConnection(dbc);
        }
        // compose statement: base query plus filter-derived conditions
        StringBuffer queryBuf = new StringBuffer(256);
        queryBuf.append(m_sqlManager.readQuery("C_VISIT_READ_ENTRIES"));
        CmsPair<String, List<I_CmsPreparedStatementParameter>> conditionsAndParameters = prepareVisitConditions(
            filter);
        List<I_CmsPreparedStatementParameter> params = conditionsAndParameters.getSecond();
        queryBuf.append(conditionsAndParameters.getFirst());
        if (LOG.isDebugEnabled()) {
            LOG.debug(queryBuf.toString());
        }
        stmt = m_sqlManager.getPreparedStatementForSql(conn, queryBuf.toString());
        // Bind the filter parameters; JDBC parameter indices are 1-based.
        for (int i = 0; i < params.size(); i++) {
            I_CmsPreparedStatementParameter param = params.get(i);
            param.insertIntoStatement(stmt, i + 1);
        }
        // execute
        res = stmt.executeQuery();
        while (res.next()) {
            // get results
            entries.add(internalReadVisitEntry(res));
        }
    } catch (SQLException e) {
        throw new CmsDbSqlException(
            Messages.get().container(Messages.ERR_GENERIC_SQL_1, CmsDbSqlException.getErrorQuery(stmt)),
            e);
    } finally {
        // Always release connection, statement and result set.
        m_sqlManager.closeAll(dbc, conn, stmt, res);
    }
    return entries;
}
/**
 * Gets the {@code geneSymbolList} feature value for this annotation.
 *
 * <p>JCas-generated accessor: first verifies the feature exists in the
 * type system, then resolves the feature reference through the low-level
 * CAS API.
 *
 * @return the gene symbol list stored on this ManualDescriptor
 */
public StringArray getGeneSymbolList() {
    if (ManualDescriptor_Type.featOkTst && ((ManualDescriptor_Type)jcasType).casFeat_geneSymbolList == null)
      jcasType.jcas.throwFeatMissing("geneSymbolList", "de.julielab.jules.types.pubmed.ManualDescriptor");
    return (StringArray)(jcasType.ll_cas.ll_getFSForRef(jcasType.ll_cas.ll_getRefValue(addr, ((ManualDescriptor_Type)jcasType).casFeatCode_geneSymbolList)));}
java | private static IAnnotationBinding getAnnotationBinding(
IAnnotationBinding[] annotations, Class<?> annotationClass) {
if (annotationClass == null) {
throw new NullPointerException();
}
return getAnnotationBinding(annotations, annotationClass.getCanonicalName());
} |
def _inspect_class(cls, subclass):
    """Evaluate ``subclass`` for inclusion as a plugin of ``cls``.

    Args:
        cls(:py:class:`Plugin`): Parent class
        subclass(:py:class:`Plugin`): Subclass to evaluate

    Returns:
        Result: Named tuple

    Values for errorcode:
        * 0: No error

        Error codes between 0 and 100 are not intended for import
        * 50 Skipload flag is True

        Error codes between 99 and 200 are excluded from import
        * 156: Skipload call returned True

        Error codes 200 and above are malformed classes
        * 210: Missing abstract property
        * 211: Missing abstract static method
        * 212: Missing abstract class method
        * 213: Missing abstract method
        * 214: Missing abstract attribute
        * 220: Argument spec does not match
    """
    skipload = subclass._skipload_
    if callable(skipload):
        outcome = skipload()
        if isinstance(outcome, tuple):
            skip, msg = outcome
        else:
            skip, msg = outcome, None
        if skip:
            return Result(False, msg, 156)
    elif skipload:
        return Result(False, 'Skipload flag is True', 50)
    return _check_methods(cls, subclass)
def bulk_index(cls, documents, id_field='id', es=None, index=None):
    """Add or update a batch of documents in the index.

    :arg documents: list of Python dicts, one per document to index

        .. Note::

           These must be serializable into JSON.

    :arg id_field: name of the field to use as the document id;
        defaults to 'id'.
    :arg es: the `Elasticsearch` to use; defaults to `cls.get_es()`.
    :arg index: the index name to use; defaults to `cls.get_index()`.

    .. Note::

       If you need the documents available for searches immediately,
       make sure to refresh the index by calling ``refresh_index()``.
    """
    es_client = cls.get_es() if es is None else es
    target_index = cls.get_index() if index is None else index
    # Inject the _id Elasticsearch expects, without mutating the inputs.
    docs_with_ids = (dict(doc, _id=doc[id_field]) for doc in documents)
    bulk_index(
        es_client,
        docs_with_ids,
        index=target_index,
        doc_type=cls.get_mapping_type_name(),
        raise_on_error=True
    )
java | public Class<?> reflectClass(final String className) {
Preconditions.checkArgument(className != null && className.trim().length() > 0, "className cannot be null or empty");
return provider.getClassReflectionProvider(className).reflectClass();
} |
java | protected int getOptionIndex(final Object option) {
int optionCount = 0;
List<?> options = getOptions();
if (options != null) {
for (Object obj : getOptions()) {
if (obj instanceof OptionGroup) {
List<?> groupOptions = ((OptionGroup) obj).getOptions();
int groupIndex = groupOptions.indexOf(option);
if (groupIndex != -1) {
return optionCount + groupIndex;
}
optionCount += groupOptions.size();
} else if (Util.equals(option, obj)) {
return optionCount;
} else {
optionCount++;
}
}
}
return -1;
} |
def greenkhorn(a, b, M, reg, numItermax=10000, stopThr=1e-9, verbose=False, log=False):
    r"""
    Solve the entropic regularization optimal transport problem and return the OT matrix
    The algorithm used is based on the paper
    Near-linear time approximation algorithms for optimal transport via Sinkhorn iteration
        by Jason Altschuler, Jonathan Weed, Philippe Rigollet
        appeared at NIPS 2017
    which is a stochastic version of the Sinkhorn-Knopp algorithm [2].
    The function solves the following optimization problem:
    .. math::
        \gamma = arg\min_\gamma <\gamma,M>_F + reg\cdot\Omega(\gamma)
        s.t. \gamma 1 = a
             \gamma^T 1= b
             \gamma\geq 0
    where :
    - M is the (ns,nt) metric cost matrix
    - :math:`\Omega` is the entropic regularization term :math:`\Omega(\gamma)=\sum_{i,j} \gamma_{i,j}\log(\gamma_{i,j})`
    - a and b are source and target weights (sum to 1)
    Parameters
    ----------
    a : np.ndarray (ns,)
        samples weights in the source domain
    b : np.ndarray (nt,) or np.ndarray (nt,nbb)
        samples in the target domain, compute sinkhorn with multiple targets
        and fixed M if b is a matrix (return OT loss + dual variables in log)
    M : np.ndarray (ns,nt)
        loss matrix
    reg : float
        Regularization term >0
    numItermax : int, optional
        Max number of iterations
    stopThr : float, optional
        Stop threshol on error (>0)
    verbose : bool, optional
        Currently unused by this implementation.
    log : bool, optional
        record log if True
    Returns
    -------
    gamma : (ns x nt) ndarray
        Optimal transportation matrix for the given parameters
    log : dict
        log dictionary return only if log==True in parameters
    Examples
    --------
    >>> import ot
    >>> a=[.5,.5]
    >>> b=[.5,.5]
    >>> M=[[0.,1.],[1.,0.]]
    >>> ot.bregman.greenkhorn(a,b,M,1)
    array([[ 0.36552929,  0.13447071],
           [ 0.13447071,  0.36552929]])
    References
    ----------
    .. [2] M. Cuturi, Sinkhorn Distances : Lightspeed Computation of Optimal Transport, Advances in Neural Information Processing Systems (NIPS) 26, 2013
    [22] J. Altschuler, J.Weed, P. Rigollet : Near-linear time approximation algorithms for optimal transport via Sinkhorn iteration, Advances in Neural Information Processing Systems (NIPS) 31, 2017
    See Also
    --------
    ot.lp.emd : Unregularized OT
    ot.optim.cg : General regularized OT
    """
    a = np.asarray(a, dtype=np.float64)
    b = np.asarray(b, dtype=np.float64)
    M = np.asarray(M, dtype=np.float64)
    # Default to uniform marginals when none are supplied.
    if len(a) == 0:
        a = np.ones((M.shape[0],), dtype=np.float64) / M.shape[0]
    if len(b) == 0:
        b = np.ones((M.shape[1],), dtype=np.float64) / M.shape[1]
    n = a.shape[0]
    m = b.shape[0]
    # Next 3 lines equivalent to K= np.exp(-M/reg), but faster to compute
    K = np.empty_like(M)
    np.divide(M, -reg, out=K)
    np.exp(K, out=K)
    # Uniform initial scalings; G is the current transport plan estimate.
    u = np.full(n, 1. / n)
    v = np.full(m, 1. / m)
    G = u[:, np.newaxis] * K * v[np.newaxis, :]
    # Marginal violations: how far G's row/column sums are from a and b.
    viol = G.sum(1) - a
    viol_2 = G.sum(0) - b
    stopThr_val = 1
    if log:
        log = dict()
        log['u'] = u
        log['v'] = v
    for i in range(numItermax):
        # Greedy coordinate choice: update the row or column with the
        # largest marginal violation (the "greedy" part of Greenkhorn).
        i_1 = np.argmax(np.abs(viol))
        i_2 = np.argmax(np.abs(viol_2))
        m_viol_1 = np.abs(viol[i_1])
        m_viol_2 = np.abs(viol_2[i_2])
        stopThr_val = np.maximum(m_viol_1, m_viol_2)
        if m_viol_1 > m_viol_2:
            # Rescale row i_1 and update both violation vectors in place.
            old_u = u[i_1]
            u[i_1] = a[i_1] / (K[i_1, :].dot(v))
            G[i_1, :] = u[i_1] * K[i_1, :] * v
            viol[i_1] = u[i_1] * K[i_1, :].dot(v) - a[i_1]
            viol_2 += (K[i_1, :].T * (u[i_1] - old_u) * v)
        else:
            # Rescale column i_2 and update both violation vectors in place.
            old_v = v[i_2]
            v[i_2] = b[i_2] / (K[:, i_2].T.dot(u))
            G[:, i_2] = u * K[:, i_2] * v[i_2]
            #aviol = (G@one_m - a)
            #aviol_2 = (G.T@one_n - b)
            viol += (-old_v + v[i_2]) * K[:, i_2] * u
            viol_2[i_2] = v[i_2] * K[:, i_2].dot(u) - b[i_2]
            #print('b',np.max(abs(aviol -viol)),np.max(abs(aviol_2 - viol_2)))
        if stopThr_val <= stopThr:
            break
    else:
        print('Warning: Algorithm did not converge')
    if log:
        log['u'] = u
        log['v'] = v
    if log:
        return G, log
    else:
        return G
java | public static void validateClusterPartitionState(final Cluster subsetCluster,
final Cluster supersetCluster) {
if(!supersetCluster.getNodeIds().containsAll(subsetCluster.getNodeIds())) {
throw new VoldemortException("Superset cluster does not contain all nodes from subset cluster[ subset cluster node ids ("
+ subsetCluster.getNodeIds()
+ ") are not a subset of superset cluster node ids ("
+ supersetCluster.getNodeIds() + ") ]");
}
for(int nodeId: subsetCluster.getNodeIds()) {
Node supersetNode = supersetCluster.getNodeById(nodeId);
Node subsetNode = subsetCluster.getNodeById(nodeId);
if(!supersetNode.getPartitionIds().equals(subsetNode.getPartitionIds())) {
throw new VoldemortRebalancingException("Partition IDs do not match between clusters for nodes with id "
+ nodeId
+ " : subset cluster has "
+ subsetNode.getPartitionIds()
+ " and superset cluster has "
+ supersetNode.getPartitionIds());
}
}
Set<Integer> nodeIds = supersetCluster.getNodeIds();
nodeIds.removeAll(subsetCluster.getNodeIds());
for(int nodeId: nodeIds) {
Node supersetNode = supersetCluster.getNodeById(nodeId);
if(!supersetNode.getPartitionIds().isEmpty()) {
throw new VoldemortRebalancingException("New node "
+ nodeId
+ " in superset cluster already has partitions: "
+ supersetNode.getPartitionIds());
}
}
} |
java | public static boolean isEqualIgnoreCase(String pStr1, String pStr2) {
if (pStr1 == null && pStr2 == null) {
return true;
} else if (pStr1 == null || pStr2 == null) {
return false;
} else if (pStr1.equalsIgnoreCase(pStr2)) {
return true;
}
return false;
} |
/**
 * Sets the batch of events, keyed by event id, and returns this object so
 * calls can be chained.
 *
 * @param events map of event id to event
 * @return this {@code EventsBatch} instance
 */
public EventsBatch withEvents(java.util.Map<String, Event> events) {
    setEvents(events);
    return this;
}
def blockgen(blocks, shape):
    """Generate a list of slice tuples to be used by combine.

    The tuples represent regions in an N-dimensional image.

    :param blocks: a tuple of block sizes
    :param shape: the shape of the n-dimensional array
    :return: an iterator to the list of tuples of slices

    Example:

        >>> blocks = (500, 512)
        >>> shape = (1040, 1024)
        >>> for i in blockgen(blocks, shape):
        ...     print(i)
        (slice(0, 260, None), slice(0, 512, None))
        (slice(0, 260, None), slice(512, 1024, None))
        (slice(260, 520, None), slice(0, 512, None))
        (slice(260, 520, None), slice(512, 1024, None))
        (slice(520, 780, None), slice(0, 512, None))
        (slice(520, 780, None), slice(512, 1024, None))
        (slice(780, 1040, None), slice(0, 512, None))
        (slice(780, 1040, None), slice(512, 1024, None))
    """
    # One 1-D slice generator per dimension; the cartesian product then
    # enumerates every N-dimensional region.
    iterables = [blockgen1d(l, s) for (l, s) in zip(blocks, shape)]
    return product(*iterables)
def search_aikif(txt, formatHTML=True):
    """
    search for text - currently this looks in all folders in the
    root of AIKIF but that also contains binaries so will need to
    use the agent_filelist.py to specify the list of folders.
    NOTE - this needs to use indexes rather than full search each time

    :param txt: substring to search for (case-sensitive line match)
    :param formatHTML: when True, results are HTML-formatted strings;
        when False, raw [file, line, line_num, txt] lists are collected
        and per-file counts are printed instead of appended
    :return: list of results (or the single entry "No results")
    """
    results = []
    num_found = 0
    import aikif.lib.cls_filelist as mod_fl
    # Collect all files under the AIKIF root, excluding compiled bytecode.
    my_files = mod_fl.FileList([aikif_folder ], ['*.*'], ['*.pyc'])
    files = my_files.get_list()
    for f in files:
        try:
            num_found = 0
            with open(f, 'r') as cur:
                line_num = 0
                for line in cur:
                    line_num += 1
                    if txt in line:
                        num_found += 1
                        if formatHTML is True:
                            results.append(format_result(line, line_num, txt))
                        else:
                            results.append([f, line, line_num, txt])
            if num_found > 0:
                # NOTE(review): the per-file summary is appended *after* the
                # matches for that file, so headers trail their results.
                if formatHTML is True:
                    results.append('<h3>' + f + ' = ' + str(num_found) + ' results</h3>')
                else:
                    print(f + ' = ' + str(num_found) + '')
        except Exception:
            # Unreadable files (e.g. binaries) are reported, not fatal.
            results.append('problem with file ' + f)
    if len(results) == 0:
        results.append("No results")
    return results
/**
 * Joins the given array of strings with the delimiter by delegating to the
 * collection-based overload.
 *
 * @param _delimiter the string inserted between the elements
 * @param _strings the strings to join
 * @return the joined string
 */
public static String join(String _delimiter, String[] _strings) {
    return join(_delimiter, Arrays.asList(_strings));
}
def setVerticalAxis(self, axis):
    """
    Sets the vertical axis for this chart, or clears it when None.

    :param      axis | <XChartAxis> || None
    """
    self._verticalAxis = axis
    if axis:
        axis.setOrientation(Qt.Vertical)
        # Reserve enough width for the widest axis label plus a margin.
        self.uiYAxisVIEW.setFixedWidth(axis.minimumLabelWidth() + 15)
    # Hide the axis view entirely when no axis is assigned.
    self.uiYAxisVIEW.setVisible(axis is not None)
def get_output(self, output_files, clear=True):
    """Get the output files as an id indexed dict.

    :param output_files: iterable of paths to Sparser '-semantics' JSON
        output files (None entries are skipped with a warning)
    :param clear: when True, delete each output file and its matching
        .nxml input after loading (best effort)
    :return: the accumulated results (whatever ``self.results`` holds)
    :raises SparserError: if an output filename does not contain the
        expected '-semantics' prefix pattern
    """
    patt = re.compile(r'(.*?)-semantics.*?')
    for outpath in output_files:
        if outpath is None:
            logger.warning("Found outpath with value None. Skipping.")
            continue
        # Derive the content id from the filename prefix before '-semantics'.
        re_out = patt.match(path.basename(outpath))
        if re_out is None:
            raise SparserError("Could not get prefix from output path %s."
                               % outpath)
        prefix = re_out.groups()[0]
        if prefix.startswith('PMC'):
            prefix = prefix[3:]
        if prefix.isdecimal():
            # In this case we assume the prefix is a tcid.
            prefix = int(prefix)
        try:
            with open(outpath, 'rt') as f:
                content = json.load(f)
        except Exception as e:
            # Record the failure but keep processing other outputs.
            logger.exception(e)
            logger.error("Could not load reading content from %s."
                         % outpath)
            content = None
        self.add_result(prefix, content)
        if clear:
            input_path = outpath.replace('-semantics.json', '.nxml')
            try:
                remove(outpath)
                remove(input_path)
            except Exception as e:
                # Cleanup is best effort; failures are logged, not raised.
                logger.exception(e)
                logger.error("Could not remove sparser files %s and %s."
                             % (outpath, input_path))
    return self.results
def main_module(self):
    """Return the main module to which the receiver belongs.

    For statements inside a submodule, this resolves the including module
    through the parse context; otherwise the statement's own module is
    already the main module.
    """
    module = self.i_module
    if module.keyword != "submodule":
        return module
    return module.i_ctx.get_module(module.i_including_modulename)
def remainder(self, max_value=None):
    """
    Returns the time remaining for the timeout, or *max_value* if that
    remainder is larger.

    A remainder of None means "no timeout configured"; a negative
    remainder is clamped to 0.0.
    """
    # No timeout configured: report the cap (or None) unchanged.
    if self.value is None:
        return max_value
    # NOTE(review): time.clock() was removed in Python 3.8. self.start is
    # presumably captured with the same clock elsewhere in this class, so
    # both sites must migrate together (e.g. to time.perf_counter()) --
    # TODO confirm.
    remainder = self.value - (time.clock() - self.start)
    if remainder < 0.0:
        # Timeout already elapsed.
        return 0.0
    elif max_value is not None and remainder > max_value:
        return max_value
    else:
        return remainder
/**
 * Checks whether this item is due to expire and, if so, transitions it
 * from AVAILABLE to LOCKED_FOR_EXPIRY.
 *
 * <p>Only items that are currently available and allowed to expire are
 * considered. The state change, statistics updates and lock assignment
 * happen under synchronization on this link; the non-releasable
 * declaration is deliberately made outside that synchronized block.
 *
 * @return whether the item is (now) in the expiring state
 * @throws SevereMessageStoreException if restoring the item fails
 */
final boolean isExpired() throws SevereMessageStoreException
{ // 182086
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.entry(this, tc, "isExpired");
    if (isAvailable() && internalCanExpire())
    {
        // we can only expire if we are available
        long expiryTime = _tuple.getExpiryTime();
        // An expiry time of 0 means "never expires".
        if (expiryTime != 0 && expiryTime <= Expirer.timeNow())
        {
            // we are due to expire
            boolean hasBecomeNonReleasable = false;
            synchronized (this)
            {
                // Re-check state under the lock; another thread may have
                // transitioned the item since the isAvailable() check above.
                if (ItemLinkState.STATE_AVAILABLE == _itemLinkState)
                {
                    // force item to be present
                    AbstractItem item = _getItemNoRestore();
                    if (null == item)
                    {
                        item = _restoreItem();
                    }
                    // Move the item from "available" to "expiring" in the
                    // parent list statistics atomically.
                    ListStatistics stats = getParentStatistics();
                    synchronized (stats)
                    {
                        stats.incrementExpiring();
                        stats.decrementAvailable();
                    }
                    _lockID = AbstractItemLink.EXPIRY_LOCK_ID;
                    hasBecomeNonReleasable = _declareNotDiscardable(item);
                    _itemLinkState = ItemLinkState.STATE_LOCKED_FOR_EXPIRY;
                }
            }
            // Declared outside the synchronized block, only when the
            // discardability actually changed above.
            if (hasBecomeNonReleasable)
            {
                _declareNotReleasable();
            }
        }
    }
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.exit(this, tc, "isExpired", Boolean.valueOf(isExpiring()));
    return isExpiring();
}
/**
 * Copies the resource with the given name to the current project.
 *
 * @param resourcename the name of the resource to copy
 * @throws CmsException if reading the resource or copying it fails
 */
public void copyResourceToProject(String resourcename) throws CmsException {
    // Read using the ALL filter, then delegate to the resource-based overload.
    CmsResource resource = readResource(resourcename, CmsResourceFilter.ALL);
    copyResourceToProject(resource);
}
java | public JavaScriptActionBuilder javascript(Resource script, Charset charset) {
JavaScriptAction action = new JavaScriptAction();
try {
action.setScript(FileUtils.readToString(script, charset));
} catch (IOException e) {
throw new CitrusRuntimeException("Failed to read script resource", e);
}
action(action);
return new JavaScriptActionBuilder(action);
} |
/**
 * Lists the agents, applying the standard pre-execution client hooks
 * before delegating to the generated executor.
 *
 * @param request the list-agents request
 * @return the list-agents result
 */
@Override
public ListAgentsResult listAgents(ListAgentsRequest request) {
    request = beforeClientExecution(request);
    return executeListAgents(request);
}
java | public List<Relationship> getNextRelationships() {
final List<Relationship> nextRelationships = new LinkedList<Relationship>();
for (final Relationship r : relationships) {
if (r.getType() == RelationshipType.NEXT) {
nextRelationships.add(r);
}
}
return nextRelationships;
} |
def transaction(self, callback):
    """Executes a function in a transaction.

    The function gets passed this Connection instance as an (optional)
    parameter. If an exception occurs during execution of the function or
    transaction commit, the transaction is rolled back and the exception
    re-thrown.

    :param callback: the function to execute in a transaction
    :return: the value returned by the `callback`
    :raise: Exception
    """
    self.begin_transaction()
    try:
        result = callback(self)
        self.commit()
        return result
    except BaseException:
        # Explicit BaseException instead of a bare 'except:' — behaviour is
        # identical (rollback then re-raise for *every* exception, including
        # KeyboardInterrupt/SystemExit), but the intent is now visible and
        # lint-clean.
        self.rollback()
        raise
python | def _filter_fields(self, filter_function):
"""
Utility to iterate through all fields (super types first) of a type.
:param filter: A function that takes in a Field object. If it returns
True, the field is part of the generated output. If False, it is
omitted.
"""
fields = []
if self.parent_type:
fields.extend(self.parent_type._filter_fields(filter_function))
fields.extend(filter(filter_function, self.fields))
return fields |
def intervals(self, startdate, enddate, parseinterval=None):
    '''Given a ``startdate`` and an ``enddate`` dates, evaluate the
    date intervals from which data is not available. It return a list
    of two-dimensional tuples containing start and end date for the
    interval. The list could contain 0, 1 or 2 tuples.

    :param startdate: start of the requested range
    :param enddate: end of the requested range
    :param parseinterval: optional callable used to parse interval values
    '''
    # Delegates to the shared helper, bounded by this series' known
    # data_start/data_end and using this instance's date converter.
    return missing_intervals(startdate, enddate, self.data_start,
                             self.data_end, dateconverter=self.todate,
                             parseinterval=parseinterval)
def process_sparser_output(output_fname, output_fmt='json'):
    """Return a processor with Statements extracted from Sparser XML or JSON.

    Parameters
    ----------
    output_fname : str
        The path to the Sparser output file to be processed. The file can
        either be JSON or XML output from Sparser, with the output_fmt
        parameter defining what format is assumed to be processed.
    output_fmt : Optional[str]
        The format of the Sparser output to be processed, can either be
        'json' or 'xml'. Default: 'json'

    Returns
    -------
    sp : SparserXMLProcessor or SparserJSONProcessor depending on what
        output format was chosen, or None for an unrecognized format.
    """
    if output_fmt == 'json':
        with open(output_fname, 'rt') as fh:
            return process_json_dict(json.load(fh))
    if output_fmt == 'xml':
        with open(output_fname, 'rt') as fh:
            return process_xml(fh.read())
    # Unknown format: log and bail out before touching the file.
    logger.error("Unrecognized output format '%s'." % output_fmt)
    return None
java | public static long getTimeMillisWithNano(int year, int month,
int dayOfMonth, int hour, int minute, int second, int nanoOfSecond,
String zoneId) {
return ZonedDateTime.of(year, month, dayOfMonth, hour, minute, second,
nanoOfSecond, getZoneId(zoneId)).toInstant().toEpochMilli();
} |
def relaxNGNewParserCtxt(URL):
    """Create an XML RelaxNG parser context for a file or resource that is
    expected to contain an XML RelaxNG schema."""
    ctxt = libxml2mod.xmlRelaxNGNewParserCtxt(URL)
    if ctxt is None:
        raise parserError('xmlRelaxNGNewParserCtxt() failed')
    return relaxNgParserCtxt(_obj=ctxt)
java | public OsmLayer createOsmLayer(String id, int nrOfLevels, String url) {
OsmLayer layer = new OsmLayer(id, createOsmTileConfiguration(nrOfLevels));
layer.addUrl(url);
return layer;
} |
def write(self,buf):
    """
    Write command to blink(1), low-level internal use
    Send USB Feature Report 0x01 to blink(1) with 8-byte payload
    Note: arg 'buf' must be 8 bytes or bad things happen

    :param buf: sequence of 8 byte values forming the feature report
    """
    # Log the raw report as hex before sending, for debugging.
    log.debug("blink1write:" + ",".join('0x%02x' % v for v in buf))
    self.dev.send_feature_report(buf)
def _maintain_dep_graph(self, p_todo):
    """
    Makes sure that the dependency graph is consistent according to the
    given todo.

    Registers the todo as a parent when it carries an 'id' tag, wiring up
    any already-known todos that depend on it, and connects the todo to
    each parent referenced by its 'p' tags (when that parent is known).
    """
    dep_id = p_todo.tag_value('id')
    # maintain dependency graph
    if dep_id:
        self._parentdict[dep_id] = p_todo
        self._depgraph.add_node(hash(p_todo))
        # connect all tasks we have in memory so far that refer to this
        # task
        for dep in \
            [dep for dep in self._todos if dep.has_tag('p', dep_id)]:
            self._add_edge(p_todo, dep, dep_id)
    # Link this todo under each parent it names via 'p' tags.
    for dep_id in p_todo.tag_values('p'):
        try:
            parent = self._parentdict[dep_id]
            self._add_edge(parent, p_todo, dep_id)
        except KeyError:
            # Parent not seen yet; the edge is added when it registers
            # (see the loop above).
            pass
python | def _construct_bower_command(bower_command):
'''
Create bower command line string
'''
if not bower_command:
raise CommandExecutionError(
'bower_command, e.g. install, must be specified')
cmd = ['bower'] + shlex.split(bower_command)
cmd.extend(['--config.analytics', 'false',
'--config.interactive', 'false',
'--allow-root', '--json'])
return cmd |
/**
 * Enables or disables translation of PMI statistics text information by
 * delegating to the underlying {@code StatsImpl} NLS switch.
 *
 * @param textInfoTranslationEnabled whether text info should be translated
 * @param locale the locale to use for translation
 */
public static void setTextInfoTranslationEnabled(boolean textInfoTranslationEnabled, Locale locale) {
    com.ibm.ws.pmi.stat.StatsImpl.setEnableNLS(textInfoTranslationEnabled, locale);
}
/**
 * Appends the given snapshot attributes to this result and returns it for
 * method chaining. The backing list is created lazily on first use.
 *
 * <p>NOTE: this method appends to any existing attribute list; use the
 * setter to replace the list instead.
 *
 * @param dBClusterSnapshotAttributes the attributes to append
 * @return this result object
 */
public DBClusterSnapshotAttributesResult withDBClusterSnapshotAttributes(DBClusterSnapshotAttribute... dBClusterSnapshotAttributes) {
    if (this.dBClusterSnapshotAttributes == null) {
        setDBClusterSnapshotAttributes(new java.util.ArrayList<DBClusterSnapshotAttribute>(dBClusterSnapshotAttributes.length));
    }
    for (DBClusterSnapshotAttribute ele : dBClusterSnapshotAttributes) {
        this.dBClusterSnapshotAttributes.add(ele);
    }
    return this;
}
def get_raw_input(description, default=False):
    """Prompt on the command line via raw_input / input and return the reply.

    description (unicode): Text to display before prompt.
    default (unicode or False/None): Default value to display with prompt.
    RETURNS (unicode): User input.
    """
    suffix = ' (default: %s)' % default if default else ''
    prompt = ' %s%s: ' % (description, suffix)
    return input_(prompt)
java | @Pure
@Override
public PathIterator3f getPathIterator(Transform3D transform) {
if (transform == null) {
return new CopyPathIterator3f();
}
return new TransformPathIterator3f(transform);
} |
def get_project(self, project_short_name):
    """Return the single project matching the given short name.

    :raises ProjectNotFound: when the lookup does not return exactly one
        project.
    """
    found = pbclient.find_project(short_name=project_short_name,
                                  all=self.all)
    if len(found) != 1:
        raise ProjectNotFound(project_short_name)
    return found[0]
def get_rendition_url(request, image_id, target_width=0, target_height=0):
    '''
    Get a rendition URL for a master image.

    If the rendition does not exist it will be created in the storage.
    If the requested dimensions do not fit the master's aspect ratio,
    the image will be cropped with a centered anchor.
    If one dimension is omitted (0), the other one will be generated
    according to the master's aspect ratio.

    :param request: http GET request
        /renderer/rendition/url/<image_id>/<target_width>/<target_height>/
    :param image_id: the master image primary key
    :param target_width: target image width;
        if 0 the renderer will use target_height
        to generate an image with correct aspect ratio
    :param target_height: target image height;
        if 0 the renderer will use target_width
        to generate an image with correct aspect ratio
    :return: rendition url in a json dictionary
    '''
    # 404 when the master image does not exist; rendition creation is
    # handled inside get_rendition_url on the model.
    im = get_object_or_404(MasterImage, pk=image_id)
    return JsonResponse({
        'url': im.get_rendition_url(target_width, target_height)
    })
java | public static RealMatrix colSubtract (RealMatrix matrix, double[] vector) {
// Declare and initialize the new matrix:
double[][] retval = new double[matrix.getRowDimension()][matrix.getColumnDimension()];
// Iterate over rows:
for (int col = 0; col < retval.length; col++) {
// Iterate over columns:
for (int row = 0; row < retval[0].length; row++) {
retval[row][col] = matrix.getEntry(row, col) - vector[row];
}
}
// Done, return a new matrix:
return MatrixUtils.createRealMatrix(retval);
} |
def get_flag_value(self, name, default):  # pylint: disable=invalid-name
    """Return the value of a flag (if not None) or a default value.

    Args:
        name: str, the name of a flag.
        default: Default value to use if the flag value is None.

    Returns:
        Requested flag value or default.
    """
    value = self.__getattr__(name)
    # Only None means "unset" -- falsy values like 0 or '' are real values.
    return default if value is None else value
/**
 * Creates and registers one pattern controller per supported serial-date
 * pattern type (none, daily, weekly, monthly, yearly).
 */
private void initPatternControllers() {
    m_patternControllers.put(PatternType.NONE, new CmsPatternPanelNoneController());
    m_patternControllers.put(PatternType.DAILY, new CmsPatternPanelDailyController(m_model, this));
    m_patternControllers.put(PatternType.WEEKLY, new CmsPatternPanelWeeklyController(m_model, this));
    m_patternControllers.put(PatternType.MONTHLY, new CmsPatternPanelMonthlyController(m_model, this));
    m_patternControllers.put(PatternType.YEARLY, new CmsPatternPanelYearlyController(m_model, this));
    // m_patternControllers.put(PatternType.INDIVIDUAL, new CmsPatternPanelIndividualController(m_model, this));
}
def ordering(self, type):
    """
    Get the attribute ordering defined in the specified
    XSD type information.
    @param type: An XSD type object.
    @type type: SchemaObject
    @return: An ordered list of attribute names.
    @rtype: list
    """
    names = []
    for child, ancestry in type.resolve():
        if child.name is None:
            continue
        # Attributes are distinguished with a leading underscore.
        prefix = '_' if child.isattr() else ''
        names.append(prefix + child.name)
    return names
def _filter_image(self, url):
    """Run the image URL through the filter chain; return it if it survives.

    Each filter receives the value produced so far; a falsy value
    short-circuits the remaining filters and is returned as-is.
    """
    result = filters.AdblockURLFilter()(url)
    for image_filter in (filters.NoImageFilter(),
                         filters.SizeImageFilter(),
                         filters.MonoImageFilter(),
                         filters.FormatImageFilter()):
        result = result and image_filter(result)
    return result
/**
 * Returns the {@link EClass} for IfcAmountOfSubstanceMeasure, resolving it
 * lazily from the registered Ifc2x3tc1 package (classifier index 656) and
 * caching the result.
 *
 * @return the IfcAmountOfSubstanceMeasure EClass
 */
public EClass getIfcAmountOfSubstanceMeasure() {
    if (ifcAmountOfSubstanceMeasureEClass == null) {
        ifcAmountOfSubstanceMeasureEClass = (EClass) EPackage.Registry.INSTANCE
                .getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(656);
    }
    return ifcAmountOfSubstanceMeasureEClass;
}
/**
 * Binds a listener to the given event type for synchronous dispatch by
 * delegating to {@code _bind} with {@code 0} as the final argument.
 *
 * @param eventType the event class to listen for
 * @param eventListener the listener to bind
 * @return this event bus for chaining
 */
public EventBus bindSync(Class<? extends EventObject> eventType, ActEventListener eventListener) {
    return _bind(actEventListeners, eventType, eventListener, 0);
}
/**
 * Returns the commerce price list matching the given company ID and external
 * reference code, or {@code null} when no match exists.
 *
 * <p>NOTE(review): this has the shape of Liferay ServiceBuilder-generated
 * persistence code; edit with care, the cache protocol below is load-bearing.</p>
 *
 * @param companyId the company ID to match
 * @param externalReferenceCode the external reference code to match; {@code null}
 *        and empty string each select a different SQL fragment
 * @param retrieveFromCache whether to consult the finder cache before querying
 * @return the matching {@code CommercePriceList}, or {@code null} if none found
 */
@Override
public CommercePriceList fetchByC_ERC(long companyId,
    String externalReferenceCode, boolean retrieveFromCache) {
    Object[] finderArgs = new Object[] { companyId, externalReferenceCode };
    Object result = null;
    if (retrieveFromCache) {
        result = finderCache.getResult(FINDER_PATH_FETCH_BY_C_ERC,
            finderArgs, this);
    }
    // Guard against a stale cache hit: discard the cached entity if its
    // columns no longer match the requested key.
    if (result instanceof CommercePriceList) {
        CommercePriceList commercePriceList = (CommercePriceList)result;
        if ((companyId != commercePriceList.getCompanyId()) ||
                !Objects.equals(externalReferenceCode,
                    commercePriceList.getExternalReferenceCode())) {
            result = null;
        }
    }
    if (result == null) {
        // Cache miss (or bypass): build the query, choosing the external
        // reference code fragment for the null / empty / bound cases.
        StringBundler query = new StringBundler(4);
        query.append(_SQL_SELECT_COMMERCEPRICELIST_WHERE);
        query.append(_FINDER_COLUMN_C_ERC_COMPANYID_2);
        boolean bindExternalReferenceCode = false;
        if (externalReferenceCode == null) {
            query.append(_FINDER_COLUMN_C_ERC_EXTERNALREFERENCECODE_1);
        }
        else if (externalReferenceCode.equals("")) {
            query.append(_FINDER_COLUMN_C_ERC_EXTERNALREFERENCECODE_3);
        }
        else {
            bindExternalReferenceCode = true;
            query.append(_FINDER_COLUMN_C_ERC_EXTERNALREFERENCECODE_2);
        }
        String sql = query.toString();
        Session session = null;
        try {
            session = openSession();
            Query q = session.createQuery(sql);
            QueryPos qPos = QueryPos.getInstance(q);
            qPos.add(companyId);
            if (bindExternalReferenceCode) {
                qPos.add(externalReferenceCode);
            }
            List<CommercePriceList> list = q.list();
            if (list.isEmpty()) {
                // Cache the empty list as a negative-result marker; the
                // instanceof List check at the end maps it back to null.
                finderCache.putResult(FINDER_PATH_FETCH_BY_C_ERC,
                    finderArgs, list);
            }
            else {
                if (list.size() > 1) {
                    // More than one row violates the logical unique
                    // restriction; warn and fall through with the first
                    // element of the reverse-sorted list.
                    Collections.sort(list, Collections.reverseOrder());
                    if (_log.isWarnEnabled()) {
                        _log.warn(
                            "CommercePriceListPersistenceImpl.fetchByC_ERC(long, String, boolean) with parameters (" +
                            StringUtil.merge(finderArgs) +
                            ") yields a result set with more than 1 result. This violates the logical unique restriction. There is no order guarantee on which result is returned by this finder.");
                    }
                }
                CommercePriceList commercePriceList = list.get(0);
                result = commercePriceList;
                cacheResult(commercePriceList);
            }
        }
        catch (Exception e) {
            // Drop any partial cache entry so a transient failure is not
            // remembered, then rethrow via the persistence wrapper.
            finderCache.removeResult(FINDER_PATH_FETCH_BY_C_ERC, finderArgs);
            throw processException(e);
        }
        finally {
            closeSession(session);
        }
    }
    // A cached (or just-stored) empty List means "no such row" -> null.
    if (result instanceof List<?>) {
        return null;
    }
    else {
        return (CommercePriceList)result;
    }
}
java | public static JMDictSense create() {
if (index >= instances.size()) {
instances.add(new JMDictSense());
}
return instances.get(index++);
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.