language | func_code_string
---|---|
java | protected double invCdfRootFinding(double p, double tol)
{
if (p < 0 || p > 1)
throw new ArithmeticException("Value of p must be in the range [0,1], not " + p);
//two special case checks, as they can cause a failure to get a positive and negative value on the ends, which means we can't do a search for the root
//Special case check, p < min value
if(min() >= Integer.MIN_VALUE)
if(p <= cdf(min()))
return min();
//special case check, p >= max value
if(max() < Integer.MAX_VALUE)
if(p > cdf(max()-1))
return max();
//the stepwise nature of discrete distributions can cause problems for the search, so we pass in an interpolated (smoothed) CDF so that our numerical methods will behave better
Function1D cdfInterpolated = (double x) ->
{
double query = x;
//if it happens to fall on an int we just compute the regular value
if(Math.rint(query) == query)
return cdf((int)query) - p;
//else, interpolate
double larger = query+1;
double diff = larger-query;
return cdf(query)*diff + cdf(larger)*(1-diff) - p;
};
double a = Double.isInfinite(min()) ? Integer.MIN_VALUE*.95 : min();
double b = Double.isInfinite(max()) ? Integer.MAX_VALUE*.95 : max();
double toRet = Zeroin.root(tol, a, b, cdfInterpolated);
return Math.round(toRet);
} |
python | def check_shapes(pfeed, *, as_df=False, include_warnings=False):
"""
Analog of :func:`check_frequencies` for ``pfeed.shapes``
"""
table = 'shapes'
problems = []
# Preliminary checks
    if pfeed.shapes is None:
        return gt.format_problems(problems, as_df=as_df)
f = pfeed.shapes.copy()
problems = check_for_required_columns(problems, table, f)
if problems:
return gt.format_problems(problems, as_df=as_df)
if include_warnings:
problems = check_for_invalid_columns(problems, table, f)
# Check shape_id
problems = gt.check_column(problems, table, f, 'shape_id', gt.valid_str)
# Check geometry
v = lambda x: isinstance(x, sg.LineString) and not x.is_empty
problems = gt.check_column(problems, table, f, 'geometry', v)
return gt.format_problems(problems, as_df=as_df) |
java | protected void processRequest(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
log.severe("Binary content has been requested");
String repository = request.getParameter("repository");
String workspace = request.getParameter("workspace");
String path = request.getParameter("path");
String property = request.getParameter("property");
Connector connector = (Connector) request.getSession().getAttribute("connector");
try {
Session session = connector.find(repository).session(workspace);
Node node = session.getNode(path);
Property p;
try {
p = node.getProperty(property);
} catch (PathNotFoundException e) {
response.setContentType("text/html");
PrintWriter writer = response.getWriter();
writer.write("<html>");
writer.write("<p>Content not found or recently removed</p>");
writer.write("</html>");
return;
}
            Binary binary = p.getBinary();
            String contentType = node.getProperty("jcr:mimeType").getString();
            response.setContentType(contentType);
            // stream the binary to the response, releasing the stream and the
            // JCR binary even if the copy fails
            try (InputStream in = binary.getStream()) {
                int b;
                while ((b = in.read()) != -1) {
                    response.getOutputStream().write(b);
                }
            } finally {
                binary.dispose();
            }
            log.info("Sent binary content");
} catch (Exception e) {
throw new ServletException(e);
}
} |
java | public boolean isValidToken() {
try {
            // Validate the token with a lightweight authenticated call:
            // fetching a known load zone only succeeds with valid credentials.
            LoadZone zone = getLoadZone(LoadZone.AMAZON_US_ASHBURN.uid);
            return zone == LoadZone.AMAZON_US_ASHBURN;
} catch (Exception e) {
log.info("API token validation failed: " + e);
}
return false;
} |
python | def get_summary(self):
""" Return the function summary
Returns:
        (str, list, list, list, list): (name, inheritance, variables, function summaries, modifier summaries)
"""
func_summaries = [f.get_summary() for f in self.functions]
modif_summaries = [f.get_summary() for f in self.modifiers]
return (self.name, [str(x) for x in self.inheritance], [str(x) for x in self.variables], func_summaries, modif_summaries) |
java | public void removeLine(final Line line)
{
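    // Unlink the given line from the doubly-linked list, updating the
    // head (this.lines) and tail (this.lineTail) references as needed.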
if (line.previous == null)
{
this.lines = line.next;
}
else
{
line.previous.next = line.next;
}
if (line.next == null)
{
this.lineTail = line.previous;
}
else
{
line.next.previous = line.previous;
}
line.previous = line.next = null;
} |
python | def save(self, obj, data, is_m2m=False):
"""
If this field is not declared readonly, the object's attribute will
be set to the value returned by :meth:`~import_export.fields.Field.clean`.
"""
if not self.readonly:
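            # walk any nested relations (e.g. "author__name") down to the
            # object that actually holds the final attribute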
attrs = self.attribute.split('__')
for attr in attrs[:-1]:
obj = getattr(obj, attr, None)
cleaned = self.clean(data)
if cleaned is not None or self.saves_null_values:
if not is_m2m:
setattr(obj, attrs[-1], cleaned)
else:
getattr(obj, attrs[-1]).set(cleaned) |
java | static List<JQLPlaceHolder> removeDynamicPlaceHolder(List<JQLPlaceHolder> placeHolders) {
List<JQLPlaceHolder> result = new ArrayList<>();
for (JQLPlaceHolder item : placeHolders) {
if (item.type != JQLPlaceHolderType.DYNAMIC_SQL) {
result.add(item);
}
}
return result;
} |
java | public static Object createObject(String rawJSON) throws TwitterException {
try {
JSONObject json = new JSONObject(rawJSON);
JSONObjectType.Type jsonObjectType = JSONObjectType.determine(json);
switch (jsonObjectType) {
                case SENDER:
                case DIRECT_MESSAGE:
                    return registerJSONObject(new DirectMessageJSONImpl(json.getJSONObject("direct_message")), json);
                case STATUS:
                    return registerJSONObject(new StatusJSONImpl(json), json);
case DELETE:
return registerJSONObject(new StatusDeletionNoticeImpl(json.getJSONObject("delete").getJSONObject("status")), json);
case LIMIT:
// TODO: Perhaps there should be a TrackLimitationNotice object?
// The onTrackLimitationNotice method could take that as an arg.
return json;
case SCRUB_GEO:
return json;
default:
// The object type is unrecognized...just return the json
return json;
}
} catch (JSONException e) {
throw new TwitterException(e);
}
} |
python | def process_messages_loop_internal(self):
"""
Busy loop that processes incoming WorkRequest messages via functions specified by add_command.
        Terminates when a command invokes the shutdown method.
"""
logging.info("Starting work queue loop.")
self.connection.receive_loop_with_callback(self.queue_name, self.process_message) |
python | def request_motion_detection_enable(blink, network, camera_id):
"""
Enable motion detection for a camera.
:param blink: Blink instance.
:param network: Sync module network id.
:param camera_id: Camera ID of camera to enable.
"""
url = "{}/network/{}/camera/{}/enable".format(blink.urls.base_url,
network,
camera_id)
return http_post(blink, url) |
java | public void run() {
boolean process = true;
synchronized(this) {
BlockEncodeRequest ber = manager.getWaitingRequest();
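            // a negative frameNumber is the manager's signal that no more work
            // is coming and this encoder thread should exit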
if(ber != null && ber.frameNumber < 0)
ber = null;
while(ber != null && process) {
if(ber.frameNumber < 0) {
process = false;
}
else {//get available BlockEncodeRequest from manager
ber.encodedSamples = frame.encodeSamples(ber.samples, ber.count,
ber.start, ber.skip, ber.result, ber.frameNumber);
ber.valid = true;
manager.returnFinishedRequest(ber);
ber = manager.getWaitingRequest();
}
}
manager.notifyFrameThreadExit(this);
}
} |
python | def fibre_channel_wwns():
    '''
    Return the ``fc_wwn`` grain: a list of fibre channel HBA WWNs,
    or ``False`` on unsupported platforms
    '''
grains = {'fc_wwn': False}
if salt.utils.platform.is_linux():
grains['fc_wwn'] = _linux_wwns()
elif salt.utils.platform.is_windows():
grains['fc_wwn'] = _windows_wwns()
return grains |
java | public static void writeCollection(XMLOutput xmlOutput, Collection<? extends XMLWriteable> collection) throws IOException {
for (XMLWriteable obj : collection) {
obj.writeXML(xmlOutput);
}
} |
java | public static void createTables( Connection connection ) throws IOException, SQLException {
StringBuilder sB = new StringBuilder();
sB.append("CREATE TABLE ");
sB.append(TABLE_IMAGES);
sB.append(" (");
sB.append(ImageTableFields.COLUMN_ID.getFieldName());
sB.append(" INTEGER PRIMARY KEY, ");
sB.append(ImageTableFields.COLUMN_LON.getFieldName()).append(" REAL NOT NULL, ");
sB.append(ImageTableFields.COLUMN_LAT.getFieldName()).append(" REAL NOT NULL,");
sB.append(ImageTableFields.COLUMN_ALTIM.getFieldName()).append(" REAL NOT NULL,");
sB.append(ImageTableFields.COLUMN_AZIM.getFieldName()).append(" REAL NOT NULL,");
sB.append(ImageTableFields.COLUMN_IMAGEDATA_ID.getFieldName());
sB.append(" INTEGER NOT NULL ");
sB.append("CONSTRAINT " + ImageTableFields.COLUMN_IMAGEDATA_ID.getFieldName() + " REFERENCES ");
sB.append(TABLE_IMAGE_DATA);
sB.append("(");
sB.append(ImageDataTableFields.COLUMN_ID);
sB.append(") ON DELETE CASCADE,");
sB.append(ImageTableFields.COLUMN_TS.getFieldName()).append(" DATE NOT NULL,");
sB.append(ImageTableFields.COLUMN_TEXT.getFieldName()).append(" TEXT NOT NULL,");
sB.append(ImageTableFields.COLUMN_NOTE_ID.getFieldName()).append(" INTEGER,");
sB.append(ImageTableFields.COLUMN_ISDIRTY.getFieldName()).append(" INTEGER NOT NULL");
sB.append(");");
String CREATE_TABLE_IMAGES = sB.toString();
sB = new StringBuilder();
sB.append("CREATE INDEX images_ts_idx ON ");
sB.append(TABLE_IMAGES);
sB.append(" ( ");
sB.append(ImageTableFields.COLUMN_TS.getFieldName());
sB.append(" );");
String CREATE_INDEX_IMAGES_TS = sB.toString();
sB = new StringBuilder();
sB.append("CREATE INDEX images_x_by_y_idx ON ");
sB.append(TABLE_IMAGES);
sB.append(" ( ");
sB.append(ImageTableFields.COLUMN_LON.getFieldName());
sB.append(", ");
sB.append(ImageTableFields.COLUMN_LAT.getFieldName());
sB.append(" );");
String CREATE_INDEX_IMAGES_X_BY_Y = sB.toString();
sB = new StringBuilder();
sB.append("CREATE INDEX images_noteid_idx ON ");
sB.append(TABLE_IMAGES);
sB.append(" ( ");
sB.append(ImageTableFields.COLUMN_NOTE_ID.getFieldName());
sB.append(" );");
String CREATE_INDEX_IMAGES_NOTEID = sB.toString();
sB = new StringBuilder();
sB.append("CREATE INDEX images_isdirty_idx ON ");
sB.append(TABLE_IMAGES);
sB.append(" ( ");
sB.append(ImageTableFields.COLUMN_ISDIRTY.getFieldName());
sB.append(" );");
String CREATE_INDEX_IMAGES_ISDIRTY = sB.toString();
sB = new StringBuilder();
sB.append("CREATE TABLE ");
sB.append(TABLE_IMAGE_DATA);
sB.append(" (");
sB.append(ImageDataTableFields.COLUMN_ID.getFieldName());
sB.append(" INTEGER PRIMARY KEY, ");
sB.append(ImageDataTableFields.COLUMN_IMAGE.getFieldName()).append(" BLOB NOT NULL,");
sB.append(ImageDataTableFields.COLUMN_THUMBNAIL.getFieldName()).append(" BLOB NOT NULL");
sB.append(");");
String CREATE_TABLE_IMAGEDATA = sB.toString();
try (Statement statement = connection.createStatement()) {
statement.setQueryTimeout(30); // set timeout to 30 sec.
statement.executeUpdate(CREATE_TABLE_IMAGES);
statement.executeUpdate(CREATE_INDEX_IMAGES_TS);
statement.executeUpdate(CREATE_INDEX_IMAGES_X_BY_Y);
statement.executeUpdate(CREATE_INDEX_IMAGES_NOTEID);
statement.executeUpdate(CREATE_INDEX_IMAGES_ISDIRTY);
statement.executeUpdate(CREATE_TABLE_IMAGEDATA);
        } catch (Exception e) {
            // wrap with the original exception as cause so the stack trace is not lost
            throw new IOException(e.getLocalizedMessage(), e);
        }
} |
java | public final void addItemStream(final ItemStream itemStream, long lockID, final Transaction transaction) throws MessageStoreException
{
// Defect 410652
// Check the transaction being used is from the same MessageStore as
// ours so that we don't get a mismatch and run the possibility of
// hitting a DuplicateKeyException at persistence time. If the MS's
// don't match then the unique key generator being used for this
// add could be using a range that has already been used in the MS
// that will be used to persist the transaction.
final MessageStoreImpl messageStore = getMessageStoreImpl();
final MessageStore tranStore = ((PersistentTransaction)transaction).getOwningMessageStore();
// We only need to do a simple equality check as all that we really
// care about is that the same MS instance in memory is being used.
if (messageStore != tranStore)
{
MismatchedMessageStoreException mmse = new MismatchedMessageStoreException("Transaction supplied on add does not originate from this MessageStore! MS: "+messageStore+", Tran.MS: "+tranStore);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.event(this, tc, "Transaction supplied on add does not originate from this MessageStore! MS: "+messageStore+", Tran.MS: "+tranStore, mmse);
throw mmse;
}
int strategy = itemStream.getStorageStrategy();
final long itemID = messageStore.getUniqueValue(strategy);
TupleTypeEnum type = TupleTypeEnum.ITEM_STREAM;
Persistable childPersistable = getTuple().createPersistable(itemID, type);
childPersistable.setStorageStrategy(strategy);
final AbstractItemLink link = new ItemStreamLink(itemStream, this, childPersistable);
// Defect 463642
// Revert to using spill limits previously removed in SIB0112d.ms.2
link.setParentWasSpillingAtAddTime(getListStatistics().isSpilling());
messageStore.registerLink(link, itemStream);
link.cmdAdd(this, lockID, (PersistentTransaction)transaction);
} |
java | @Override
protected void checkPrimitiveValidity() {
BusPrimitiveInvalidity invalidityReason = null;
if (this.itineraries.isEmpty()) {
invalidityReason = new BusPrimitiveInvalidity(
BusPrimitiveInvalidityType.NO_ITINERARY_IN_LINE,
null);
} else {
final Iterator<BusItinerary> iterator = this.itineraries.iterator();
BusItinerary itinerary;
while (iterator.hasNext() && invalidityReason == null) {
itinerary = iterator.next();
if (!itinerary.isValidPrimitive()) {
invalidityReason = new BusPrimitiveInvalidity(
BusPrimitiveInvalidityType.INVALID_ITINERARY_IN_LINE,
itinerary.getName());
}
}
}
setPrimitiveValidity(invalidityReason);
} |
java | @Override public void set(FieldType field, Object value)
{
if (field != null)
{
int index = field.getValue();
if (m_eventsEnabled)
{
fireFieldChangeEvent((ResourceField) field, m_array[index], value);
}
m_array[index] = value;
}
} |
java | public void marshall(GetDomainNamesRequest getDomainNamesRequest, ProtocolMarshaller protocolMarshaller) {
if (getDomainNamesRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(getDomainNamesRequest.getPosition(), POSITION_BINDING);
protocolMarshaller.marshall(getDomainNamesRequest.getLimit(), LIMIT_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} |
python | def memoize(method):
"""A new method which acts like the given method but memoizes arguments
See https://en.wikipedia.org/wiki/Memoization for the general idea
>>> @memoize
... def test(arg):
... print('called')
... return arg + 1
>>> test(1)
called
2
>>> test(2)
called
3
>>> test(1)
2
The returned method also has an attached method "invalidate"
which removes given values from the cache
Or empties the cache if no values are given
>>> test.invalidate(2)
>>> test(1)
2
>>> test(2)
called
3
"""
method.cache = {}
def invalidate(*arguments, **keyword_arguments):
key = _represent_arguments(*arguments, **keyword_arguments)
if not key:
method.cache = {}
elif key in method.cache:
del method.cache[key]
else:
            raise KeyError(
                'Not previously cached: %s(%s)' % (method.__name__, key))
def new_method(*arguments, **keyword_arguments):
"""Cache the arguments and return values of the call
The key cached is the repr() of arguments
This allows more types of values to be used as keys to the cache
Such as lists and tuples
"""
key = _represent_arguments(*arguments, **keyword_arguments)
if key not in method.cache:
method.cache[key] = method(*arguments, **keyword_arguments)
return method.cache[key]
new_method.invalidate = invalidate
new_method.__doc__ = method.__doc__
new_method.__name__ = 'memoize(%s)' % method.__name__
return new_method |
python | def statuses_destroy(self, id, trim_user=None):
"""
Destroys the status specified by the ID parameter.
https://dev.twitter.com/docs/api/1.1/post/statuses/destroy/%3Aid
:param str id:
(*required*) The numerical ID of the desired tweet.
:param bool trim_user:
When set to ``True``, the return value's user object includes only
the status author's numerical ID.
:returns:
A tweet dict containing the destroyed tweet.
"""
params = {'id': id}
set_bool_param(params, 'trim_user', trim_user)
return self._post_api('statuses/destroy.json', params) |
java | public final void crc32(Register dst, Register src)
{
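    // CRC32 only accepts a 32-bit (GPD) or 64-bit (GPQ) general purpose
    // register as its destination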
assert(dst.isRegType(REG_GPD) || dst.isRegType(REG_GPQ));
emitX86(INST_CRC32, dst, src);
} |
java | private void addDescendantElements() throws NodeSelectorException {
for (Node node : nodes) {
List<Node> nl;
if (node instanceof Document || node instanceof Element) {
nl = DOMHelper.getElementsByTagName(node, selector.getTagName());
} else {
throw new NodeSelectorException("Only document and element nodes allowed!");
}
result.addAll(nl);
}
} |
python | def dtype(self):
"""Data-type of the array's elements.
Returns
-------
numpy.dtype
This NDArray's data type.
Examples
--------
>>> x = mx.nd.zeros((2,3))
>>> x.dtype
<type 'numpy.float32'>
>>> y = mx.nd.zeros((2,3), dtype='int32')
>>> y.dtype
<type 'numpy.int32'>
"""
mx_dtype = ctypes.c_int()
check_call(_LIB.MXNDArrayGetDType(
self.handle, ctypes.byref(mx_dtype)))
return _DTYPE_MX_TO_NP[mx_dtype.value] |
java | private void checkDescendantNames(Name name, boolean nameIsDefined) {
if (name.props != null) {
for (Name prop : name.props) {
// if the ancestor of a property is not defined, then we should emit
// warnings for all references to the property.
boolean propIsDefined = false;
if (nameIsDefined) {
// if the ancestor of a property is defined, then let's check that
// the property is also explicitly defined if it needs to be.
propIsDefined =
(!propertyMustBeInitializedByFullName(prop)
|| prop.getGlobalSets() + prop.getLocalSets() > 0);
}
validateName(prop, propIsDefined);
checkDescendantNames(prop, propIsDefined);
}
}
} |
python | def get_zone_temperature(self, zone_name):
"""
Get the temperature for a zone
"""
zone = self.get_zone(zone_name)
        if zone is None:
            raise RuntimeError("Unknown zone: %s" % zone_name)
return zone['currentTemperature'] |
java | public FactorEnrollmentResponse activateFactor(long userId, long deviceId) throws OAuthSystemException, OAuthProblemException, URISyntaxException
{
cleanError();
prepareToken();
URIBuilder url = new URIBuilder(settings.getURL(Constants.ACTIVATE_FACTOR_URL, userId, deviceId));
OneloginURLConnectionClient httpClient = new OneloginURLConnectionClient();
OAuthClient oAuthClient = new OAuthClient(httpClient);
OAuthClientRequest bearerRequest = new OAuthBearerClientRequest(url.toString())
.buildHeaderMessage();
Map<String, String> headers = getAuthorizedHeader();
bearerRequest.setHeaders(headers);
        FactorEnrollmentResponse factorEnrollmentResponse = null;
OneloginOAuthJSONResourceResponse oAuthResponse = oAuthClient.resource(bearerRequest, OAuth.HttpMethod.POST, OneloginOAuthJSONResourceResponse.class);
if (oAuthResponse.getResponseCode() == 200) {
JSONObject data = oAuthResponse.getData();
            factorEnrollmentResponse = new FactorEnrollmentResponse(data);
} else {
error = oAuthResponse.getError();
errorDescription = oAuthResponse.getErrorDescription();
}
        return factorEnrollmentResponse;
} |
java | @SuppressWarnings("checkstyle:magicnumber")
public TextStyle capacityMethodInvocation() {
final TextStyle textStyle = extensionMethodInvocation().copy();
//textStyle.setColor(new RGB(128, 36, 0));
textStyle.setStyle(SWT.ITALIC);
return textStyle;
} |
python | def predict(self, data, output_margin=False, ntree_limit=None, validate_features=True):
"""
Predict with `data`.
.. note:: This function is not thread safe.
For each booster object, predict can only be called from one thread.
If you want to run prediction using multiple thread, call ``xgb.copy()`` to make copies
of model object and then call ``predict()``.
.. note:: Using ``predict()`` with DART booster
If the booster object is DART type, ``predict()`` will perform dropouts, i.e. only
some of the trees will be evaluated. This will produce incorrect results if ``data`` is
not the training data. To obtain correct results on test sets, set ``ntree_limit`` to
a nonzero value, e.g.
.. code-block:: python
preds = bst.predict(dtest, ntree_limit=num_round)
Parameters
----------
data : DMatrix
The dmatrix storing the input.
output_margin : bool
Whether to output the raw untransformed margin value.
ntree_limit : int
Limit number of trees in the prediction; defaults to best_ntree_limit if defined
(i.e. it has been trained with early stopping), otherwise 0 (use all trees).
validate_features : bool
When this is True, validate that the Booster's and data's feature_names are identical.
Otherwise, it is assumed that the feature_names are the same.
Returns
-------
prediction : numpy array
"""
test_dmatrix = DMatrix(data, missing=self.missing, nthread=self.n_jobs)
if ntree_limit is None:
ntree_limit = getattr(self, "best_ntree_limit", 0)
class_probs = self.get_booster().predict(test_dmatrix,
output_margin=output_margin,
ntree_limit=ntree_limit,
validate_features=validate_features)
if output_margin:
# If output_margin is active, simply return the scores
return class_probs
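        # multi-class output: take the argmax over class columns;
        # binary output: threshold the positive-class probability at 0.5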
if len(class_probs.shape) > 1:
column_indexes = np.argmax(class_probs, axis=1)
else:
column_indexes = np.repeat(0, class_probs.shape[0])
column_indexes[class_probs > 0.5] = 1
return self._le.inverse_transform(column_indexes) |
java | public final EObject ruleXAdditiveExpression() throws RecognitionException {
EObject current = null;
EObject this_XMultiplicativeExpression_0 = null;
EObject lv_rightOperand_3_0 = null;
enterRule();
try {
// InternalSARL.g:13044:2: ( (this_XMultiplicativeExpression_0= ruleXMultiplicativeExpression ( ( ( ( () ( ( ruleOpAdd ) ) ) )=> ( () ( ( ruleOpAdd ) ) ) ) ( (lv_rightOperand_3_0= ruleXMultiplicativeExpression ) ) )* ) )
// InternalSARL.g:13045:2: (this_XMultiplicativeExpression_0= ruleXMultiplicativeExpression ( ( ( ( () ( ( ruleOpAdd ) ) ) )=> ( () ( ( ruleOpAdd ) ) ) ) ( (lv_rightOperand_3_0= ruleXMultiplicativeExpression ) ) )* )
{
// InternalSARL.g:13045:2: (this_XMultiplicativeExpression_0= ruleXMultiplicativeExpression ( ( ( ( () ( ( ruleOpAdd ) ) ) )=> ( () ( ( ruleOpAdd ) ) ) ) ( (lv_rightOperand_3_0= ruleXMultiplicativeExpression ) ) )* )
// InternalSARL.g:13046:3: this_XMultiplicativeExpression_0= ruleXMultiplicativeExpression ( ( ( ( () ( ( ruleOpAdd ) ) ) )=> ( () ( ( ruleOpAdd ) ) ) ) ( (lv_rightOperand_3_0= ruleXMultiplicativeExpression ) ) )*
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXAdditiveExpressionAccess().getXMultiplicativeExpressionParserRuleCall_0());
}
pushFollow(FOLLOW_123);
this_XMultiplicativeExpression_0=ruleXMultiplicativeExpression();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
current = this_XMultiplicativeExpression_0;
afterParserOrEnumRuleCall();
}
// InternalSARL.g:13054:3: ( ( ( ( () ( ( ruleOpAdd ) ) ) )=> ( () ( ( ruleOpAdd ) ) ) ) ( (lv_rightOperand_3_0= ruleXMultiplicativeExpression ) ) )*
loop312:
do {
int alt312=2;
int LA312_0 = input.LA(1);
if ( (LA312_0==75) ) {
int LA312_2 = input.LA(2);
if ( (synpred39_InternalSARL()) ) {
alt312=1;
}
}
else if ( (LA312_0==74) ) {
int LA312_3 = input.LA(2);
if ( (synpred39_InternalSARL()) ) {
alt312=1;
}
}
switch (alt312) {
case 1 :
// InternalSARL.g:13055:4: ( ( ( () ( ( ruleOpAdd ) ) ) )=> ( () ( ( ruleOpAdd ) ) ) ) ( (lv_rightOperand_3_0= ruleXMultiplicativeExpression ) )
{
// InternalSARL.g:13055:4: ( ( ( () ( ( ruleOpAdd ) ) ) )=> ( () ( ( ruleOpAdd ) ) ) )
// InternalSARL.g:13056:5: ( ( () ( ( ruleOpAdd ) ) ) )=> ( () ( ( ruleOpAdd ) ) )
{
// InternalSARL.g:13066:5: ( () ( ( ruleOpAdd ) ) )
// InternalSARL.g:13067:6: () ( ( ruleOpAdd ) )
{
// InternalSARL.g:13067:6: ()
// InternalSARL.g:13068:7:
{
if ( state.backtracking==0 ) {
current = forceCreateModelElementAndSet(
grammarAccess.getXAdditiveExpressionAccess().getXBinaryOperationLeftOperandAction_1_0_0_0(),
current);
}
}
// InternalSARL.g:13074:6: ( ( ruleOpAdd ) )
// InternalSARL.g:13075:7: ( ruleOpAdd )
{
// InternalSARL.g:13075:7: ( ruleOpAdd )
// InternalSARL.g:13076:8: ruleOpAdd
{
if ( state.backtracking==0 ) {
if (current==null) {
current = createModelElement(grammarAccess.getXAdditiveExpressionRule());
}
}
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXAdditiveExpressionAccess().getFeatureJvmIdentifiableElementCrossReference_1_0_0_1_0());
}
pushFollow(FOLLOW_45);
ruleOpAdd();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
afterParserOrEnumRuleCall();
}
}
}
}
}
// InternalSARL.g:13092:4: ( (lv_rightOperand_3_0= ruleXMultiplicativeExpression ) )
// InternalSARL.g:13093:5: (lv_rightOperand_3_0= ruleXMultiplicativeExpression )
{
// InternalSARL.g:13093:5: (lv_rightOperand_3_0= ruleXMultiplicativeExpression )
// InternalSARL.g:13094:6: lv_rightOperand_3_0= ruleXMultiplicativeExpression
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXAdditiveExpressionAccess().getRightOperandXMultiplicativeExpressionParserRuleCall_1_1_0());
}
pushFollow(FOLLOW_123);
lv_rightOperand_3_0=ruleXMultiplicativeExpression();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
if (current==null) {
current = createModelElementForParent(grammarAccess.getXAdditiveExpressionRule());
}
set(
current,
"rightOperand",
lv_rightOperand_3_0,
"io.sarl.lang.SARL.XMultiplicativeExpression");
afterParserOrEnumRuleCall();
}
}
}
}
break;
default :
break loop312;
}
} while (true);
}
}
if ( state.backtracking==0 ) {
leaveRule();
}
}
catch (RecognitionException re) {
recover(input,re);
appendSkippedTokens();
}
finally {
}
return current;
} |
python | def send_mail(subject, message_plain, message_html, email_from, email_to,
custom_headers={}, attachments=()):
"""
Build the email as a multipart message containing
a multipart alternative for text (plain, HTML) plus
all the attached files.
"""
if not message_plain and not message_html:
raise ValueError(_("Either message_plain or message_html should be not None"))
if not message_plain:
message_plain = html2text(message_html)
message = {}
message['subject'] = subject
message['body'] = message_plain
message['from_email'] = email_from
message['to'] = email_to
if attachments:
message['attachments'] = attachments
if custom_headers:
message['headers'] = custom_headers
msg = EmailMultiAlternatives(**message)
if message_html:
msg.attach_alternative(message_html, "text/html")
msg.send() |
python | def add_custom_func(self, func, dim, *args, **kwargs):
""" adds a user defined function to extract features
Parameters
----------
func : function
a user-defined function, which accepts mdtraj.Trajectory object as
first parameter and as many optional and named arguments as desired.
Has to return a numpy.ndarray ndim=2.
dim : int
output dimension of :py:obj:`function`
description: str or None
a message for the describe feature list.
args : any number of positional arguments
these have to be in the same order as :py:obj:`func` is expecting them
kwargs : dictionary
named arguments passed to func
Notes
-----
You can pass a description list to describe the output of your function by element,
by passing a list of strings with the same lengths as dimensions.
Alternatively a single element list or str will be expanded to match the output dimension.
"""
description = kwargs.pop('description', None)
f = CustomFeature(func, dim=dim, description=description, fun_args=args, fun_kwargs=kwargs)
self.add_custom_feature(f) |
python | def component_doi(soup):
"""
Look for all object-id of pub-type-id = doi, these are the component DOI tags
"""
component_doi = []
object_id_tags = raw_parser.object_id(soup, pub_id_type = "doi")
# Get components too for later
component_list = components(soup)
position = 1
for tag in object_id_tags:
component_object = {}
component_object["doi"] = doi_uri_to_doi(tag.text)
component_object["position"] = position
# Try to find the type of component
for component in component_list:
if "doi" in component and component["doi"] == component_object["doi"]:
component_object["type"] = component["type"]
component_doi.append(component_object)
position = position + 1
return component_doi |
java | private static void checkLineFeed(final InputStream input,
final ByteArrayOutputStream baos, final Integer position)
throws IOException {
if (input.read() != '\n') {
throw new HttpException(
HttpURLConnection.HTTP_BAD_REQUEST,
String.format(
"there is no LF after CR in header, line #%d: \"%s\"",
position,
new Utf8String(baos.toByteArray()).asString()
)
);
}
} |
java | protected PageContext getPageContext()
{
JspContext ctxt = getJspContext();
if (ctxt instanceof PageContext)
return (PageContext) ctxt;
// assert the page context and log an error in production
assert(false) : "The JspContext was not a PageContext";
logger.error("The JspContext was not a PageContext");
return null;
} |
python | def _serialiseFirstJob(self, jobStore):
"""
Serialises the root job. Returns the wrapping job.
:param toil.jobStores.abstractJobStore.AbstractJobStore jobStore:
"""
# Check if the workflow root is a checkpoint but not a leaf vertex.
# All other job vertices in the graph are checked by checkNewCheckpointsAreLeafVertices
if self.checkpoint and not Job._isLeafVertex(self):
raise JobGraphDeadlockException(
'New checkpoint job %s is not a leaf in the job graph' % self)
# Create first jobGraph
jobGraph = self._createEmptyJobGraphForJob(jobStore=jobStore, predecessorNumber=0)
# Write the graph of jobs to disk
self._serialiseJobGraph(jobGraph, jobStore, None, True)
jobStore.update(jobGraph)
# Store the name of the first job in a file in case of restart. Up to this point the
# root job is not recoverable. FIXME: "root job" or "first job", which one is it?
jobStore.setRootJob(jobGraph.jobStoreID)
return jobGraph |
java | public boolean isExpired() {
if (_service.getTtl() == 0) {
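            // a TTL of zero means the cached data never expires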
return false;
}
long t = this.cacheStartTime.getTime();
Date validTime = new Date(t + (_service.getTtl() * ONE_MINUTE_IN_MILLIS));
Date now = new Date();
return (now.getTime() > validTime.getTime());
} |
java | public final EObject ruleXAssignment() throws RecognitionException {
EObject current = null;
EObject lv_value_3_0 = null;
EObject this_XConditionalExpression_4 = null;
EObject lv_rightOperand_7_0 = null;
enterRule();
try {
// InternalPureXbase.g:653:2: ( ( ( () ( ( ruleFeatureCallID ) ) ruleOpSingleAssign ( (lv_value_3_0= ruleXAssignment ) ) ) | (this_XConditionalExpression_4= ruleXConditionalExpression ( ( ( ( () ( ( ruleOpMultiAssign ) ) ) )=> ( () ( ( ruleOpMultiAssign ) ) ) ) ( (lv_rightOperand_7_0= ruleXAssignment ) ) )? ) ) )
// InternalPureXbase.g:654:2: ( ( () ( ( ruleFeatureCallID ) ) ruleOpSingleAssign ( (lv_value_3_0= ruleXAssignment ) ) ) | (this_XConditionalExpression_4= ruleXConditionalExpression ( ( ( ( () ( ( ruleOpMultiAssign ) ) ) )=> ( () ( ( ruleOpMultiAssign ) ) ) ) ( (lv_rightOperand_7_0= ruleXAssignment ) ) )? ) )
{
// InternalPureXbase.g:654:2: ( ( () ( ( ruleFeatureCallID ) ) ruleOpSingleAssign ( (lv_value_3_0= ruleXAssignment ) ) ) | (this_XConditionalExpression_4= ruleXConditionalExpression ( ( ( ( () ( ( ruleOpMultiAssign ) ) ) )=> ( () ( ( ruleOpMultiAssign ) ) ) ) ( (lv_rightOperand_7_0= ruleXAssignment ) ) )? ) )
int alt17=2;
int LA17_0 = input.LA(1);
if ( (LA17_0==RULE_ID) ) {
int LA17_1 = input.LA(2);
if ( (LA17_1==20) ) {
alt17=1;
}
else if ( (LA17_1==EOF||(LA17_1>=RULE_STRING && LA17_1<=RULE_ID)||(LA17_1>=13 && LA17_1<=19)||(LA17_1>=21 && LA17_1<=62)||(LA17_1>=64 && LA17_1<=81)) ) {
alt17=2;
}
else {
if (state.backtracking>0) {state.failed=true; return current;}
NoViableAltException nvae =
new NoViableAltException("", 17, 1, input);
throw nvae;
}
}
else if ( ((LA17_0>=RULE_STRING && LA17_0<=RULE_DECIMAL)||(LA17_0>=14 && LA17_0<=15)||LA17_0==28||(LA17_0>=44 && LA17_0<=45)||LA17_0==50||(LA17_0>=58 && LA17_0<=59)||LA17_0==61||LA17_0==64||LA17_0==66||(LA17_0>=69 && LA17_0<=80)) ) {
alt17=2;
}
else {
if (state.backtracking>0) {state.failed=true; return current;}
NoViableAltException nvae =
new NoViableAltException("", 17, 0, input);
throw nvae;
}
switch (alt17) {
case 1 :
// InternalPureXbase.g:655:3: ( () ( ( ruleFeatureCallID ) ) ruleOpSingleAssign ( (lv_value_3_0= ruleXAssignment ) ) )
{
// InternalPureXbase.g:655:3: ( () ( ( ruleFeatureCallID ) ) ruleOpSingleAssign ( (lv_value_3_0= ruleXAssignment ) ) )
// InternalPureXbase.g:656:4: () ( ( ruleFeatureCallID ) ) ruleOpSingleAssign ( (lv_value_3_0= ruleXAssignment ) )
{
// InternalPureXbase.g:656:4: ()
// InternalPureXbase.g:657:5:
{
if ( state.backtracking==0 ) {
current = forceCreateModelElement(
grammarAccess.getXAssignmentAccess().getXAssignmentAction_0_0(),
current);
}
}
// InternalPureXbase.g:663:4: ( ( ruleFeatureCallID ) )
// InternalPureXbase.g:664:5: ( ruleFeatureCallID )
{
// InternalPureXbase.g:664:5: ( ruleFeatureCallID )
// InternalPureXbase.g:665:6: ruleFeatureCallID
{
if ( state.backtracking==0 ) {
if (current==null) {
current = createModelElement(grammarAccess.getXAssignmentRule());
}
}
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXAssignmentAccess().getFeatureJvmIdentifiableElementCrossReference_0_1_0());
}
pushFollow(FOLLOW_13);
ruleFeatureCallID();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
afterParserOrEnumRuleCall();
}
}
}
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXAssignmentAccess().getOpSingleAssignParserRuleCall_0_2());
}
pushFollow(FOLLOW_3);
ruleOpSingleAssign();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
afterParserOrEnumRuleCall();
}
// InternalPureXbase.g:686:4: ( (lv_value_3_0= ruleXAssignment ) )
// InternalPureXbase.g:687:5: (lv_value_3_0= ruleXAssignment )
{
// InternalPureXbase.g:687:5: (lv_value_3_0= ruleXAssignment )
// InternalPureXbase.g:688:6: lv_value_3_0= ruleXAssignment
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXAssignmentAccess().getValueXAssignmentParserRuleCall_0_3_0());
}
pushFollow(FOLLOW_2);
lv_value_3_0=ruleXAssignment();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
if (current==null) {
current = createModelElementForParent(grammarAccess.getXAssignmentRule());
}
set(
current,
"value",
lv_value_3_0,
"org.eclipse.xtext.purexbase.PureXbase.XAssignment");
afterParserOrEnumRuleCall();
}
}
}
}
}
break;
case 2 :
// InternalPureXbase.g:707:3: (this_XConditionalExpression_4= ruleXConditionalExpression ( ( ( ( () ( ( ruleOpMultiAssign ) ) ) )=> ( () ( ( ruleOpMultiAssign ) ) ) ) ( (lv_rightOperand_7_0= ruleXAssignment ) ) )? )
{
// InternalPureXbase.g:707:3: (this_XConditionalExpression_4= ruleXConditionalExpression ( ( ( ( () ( ( ruleOpMultiAssign ) ) ) )=> ( () ( ( ruleOpMultiAssign ) ) ) ) ( (lv_rightOperand_7_0= ruleXAssignment ) ) )? )
// InternalPureXbase.g:708:4: this_XConditionalExpression_4= ruleXConditionalExpression ( ( ( ( () ( ( ruleOpMultiAssign ) ) ) )=> ( () ( ( ruleOpMultiAssign ) ) ) ) ( (lv_rightOperand_7_0= ruleXAssignment ) ) )?
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXAssignmentAccess().getXConditionalExpressionParserRuleCall_1_0());
}
pushFollow(FOLLOW_14);
this_XConditionalExpression_4=ruleXConditionalExpression();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
current = this_XConditionalExpression_4;
afterParserOrEnumRuleCall();
}
// InternalPureXbase.g:716:4: ( ( ( ( () ( ( ruleOpMultiAssign ) ) ) )=> ( () ( ( ruleOpMultiAssign ) ) ) ) ( (lv_rightOperand_7_0= ruleXAssignment ) ) )?
int alt16=2;
alt16 = dfa16.predict(input);
switch (alt16) {
case 1 :
// InternalPureXbase.g:717:5: ( ( ( () ( ( ruleOpMultiAssign ) ) ) )=> ( () ( ( ruleOpMultiAssign ) ) ) ) ( (lv_rightOperand_7_0= ruleXAssignment ) )
{
// InternalPureXbase.g:717:5: ( ( ( () ( ( ruleOpMultiAssign ) ) ) )=> ( () ( ( ruleOpMultiAssign ) ) ) )
// InternalPureXbase.g:718:6: ( ( () ( ( ruleOpMultiAssign ) ) ) )=> ( () ( ( ruleOpMultiAssign ) ) )
{
// InternalPureXbase.g:728:6: ( () ( ( ruleOpMultiAssign ) ) )
// InternalPureXbase.g:729:7: () ( ( ruleOpMultiAssign ) )
{
// InternalPureXbase.g:729:7: ()
// InternalPureXbase.g:730:8:
{
if ( state.backtracking==0 ) {
current = forceCreateModelElementAndSet(
grammarAccess.getXAssignmentAccess().getXBinaryOperationLeftOperandAction_1_1_0_0_0(),
current);
}
}
// InternalPureXbase.g:736:7: ( ( ruleOpMultiAssign ) )
// InternalPureXbase.g:737:8: ( ruleOpMultiAssign )
{
// InternalPureXbase.g:737:8: ( ruleOpMultiAssign )
// InternalPureXbase.g:738:9: ruleOpMultiAssign
{
if ( state.backtracking==0 ) {
if (current==null) {
current = createModelElement(grammarAccess.getXAssignmentRule());
}
}
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXAssignmentAccess().getFeatureJvmIdentifiableElementCrossReference_1_1_0_0_1_0());
}
pushFollow(FOLLOW_3);
ruleOpMultiAssign();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
afterParserOrEnumRuleCall();
}
}
}
}
}
// InternalPureXbase.g:754:5: ( (lv_rightOperand_7_0= ruleXAssignment ) )
// InternalPureXbase.g:755:6: (lv_rightOperand_7_0= ruleXAssignment )
{
// InternalPureXbase.g:755:6: (lv_rightOperand_7_0= ruleXAssignment )
// InternalPureXbase.g:756:7: lv_rightOperand_7_0= ruleXAssignment
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXAssignmentAccess().getRightOperandXAssignmentParserRuleCall_1_1_1_0());
}
pushFollow(FOLLOW_2);
lv_rightOperand_7_0=ruleXAssignment();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
if (current==null) {
current = createModelElementForParent(grammarAccess.getXAssignmentRule());
}
set(
current,
"rightOperand",
lv_rightOperand_7_0,
"org.eclipse.xtext.purexbase.PureXbase.XAssignment");
afterParserOrEnumRuleCall();
}
}
}
}
break;
}
}
}
break;
}
}
if ( state.backtracking==0 ) {
leaveRule();
}
}
catch (RecognitionException re) {
recover(input,re);
appendSkippedTokens();
}
finally {
}
return current;
} |
java | public final <R> R get(ChronoFunction<? super T, R> function) {
return function.apply(this.getContext());
} |
java | public static Fraction of(double value) {
final int sign = value < 0 ? -1 : 1;
value = Math.abs(value);
if (value > Integer.MAX_VALUE || Double.isNaN(value)) {
throw new ArithmeticException("The value must not be greater than Integer.MAX_VALUE or NaN");
}
final int wholeNumber = (int) value;
value -= wholeNumber;
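        // approximate the fractional part with a continued-fraction expansion,
        // keeping successive convergents (numer/denom) until the error stops
        // shrinking, the denominator exceeds 10000, or 25 iterations pass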
int numer0 = 0; // the pre-previous
int denom0 = 1; // the pre-previous
int numer1 = 1; // the previous
int denom1 = 0; // the previous
int numer2 = 0; // the current, setup in calculation
int denom2 = 0; // the current, setup in calculation
int a1 = (int) value;
int a2 = 0;
double x1 = 1;
double x2 = 0;
double y1 = value - a1;
double y2 = 0;
double delta1, delta2 = Double.MAX_VALUE;
double fraction;
int i = 1;
// System.out.println("---");
do {
delta1 = delta2;
a2 = (int) (x1 / y1);
x2 = y1;
y2 = x1 - a2 * y1;
numer2 = a1 * numer1 + numer0;
denom2 = a1 * denom1 + denom0;
fraction = (double) numer2 / (double) denom2;
delta2 = Math.abs(value - fraction);
a1 = a2;
x1 = x2;
y1 = y2;
numer0 = numer1;
denom0 = denom1;
numer1 = numer2;
denom1 = denom2;
i++;
// System.out.println(">>" + delta1 +" "+ delta2+" "+(delta1 > delta2)+" "+i+" "+denom2);
} while (delta1 > delta2 && denom2 <= 10000 && denom2 > 0 && i < 25);
if (i == 25) {
throw new ArithmeticException("Unable to convert double to fraction");
}
return of((numer0 + wholeNumber * denom0) * sign, denom0, true);
} |
java | @Override
@SuppressWarnings("PMD.SystemPrintln")
int execute(OptionsAndArgs pOpts, Object pVm, VirtualMachineHandler pHandler) throws InvocationTargetException, NoSuchMethodException, IllegalAccessException {
String agentUrl = checkAgentUrl(pVm);
boolean quiet = pOpts.isQuiet();
if (agentUrl != null) {
loadAgent(pVm,pOpts,"mode=stop");
if (!quiet) {
System.out.println("Stopped Jolokia for " + getProcessDescription(pOpts,pHandler));
}
return 0;
} else {
if (!quiet) {
System.out.println("Jolokia is not attached to " + getProcessDescription(pOpts,pHandler));
}
return 1;
}
} |
python | def get(self, floating_ip_id):
"""Fetches the floating IP.
:returns: FloatingIp object corresponding to floating_ip_id
"""
fip = self.client.show_floatingip(floating_ip_id).get('floatingip')
self._set_instance_info(fip)
return FloatingIp(fip) |
java | public Class<?> findLoadedOrDefineClass(ClassLoader classLoader, String className, byte[] classbytes)
{
Class<?> klass = findLoadedClass(classLoader, className);
if (klass == null)
{
try
{
klass = defineClass(classLoader, className, classbytes);
} catch (LinkageError ex)
{
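            // another thread may have concurrently defined the same class;
            // if so, use that definition instead of failing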
klass = findLoadedClass(classLoader, className);
if (klass == null)
{
throw ex;
}
}
}
return klass;
} |
java | public void outputAnnotationIndex(PrintWriter writer) {
for (String ann : annotationIndex.keySet()) {
writer.print(ann);
writer.print(": ");
Set<String> classes = annotationIndex.get(ann);
Iterator<String> it = classes.iterator();
while (it.hasNext()) {
writer.print(it.next());
if (it.hasNext())
writer.print(", ");
}
writer.println();
}
} |
java | public void dereferenceControllable()
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(tc, "dereferenceControllable");
destinationManager = null;
aliasDest = null;
index = null;
mpControl = null;
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "dereferenceControllable");
} |
java | public INDArray outputSingle(MultiDataSetIterator iterator){
Preconditions.checkArgument(numOutputArrays == 1, "Cannot use this method with nets that have more" +
" than 1 output array. This network has %s outputs", numOutputArrays);
return output(iterator)[0];
} |
python | def tempdir():
"""Creates a temporary directory"""
directory_path = tempfile.mkdtemp()
def clean_up(): # pylint: disable=missing-docstring
shutil.rmtree(directory_path, onerror=on_error)
with cd(directory_path, clean_up):
yield directory_path |
python | def successors(self):
"""Yield Compounds below self in the hierarchy.
Yields
-------
mb.Compound
The next Particle below self in the hierarchy
"""
if not self.children:
return
for part in self.children:
# Parts local to the current Compound.
yield part
# Parts further down the hierarchy.
for subpart in part.successors():
yield subpart |
python | def find(self, node, path):
"""Wrapper for lxml`s find."""
return node.find(path, namespaces=self.namespaces) |
python | def renderFromWorkitem(self, copied_from, keep=False,
encoding="UTF-8", **kwargs):
"""Render the template directly from some to-be-copied
:class:`rtcclient.workitem.Workitem` without saving to a file
:param copied_from: the to-be-copied
:class:`rtcclient.workitem.Workitem` id
    :param keep (default is False): If `True`, the fields below
        will be kept unchanged from the to-be-copied
        :class:`rtcclient.workitem.Workitem`;
        if `False`, they must be supplied through `kwargs`.
* teamArea (Team Area)
* ownedBy (Owned By)
* plannedFor(Planned For)
* severity(Severity)
* priority(Priority)
* filedAgainst(Filed Against)
:param encoding (default is "UTF-8"): coding format
:param kwargs: The `kwargs` dict is used to fill the template.
These two parameter are mandatory:
* description
* title
        Some of the parameters below (which may not be included in some
        customized workitem types) are mandatory if `keep` is set to
        `False`, and optional otherwise.
* teamArea (Team Area)
* ownedBy (Owned By)
* plannedFor(Planned For)
* severity(Severity)
* priority(Priority)
* filedAgainst(Filed Against)
Actually all these needed keywords/attributes/fields can be
retrieved by
:class:`rtcclient.template.Templater.listFieldsFromWorkitem`
:return: the :class:`string` object
:rtype: string
"""
temp = jinja2.Template(self.getTemplate(copied_from,
template_name=None,
template_folder=None,
keep=keep,
encoding=encoding))
return temp.render(**kwargs) |
python | def _from_dict(cls, _dict):
"""Initialize a SemanticRolesResultSubject object from a json dictionary."""
args = {}
if 'text' in _dict:
args['text'] = _dict.get('text')
if 'entities' in _dict:
args['entities'] = [
SemanticRolesEntity._from_dict(x)
for x in (_dict.get('entities'))
]
if 'keywords' in _dict:
args['keywords'] = [
SemanticRolesKeyword._from_dict(x)
for x in (_dict.get('keywords'))
]
return cls(**args) |
java | public static Long toTimestamp(String dateStr, TimeZone tz) {
int length = dateStr.length();
String format;
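        // lengths 21-23 select progressively more precise (sub-second)
        // timestamp formats; any other length falls back to the default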
if (length == 21) {
format = DEFAULT_DATETIME_FORMATS[1];
} else if (length == 22) {
format = DEFAULT_DATETIME_FORMATS[2];
} else if (length == 23) {
format = DEFAULT_DATETIME_FORMATS[3];
} else {
// otherwise fall back to the default
format = DEFAULT_DATETIME_FORMATS[0];
}
return toTimestamp(dateStr, format, tz);
} |
python | def create_protocol(name, **kwargs):
"""
Returns an instance of the protocol with the given name.
:type name: str
:param name: The name of the protocol.
:rtype: Protocol
:return: An instance of the protocol.
"""
cls = protocol_map.get(name)
if not cls:
raise ValueError('Unsupported protocol "%s".' % name)
return cls(**kwargs) |
java | public Future<AuthenticationResult> acquireTokenByRefreshToken(
final String refreshToken, final ClientCredential credential,
final AuthenticationCallback callback) {
return acquireTokenByRefreshToken(refreshToken, credential,
(String) null, callback);
} |
java | public static float copySign(float magnitude, float sign) {
return Math.copySign(magnitude, (Float.isNaN(sign)?1.0f:sign));
} |
java | @Override
public void initialRecoveryFailed(RecoveryAgent recoveryAgent, FailureScope failureScope) throws InvalidFailureScopeException {
if (tc.isEntryEnabled())
Tr.entry(tc, "initialRecoveryFailed", new Object[] { recoveryAgent, failureScope, this });
final boolean removed = removeRecoveryRecord(recoveryAgent, failureScope);
if (!removed) {
if (tc.isEventEnabled())
Tr.event(tc, "The supplied FailureScope was not recognized as outstaning work for this RecoveryAgent");
if (tc.isEntryEnabled())
Tr.exit(tc, "initialRecoveryFailed", "InvalidFailureScopeException");
throw new InvalidFailureScopeException(null);
}
// Once recovery processing has been completed, we need to examine the results and take appropriate
// action.
if (!recoveryOutstanding(failureScope)) {
// Drive the failure callback.
if (_registeredCallbacks != null) {
driveCallBacks(CALLBACK_RECOVERYFAILED, failureScope);
}
// Ensure the failure map is clean for this failure scope.
synchronized (_initFailedFailureScopes) {
_initFailedFailureScopes.remove(failureScope);
}
if (Configuration.localFailureScope().equals(failureScope)) {
// This is the local failure scope. Cause server termination
Configuration.getRecoveryLogComponent().localRecoveryFailed();
} else {
// The is a peer failure scope. Terminate and de-activate to try and allow another member
// of the cluster to recover.
try {
directTermination(failureScope);
                } catch (Exception exc) {
                    // best-effort termination; failures here are deliberately ignored
                }
Configuration.getRecoveryLogComponent().deactivateGroup(failureScope, 60);
}
} else {
// Record this failure so as to ensure correct processing later.
synchronized (_initFailedFailureScopes) {
_initFailedFailureScopes.add(failureScope);
}
// Tell other services about this failure so they can take any required action.
final int failedClientId = recoveryAgent.clientIdentifier();
// Extract the 'values' collection from the _registeredRecoveryAgents map and create an iterator
// from it. This iterator will return ArrayList objects each containing a set of RecoveryAgent
            // objects. Each ArrayList corresponds to a different sequence priority value.
final Collection registeredRecoveryAgentsValues = _registeredRecoveryAgents.values();
final Iterator registeredRecoveryAgentsValuesIterator = registeredRecoveryAgentsValues.iterator();
while (registeredRecoveryAgentsValuesIterator.hasNext()) {
// Extract the next ArrayList and create an iterator from it. This iterator will return RecoveryAgent
// objects that are registered at the same sequence priority value.
final ArrayList registeredRecoveryAgentsArray = (java.util.ArrayList) registeredRecoveryAgentsValuesIterator.next();
final Iterator registeredRecoveryAgentsArrayIterator = registeredRecoveryAgentsArray.iterator();
while (registeredRecoveryAgentsArrayIterator.hasNext()) {
// Extract the next RecoveryAgent object
final RecoveryAgent informRecoveryAgent = (RecoveryAgent) registeredRecoveryAgentsArrayIterator.next();
if (informRecoveryAgent.clientIdentifier() != failedClientId) {
informRecoveryAgent.agentReportedFailure(failedClientId, failureScope);
}
}
}
}
if (tc.isEntryEnabled())
Tr.exit(tc, "initialRecoveryFailed");
} |
java | public void addMessageListener(DigitalChannel channel, MessageListener<? extends Message> messageListener) {
addListener(channel.getIdentifier(), messageListener.getMessageType(), messageListener);
} |
python | def delete_framework(cls, framework=None):
# type: (Optional[Framework]) -> bool
# pylint: disable=W0212
"""
Removes the framework singleton
:return: True on success, else False
"""
if framework is None:
framework = cls.__singleton
if framework is cls.__singleton:
# Stop the framework
try:
framework.stop()
            except Exception:
_logger.exception("Error stopping the framework")
# Uninstall its bundles
bundles = framework.get_bundles()
for bundle in bundles:
try:
bundle.uninstall()
                except Exception:
_logger.exception(
"Error uninstalling bundle %s",
bundle.get_symbolic_name(),
)
# Clear the event dispatcher
framework._dispatcher.clear()
# Clear the singleton
cls.__singleton = None
return True
return False |
python | def TimestampToRDFDatetime(timestamp):
"""Converts MySQL `TIMESTAMP(6)` columns to datetime objects."""
# TODO(hanuszczak): `timestamp` should be of MySQL type `Decimal`. However,
# it is unclear where this type is actually defined and how to import it in
# order to have a type assertion.
if timestamp is None:
return None
else:
micros = int(1000000 * timestamp)
return rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(micros) |
java | public static Object invokeMethod(final Object obj, final String methodName, final Class<?>[] parameterTypes,
final Object[] args) {
Method method = getAccessibleMethod(obj, methodName, parameterTypes);
if (method == null) {
throw new IllegalArgumentException("Could not find method [" + methodName + "] on target [" + obj + "]");
}
try {
return method.invoke(obj, args);
} catch (Exception e) {
throw convertReflectionExceptionToUnchecked(e);
}
} |
java | public static boolean isValidBuildMetaData(String buildMetaData) {
if (buildMetaData == null) {
return false;
} else if (buildMetaData.isEmpty()) {
return true;
}
return parseID(buildMetaData.toCharArray(), buildMetaData, 0, true, true, false,
null, "") != FAILURE;
} |
java | public static ns_detail_vlan[] get(nitro_service client) throws Exception
{
ns_detail_vlan resource = new ns_detail_vlan();
resource.validate("get");
return (ns_detail_vlan[]) resource.get_resources(client);
} |
java | public Flux<ConfigurationSetting> listSettingRevisions(SettingSelector selector) {
Mono<PagedResponse<ConfigurationSetting>> result;
if (selector != null) {
String fields = getSelectQuery(selector.fields());
result = service.listKeyValueRevisions(serviceEndpoint, selector.key(), selector.label(), fields, selector.acceptDateTime(), null);
} else {
result = service.listKeyValueRevisions(serviceEndpoint, null, null, null, null, null);
}
return result.flatMapMany(this::extractAndFetchConfigurationSettings);
} |
python | def _batch_iterator(self, N=1):
"""Returns N lists of records.
This can be used on any iterator, for example to batch up
SeqRecord objects from Bio.SeqIO.parse(...), or to batch
Alignment objects from Bio.AlignIO.parse(...), or simply
lines from a file handle.
This is a generator function, and it returns lists of the
entries from the supplied iterator. Each list will have
batch_size entries, although the final list may be shorter.
"""
        batch_size = int(math.ceil(self.num_records / float(N)))
handle = self._open(self.filename)
while True:
batch = list(islice(handle, batch_size))
if not batch:
break
yield batch |
python | def ensure_init(path):
    '''
    ensure directories leading up to path are importable, omitting
    parent directory, e.g. path='hooks/helpers/foo':
    hooks/
    hooks/helpers/__init__.py
    hooks/helpers/foo/__init__.py
    '''
for d, dirs, files in os.walk(os.path.join(*path.split('/')[:2])):
_i = os.path.join(d, '__init__.py')
if not os.path.exists(_i):
logging.info('Adding missing __init__.py: %s' % _i)
open(_i, 'wb').close() |
java | private void initialize() {
        // enables the options dialog to be in front; a modal dialog
        // stays on top of the main application window but doesn't block children
        // Examples of children: help window and client certificate viewer
this.setModalityType(ModalityType.DOCUMENT_MODAL);
if (Model.getSingleton().getOptionsParam().getViewParam().getWmUiHandlingOption() == 0) {
this.setSize(500, 375);
}
this.setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
this.setContentPane(getJContentPane());
} |
java | public List<UIComponent> getComponentResources(FacesContext context,
String target) {
if (target == null) {
throw new NullPointerException();
}
List<UIComponent> resources = getComponentResources(context,
target,
false);
return ((resources != null)
? resources
: Collections.<UIComponent>emptyList());
} |
python | def compiler(name):
"""
Get a usable clang++ plumbum command.
This searches for a usable clang++ in the llvm binary path
Returns:
plumbum Command that executes clang++
"""
pinfo = __get_paths()
_compiler = local[name]
_compiler = _compiler.setenv(
PATH=pinfo["path"], LD_LIBRARY_PATH=pinfo["ld_library_path"])
return _compiler |
python | def get_instances(self):
"""
Returns a flat list of the names of services created
in this space.
"""
services = []
for resource in self._get_instances():
services.append(resource['entity']['name'])
return services |
java | void assertParent(Collection<Class<? extends Element>> permittedParents) throws InvalidInputException {
if (!permittedParents.contains(this.getParent().getClass())) {
throw new InvalidInputException("Element \"" + this.getMessageMLTag() + "\" is not allowed as a child of \""
+ this.getParent().getMessageMLTag() + "\"");
}
} |
java | public SparseDoubleVector getColumnVector(int column) {
checkIndices(0, column);
SparseDoubleVector columnValues =
new SparseHashDoubleVector(values.length);
columnValues.set(column, values[column]);
return columnValues;
} |
python | def start(nick, host, port=6667, username=None, password=None, channels=None, use_ssl=False, use_sasl=False,
char='!', allow_hosts=False, allow_nicks=False, disable_query=True):
'''
IRC Bot for interacting with salt.
nick
Nickname of the connected Bot.
host
irc server (example - chat.freenode.net).
port
irc port. Default: 6667
password
password for authenticating. If not provided, user will not authenticate on the irc server.
channels
channels to join.
use_ssl
connect to server using ssl. Default: False
use_sasl
authenticate using sasl, instead of messaging NickServ. Default: False
.. note:: This will allow the bot user to be fully authenticated before joining any channels
char
command character to look for. Default: !
allow_hosts
hostmasks allowed to use commands on the bot. Default: False
True to allow all
False to allow none
List of regexes to allow matching
allow_nicks
Nicks that are allowed to use commands on the bot. Default: False
True to allow all
False to allow none
List of regexes to allow matching
disable_query
Disable commands from being sent through private queries. Require they be sent to a channel, so that all
communication can be controlled by access to the channel. Default: True
.. warning:: Unauthenticated Access to event stream
This engine sends events calls to the event stream without authenticating them in salt. Authentication will
need to be configured and enforced on the irc server or enforced in the irc channel. The engine only accepts
commands from channels, so non authenticated users could be banned or quieted in the channel.
/mode +q $~a # quiet all users who are not authenticated
/mode +r # do not allow unauthenticated users into the channel
It would also be possible to add a password to the irc channel, or only allow invited users to join.
'''
client = IRCClient(nick, host, port, username, password, channels or [], use_ssl, use_sasl, char,
allow_hosts, allow_nicks, disable_query)
client.io_loop.start() |
python | def fs_cache(app_name='', cache_type='', idx=1,
expires=DEFAULT_EXPIRES, cache_dir='', helper_class=_FSCacher):
"""
A decorator to cache results of functions returning
pd.DataFrame or pd.Series objects under:
<cache_dir>/<app_name>/<cache_type>/<func_name>.<param_string>.csv,
missing parts, like app_name and cache_type, will be omitted
If cache_dir is omitted, stutils 'ST_FS_CACHE_PATH' conf dir will be used.
If 'ST_FS_CACHE_PATH' is not configured, a temporary directory
will be created.
:param app_name: if present, cache files for this application will be
stored in a separate folder
:param cache_type: if present, cache files within the app directory will be
separated into different folders by their cache_type
:param idx: number of columns to use as an index
:param expires: cache duration in seconds
:param cache_dir: set a custom file cache path
"""
def decorator(func):
return helper_class(func, cache_dir, app_name, cache_type, idx, expires)
return decorator |
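A usage sketch for the decorator above, assuming pandas is installed and the decorated function returns a DataFrame; the app and cache-type names are illustrative.

# Hypothetical usage of fs_cache(); caches under <cache_dir>/demo/reports/...
import pandas as pd

@fs_cache(app_name="demo", cache_type="reports", expires=3600)
def load_report(year):
    # stand-in for an expensive computation
    return pd.DataFrame({"year": [year], "value": [42]})

df = load_report(2020)  # first call computes and writes the CSV cache
df = load_report(2020)  # a repeat call within an hour reads the cached CSV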
java | public String remainder() {
final String remainder = queue.substring(pos, queue.length());
pos = queue.length();
return remainder;
} |
java | public void putMany(final Iterable<A> items, final PipelineContext context) {
final Iterable<S> transforming = new TransformingIterable(items, context);
sink.putMany(storedType, transforming, context);
} |
python | def create(self, bucket, descriptor, force=False):
"""https://github.com/frictionlessdata/tableschema-bigquery-py#storage
"""
# Make lists
buckets = bucket
if isinstance(bucket, six.string_types):
buckets = [bucket]
descriptors = descriptor
if isinstance(descriptor, dict):
descriptors = [descriptor]
# Iterate over buckets/descriptors
for bucket, descriptor in zip(buckets, descriptors):
# Existent bucket
if bucket in self.buckets:
if not force:
message = 'Bucket "%s" already exists' % bucket
raise tableschema.exceptions.StorageError(message)
self.delete(bucket)
# Prepare job body
tableschema.validate(descriptor)
table_name = self.__mapper.convert_bucket(bucket)
converted_descriptor, fallbacks = self.__mapper.convert_descriptor(descriptor)
body = {
'tableReference': {
'projectId': self.__project,
'datasetId': self.__dataset,
'tableId': table_name,
},
'schema': converted_descriptor,
}
# Make request
self.__service.tables().insert(
projectId=self.__project,
datasetId=self.__dataset,
body=body).execute()
# Add to descriptors/fallbacks
self.__descriptors[bucket] = descriptor
self.__fallbacks[bucket] = fallbacks
# Remove buckets cache
self.__buckets = None |
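A hedged usage sketch for create() above; the bucket name is arbitrary, and the descriptor follows the frictionlessdata Table Schema format that tableschema.validate() expects.

# Hypothetical call on an already-constructed Storage instance.
storage.create('articles', {
    'fields': [
        {'name': 'id', 'type': 'integer'},
        {'name': 'title', 'type': 'string'},
    ],
}, force=True)  # force=True drops and recreates the table if it exists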
python | def categories_for_actions(actions):
"""
Given an iterable of actions, return a mapping of action groups.
actions: {'ec2:authorizesecuritygroupingress', 'iam:putrolepolicy', 'iam:listroles'}
Returns:
{
'ec2': {'Write'},
'iam': {'Permissions', 'List'})
}
"""
groups = defaultdict(set)
for action in actions:
service = action.split(':')[0]
groups[service].add(_action_categories.get(action))
return groups |
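Reproducing the docstring example above; the exact categories depend on the module's _action_categories mapping, so the output shown is illustrative.

groups = categories_for_actions({
    'ec2:authorizesecuritygroupingress',
    'iam:putrolepolicy',
    'iam:listroles',
})
# e.g. {'ec2': {'Write'}, 'iam': {'Permissions', 'List'}}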
java | private static int getType(int ch)
{
if (UCharacterUtility.isNonCharacter(ch)) {
// not a character; we return an invalid category count
return NON_CHARACTER_;
}
int result = UCharacter.getType(ch);
if (result == UCharacterCategory.SURROGATE) {
if (ch <= UTF16.LEAD_SURROGATE_MAX_VALUE) {
result = LEAD_SURROGATE_;
}
else {
result = TRAIL_SURROGATE_;
}
}
return result;
} |
java | public Expression asCheap() {
if (isCheap()) {
return this;
}
return new Expression(resultType, features.plus(Feature.CHEAP)) {
@Override
protected void doGen(CodeBuilder adapter) {
Expression.this.gen(adapter);
}
};
} |
java | public static Object applyEqualityOperator
(Object pLeft,
Object pRight,
EqualityOperator pOperator,
Logger pLogger)
throws ELException {
if (pLeft == pRight) {
return PrimitiveObjects.getBoolean(pOperator.apply(true, pLogger));
} else if (pLeft == null ||
pRight == null) {
return PrimitiveObjects.getBoolean(pOperator.apply(false, pLogger));
} else if (isFloatingPointType(pLeft) ||
isFloatingPointType(pRight)) {
double left =
coerceToPrimitiveNumber(pLeft, Double.class, pLogger).
doubleValue();
double right =
coerceToPrimitiveNumber(pRight, Double.class, pLogger).
doubleValue();
return
PrimitiveObjects.getBoolean
(pOperator.apply(left == right, pLogger));
} else if (isIntegerType(pLeft) ||
isIntegerType(pRight)) {
long left =
coerceToPrimitiveNumber(pLeft, Long.class, pLogger).
longValue();
long right =
coerceToPrimitiveNumber(pRight, Long.class, pLogger).
longValue();
return
PrimitiveObjects.getBoolean
(pOperator.apply(left == right, pLogger));
} else if (pLeft instanceof Boolean ||
pRight instanceof Boolean) {
boolean left = coerceToBoolean(pLeft, pLogger).booleanValue();
boolean right = coerceToBoolean(pRight, pLogger).booleanValue();
return
PrimitiveObjects.getBoolean
(pOperator.apply(left == right, pLogger));
} else if (pLeft instanceof String ||
pRight instanceof String) {
String left = coerceToString(pLeft, pLogger);
String right = coerceToString(pRight, pLogger);
return
PrimitiveObjects.getBoolean
(pOperator.apply(left.equals(right), pLogger));
} else {
try {
return
PrimitiveObjects.getBoolean
(pOperator.apply(pLeft.equals(pRight), pLogger));
}
catch (Exception exc) {
if (pLogger.isLoggingError()) {
pLogger.logError
(Constants.ERROR_IN_EQUALS,
exc,
pLeft.getClass().getName(),
pRight.getClass().getName(),
pOperator.getOperatorSymbol());
}
return Boolean.FALSE;
}
}
} |
python | def set_identifier(self, uid):
"""
Sets unique id for this epub
:Args:
- uid: Value of unique identifier for this book
"""
self.uid = uid
self.set_unique_metadata('DC', 'identifier', self.uid, {'id': self.IDENTIFIER_ID}) |
python | def mark_entities_to_export(self, export_config):
"""
Apply the specified :class:`meteorpi_model.ExportConfiguration` to the database, running its contained query and
creating rows in t_observationExport or t_fileExport for matching entities.
:param ExportConfiguration export_config:
An instance of :class:`meteorpi_model.ExportConfiguration` to apply.
:returns:
The integer number of rows added to the export tables
"""
# Retrieve the internal ID of the export configuration, failing if it hasn't been stored
self.con.execute('SELECT uid FROM archive_exportConfig WHERE exportConfigID = %s;',
(export_config.config_id,))
export_config_id = self.con.fetchall()
if len(export_config_id) < 1:
raise ValueError("Attempt to run export on ExportConfiguration not in database")
export_config_id = export_config_id[0]['uid']
# If the export is inactive then do nothing
if not export_config.enabled:
return 0
# Track the number of rows created, return it later
rows_created = 0
# Handle ObservationSearch
if isinstance(export_config.search, mp.ObservationSearch):
# Create a deep copy of the search and set the properties required when creating exports
search = mp.ObservationSearch.from_dict(export_config.search.as_dict())
search.exclude_export_to = export_config.config_id
b = search_observations_sql_builder(search)
self.con.execute(b.get_select_sql(columns='o.uid, o.obsTime'), b.sql_args)
for result in self.con.fetchall():
self.con.execute('INSERT INTO archive_observationExport '
'(observationId, obsTime, exportConfig, exportState) '
'VALUES (%s,%s,%s,%s)', (result['uid'], result['obsTime'], export_config_id, 1))
rows_created += 1
# Handle FileSearch
elif isinstance(export_config.search, mp.FileRecordSearch):
# Create a deep copy of the search and set the properties required when creating exports
search = mp.FileRecordSearch.from_dict(export_config.search.as_dict())
search.exclude_export_to = export_config.config_id
b = search_files_sql_builder(search)
self.con.execute(b.get_select_sql(columns='f.uid, f.fileTime'), b.sql_args)
for result in self.con.fetchall():
self.con.execute('INSERT INTO archive_fileExport '
'(fileId, fileTime, exportConfig, exportState) '
'VALUES (%s,%s,%s,%s)', (result['uid'], result['fileTime'], export_config_id, 1))
rows_created += 1
# Handle ObservatoryMetadataSearch
elif isinstance(export_config.search, mp.ObservatoryMetadataSearch):
# Create a deep copy of the search and set the properties required when creating exports
search = mp.ObservatoryMetadataSearch.from_dict(export_config.search.as_dict())
search.exclude_export_to = export_config.config_id
b = search_metadata_sql_builder(search)
self.con.execute(b.get_select_sql(columns='m.uid, m.setAtTime'), b.sql_args)
for result in self.con.fetchall():
self.con.execute('INSERT INTO archive_metadataExport '
'(metadataId, setAtTime, exportConfig, exportState) '
'VALUES (%s,%s,%s,%s)', (result['uid'], result['setAtTime'], export_config_id, 1))
rows_created += 1
# Complain if it's anything other than these three (nothing else should exist at the moment,
# but we might introduce more search types in the future)
else:
raise ValueError("Unknown search type %s" % str(type(export_config.search)))
return rows_created |
python | def class_balancing_sampler(y, indices):
"""
Construct a `WeightedSubsetSampler` that compensates for class
imbalance.
Parameters
----------
y: NumPy array, 1D dtype=int
sample classes, values must be 0 or positive
indices: NumPy array, 1D dtype=int
An array of indices that identify the subset of samples drawn
from data that are to be used
Returns
-------
WeightedSubsetSampler instance
Sampler
"""
weights = WeightedSampler.class_balancing_sample_weights(y[indices])
return WeightedSubsetSampler(weights, indices=indices) |
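A minimal sketch of the inverse-frequency weighting that WeightedSampler.class_balancing_sample_weights presumably implements; this is an assumption for illustration, not the library's actual code.

import numpy as np

def class_balancing_sample_weights(y):
    # Hypothetical re-implementation: weight each sample by 1 / (its class count)
    counts = np.bincount(y)                  # samples per class
    weights = 1.0 / counts[y].astype(float)  # rarer classes get larger weights
    return weights / weights.sum()           # normalise so the weights sum to 1

With this weighting every class contributes the same total probability mass, so a weighted sampler draws each class equally often regardless of its frequency in the data.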
java | @Override
protected Type getReturnType(final int mOp1, final int mOp2) throws TTXPathException {
Type type1;
Type type2;
try {
type1 = Type.getType(mOp1).getPrimitiveBaseType();
type2 = Type.getType(mOp2).getPrimitiveBaseType();
} catch (final IllegalStateException e) {
throw new XPathError(ErrorType.XPTY0004);
}
if (type1.isNumericType() && type2.isNumericType()) {
// if both have the same numeric type, return it
if (type1 == type2) {
return type1;
}
if (type1 == Type.DOUBLE || type2 == Type.DOUBLE) {
return Type.DOUBLE;
} else if (type1 == Type.FLOAT || type2 == Type.FLOAT) {
return Type.FLOAT;
} else {
assert (type1 == Type.DECIMAL || type2 == Type.DECIMAL);
return Type.DECIMAL;
}
} else {
switch (type1) {
case YEAR_MONTH_DURATION:
if (type2 == Type.YEAR_MONTH_DURATION) {
return Type.DECIMAL;
}
if (type2.isNumericType()) {
return type1;
}
break;
case DAY_TIME_DURATION:
if (type2 == Type.DAY_TIME_DURATION) {
return Type.DECIMAL;
}
if (type2.isNumericType()) {
return type1;
}
break;
default:
throw new XPathError(ErrorType.XPTY0004);
}
throw new XPathError(ErrorType.XPTY0004);
}
} |
java | protected synchronized boolean cancel(Object x) {
// First check the expedited buffer
synchronized (lock) {
if (expeditedPutIndex > expeditedTakeIndex) {
for (int i = expeditedTakeIndex; i < expeditedPutIndex; i++) {
if (expeditedBuffer[i] == x) {
System.arraycopy(expeditedBuffer, i + 1, expeditedBuffer, i, expeditedPutIndex - i - 1);
expeditedPutIndex--;
expeditedBuffer[expeditedPutIndex] = null;
numberOfUsedExpeditedSlots.getAndDecrement(); // D615053
return true;
}
}
} else if (expeditedPutIndex != expeditedTakeIndex || expeditedBuffer[expeditedTakeIndex] != null) {
for (int i = expeditedTakeIndex; i < expeditedBuffer.length; i++) {
if (expeditedBuffer[i] == x) {
if (i != expeditedBuffer.length - 1) {
System.arraycopy(expeditedBuffer, i + 1, expeditedBuffer, i, expeditedBuffer.length - i - 1);
}
if (expeditedPutIndex != 0) {
expeditedBuffer[expeditedBuffer.length - 1] = expeditedBuffer[0];
System.arraycopy(expeditedBuffer, 1, expeditedBuffer, 0, expeditedPutIndex - 1);
expeditedPutIndex--;
} else {
expeditedPutIndex = expeditedBuffer.length - 1;
}
expeditedBuffer[expeditedPutIndex] = null;
numberOfUsedExpeditedSlots.getAndDecrement(); // D615053
return true;
}
}
// D610567 - Scan first section of expedited BoundedBuffer
for (int i = 0; i < expeditedPutIndex; i++) {
if (expeditedBuffer[i] == x) {
System.arraycopy(expeditedBuffer, i + 1, expeditedBuffer, i, expeditedPutIndex - i - 1);
expeditedPutIndex--;
expeditedBuffer[expeditedPutIndex] = null;
numberOfUsedExpeditedSlots.getAndDecrement(); // D615053
return true;
}
}
}
// Next check the main buffer
if (putIndex > takeIndex) {
for (int i = takeIndex; i < putIndex; i++) {
if (buffer[i] == x) {
System.arraycopy(buffer, i + 1, buffer, i, putIndex - i - 1);
putIndex--;
buffer[putIndex] = null;
numberOfUsedSlots.getAndDecrement(); // D615053
return true;
}
}
} else if (putIndex != takeIndex || buffer[takeIndex] != null) {
for (int i = takeIndex; i < buffer.length; i++) {
if (buffer[i] == x) {
if (i != buffer.length - 1) {
System.arraycopy(buffer, i + 1, buffer, i, buffer.length - i - 1);
}
if (putIndex != 0) {
buffer[buffer.length - 1] = buffer[0];
System.arraycopy(buffer, 1, buffer, 0, putIndex - 1);
putIndex--;
} else {
putIndex = buffer.length - 1;
}
buffer[putIndex] = null;
numberOfUsedSlots.getAndDecrement(); // D615053
return true;
}
}
// D610567 - Scan first section of BoundedBuffer
for (int i = 0; i < putIndex; i++) {
if (buffer[i] == x) {
System.arraycopy(buffer, i + 1, buffer, i, putIndex - i - 1);
putIndex--;
buffer[putIndex] = null;
numberOfUsedSlots.getAndDecrement(); // D615053
return true;
}
}
}
}
return false;
} |
python | def pformat(tree):
"""Recursively formats a tree into a nice string representation.
Example Input:
yahoo = tt.Tree(tt.Node("CEO"))
yahoo.root.add(tt.Node("Infra"))
yahoo.root[0].add(tt.Node("Boss"))
yahoo.root[0][0].add(tt.Node("Me"))
yahoo.root.add(tt.Node("Mobile"))
yahoo.root.add(tt.Node("Mail"))
Example Output:
CEO
|__Infra
| |__Boss
| |__Me
|__Mobile
|__Mail
"""
if tree.empty():
return ''
buf = six.StringIO()
for line in _pformat(tree.root, 0):
buf.write(line + "\n")
return buf.getvalue().strip() |
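Running the docstring example above end to end; this assumes the tree module is imported as tt, exactly as the docstring shows.

yahoo = tt.Tree(tt.Node("CEO"))
yahoo.root.add(tt.Node("Infra"))
yahoo.root[0].add(tt.Node("Boss"))
yahoo.root[0][0].add(tt.Node("Me"))
yahoo.root.add(tt.Node("Mobile"))
yahoo.root.add(tt.Node("Mail"))
print(pformat(yahoo))  # prints the indented ASCII tree from the docstring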
python | def expand(string, vars, local_vars={}):
"""Expand a string containing $vars as Ninja would.
Note: doesn't handle the full Ninja variable syntax, but it's enough
to make configure.py's use of it work.
"""
def exp(m):
var = m.group(1)
if var == '$':
return '$'
return local_vars.get(var, vars.get(var, ''))
return re.sub(r'\$(\$|\w*)', exp, string) |
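A quick check of the expansion rules above: local_vars shadow vars, unknown variables expand to the empty string, and "$$" yields a literal dollar sign.

vars = {"builddir": "out", "cc": "gcc"}
local_vars = {"cc": "clang"}
print(expand("$cc -o $builddir/app", vars, local_vars))
# -> "clang -o out/app"
print(expand("price: $$5 $missing", vars))
# -> "price: $5 "   ($$ escapes; $missing is undefined, so it becomes "")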
java | private RValue executeVariableAccess(Expr.VariableAccess expr, CallStack frame) {
Decl.Variable decl = expr.getVariableDeclaration();
return frame.getLocal(decl.getName());
} |
python | def pairwise(reference_intervals, reference_labels,
estimated_intervals, estimated_labels,
frame_size=0.1, beta=1.0):
"""Frame-clustering segmentation evaluation by pair-wise agreement.
Examples
--------
>>> (ref_intervals,
... ref_labels) = mir_eval.io.load_labeled_intervals('ref.lab')
>>> (est_intervals,
... est_labels) = mir_eval.io.load_labeled_intervals('est.lab')
>>> # Trim or pad the estimate to match reference timing
>>> (ref_intervals,
... ref_labels) = mir_eval.util.adjust_intervals(ref_intervals,
... ref_labels,
... t_min=0)
>>> (est_intervals,
... est_labels) = mir_eval.util.adjust_intervals(
... est_intervals, est_labels, t_min=0, t_max=ref_intervals.max())
>>> precision, recall, f = mir_eval.structure.pairwise(ref_intervals,
... ref_labels,
... est_intervals,
... est_labels)
Parameters
----------
reference_intervals : np.ndarray, shape=(n, 2)
reference segment intervals, in the format returned by
:func:`mir_eval.io.load_labeled_intervals`.
reference_labels : list, shape=(n,)
reference segment labels, in the format returned by
:func:`mir_eval.io.load_labeled_intervals`.
estimated_intervals : np.ndarray, shape=(m, 2)
estimated segment intervals, in the format returned by
:func:`mir_eval.io.load_labeled_intervals`.
estimated_labels : list, shape=(m,)
estimated segment labels, in the format returned by
:func:`mir_eval.io.load_labeled_intervals`.
frame_size : float > 0
length (in seconds) of frames for clustering
(Default value = 0.1)
beta : float > 0
beta value for F-measure
(Default value = 1.0)
Returns
-------
precision : float > 0
Precision of detecting whether frames belong in the same cluster
recall : float > 0
Recall of detecting whether frames belong in the same cluster
f : float > 0
F-measure of detecting whether frames belong in the same cluster
"""
validate_structure(reference_intervals, reference_labels,
estimated_intervals, estimated_labels)
# Check for empty annotations. Don't need to check labels because
# validate_structure makes sure they're the same size as intervals
if reference_intervals.size == 0 or estimated_intervals.size == 0:
return 0., 0., 0.
# Generate the cluster labels
y_ref = util.intervals_to_samples(reference_intervals,
reference_labels,
sample_size=frame_size)[-1]
y_ref = util.index_labels(y_ref)[0]
# Map to index space
y_est = util.intervals_to_samples(estimated_intervals,
estimated_labels,
sample_size=frame_size)[-1]
y_est = util.index_labels(y_est)[0]
# Build the reference label agreement matrix
agree_ref = np.equal.outer(y_ref, y_ref)
# Count the unique pairs
n_agree_ref = (agree_ref.sum() - len(y_ref)) / 2.0
# Repeat for estimate
agree_est = np.equal.outer(y_est, y_est)
n_agree_est = (agree_est.sum() - len(y_est)) / 2.0
# Find where they agree
matches = np.logical_and(agree_ref, agree_est)
n_matches = (matches.sum() - len(y_ref)) / 2.0
precision = n_matches / n_agree_est
recall = n_matches / n_agree_ref
f_measure = util.f_measure(precision, recall, beta=beta)
return precision, recall, f_measure |
python | def _execute_cell(args, cell_body):
"""Implements the BigQuery cell magic used to execute BQ queries.
The supported syntax is:
%%bigquery execute [-q|--sql <query identifier>] <other args>
[<YAML or JSON cell_body or inline SQL>]
Args:
args: the arguments following '%bigquery execute'.
cell_body: optional contents of the cell interpreted as YAML or JSON.
Returns:
The QueryResultsTable
"""
query = _get_query_argument(args, cell_body, datalab.utils.commands.notebook_environment())
if args['verbose']:
print(query.sql)
return query.execute(args['target'], table_mode=args['mode'], use_cache=not args['nocache'],
allow_large_results=args['large'], dialect=args['dialect'],
billing_tier=args['billing']).results |
python | def _contents_changed(self):
"""Activate submit_btn."""
desc_chars = (len(self.input_description.toPlainText()) -
self.initial_chars)
if desc_chars < DESC_MIN_CHARS:
self.desc_chars_label.setText(
u"{} {}".format(DESC_MIN_CHARS - desc_chars,
_("more characters to go...")))
else:
self.desc_chars_label.setText(_("Description complete; thanks!"))
title_chars = len(self.title.text())
if title_chars < TITLE_MIN_CHARS:
self.title_chars_label.setText(
u"{} {}".format(TITLE_MIN_CHARS - title_chars,
_("more characters to go...")))
else:
self.title_chars_label.setText(_("Title complete; thanks!"))
submission_enabled = (desc_chars >= DESC_MIN_CHARS and
title_chars >= TITLE_MIN_CHARS)
self.submit_btn.setEnabled(submission_enabled) |
python | def time_snowflake(datetime_obj, high=False):
"""Returns a numeric snowflake pretending to be created at the given date.
When using as the lower end of a range, use time_snowflake(high=False) - 1 to be inclusive, high=True to be exclusive
When using as the higher end of a range, use time_snowflake(high=True) + 1 to be inclusive, high=False to be exclusive
Parameters
-----------
datetime_obj
A timezone-naive datetime object representing UTC time.
high: :class:`bool`
Whether or not to set the lower 22 bit to high or low.
"""
unix_seconds = (datetime_obj - type(datetime_obj)(1970, 1, 1)).total_seconds()
discord_millis = int(unix_seconds * 1000 - DISCORD_EPOCH)
return (discord_millis << 22) + (2**22-1 if high else 0) |
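Building an inclusive snowflake range for a single day, following the inclusive/exclusive guidance in the docstring above; the date is arbitrary.

from datetime import datetime

day_start = datetime(2020, 1, 1)  # timezone-naive UTC, as the docstring requires
day_end = datetime(2020, 1, 2)
lo = time_snowflake(day_start, high=False) - 1  # inclusive lower bound
hi = time_snowflake(day_end, high=True) + 1     # inclusive upper bound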
python | def norm_score(self):
"""Return the normalized score.
Equals 1.0 for a z-score of 0, falling to 0.0 for extremely positive
or negative values.
"""
cdf = (1.0 + math.erf(self.score / math.sqrt(2.0))) / 2.0
return 1 - 2*math.fabs(0.5 - cdf) |
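A standalone restatement of the property above for a quick numeric check: z = 0 maps to a score of 1.0, while z = ±1.96 (the familiar 95% bounds) maps to roughly 0.05.

import math

def norm_score(z):
    # same formula as the property above, parameterised by the z-score
    cdf = (1.0 + math.erf(z / math.sqrt(2.0))) / 2.0
    return 1 - 2 * math.fabs(0.5 - cdf)

print(round(norm_score(0.0), 3))   # 1.0
print(round(norm_score(1.96), 3))  # ~0.05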
java | public static void assertEqualBeans(Object expected, Object actual) throws ComparisonFailure {
// Do *NOT* rely on expected/actual java.lang.Object.equals(Object),
// nor on e.g. java.util.Objects.equals(Object, Object), which is based on it.
final String expectedAsText = generator.getExpression(expected);
assertEqualByText(expectedAsText, actual);
} |
python | def pvsyst_celltemp(self, poa_global, temp_air, wind_speed=1.0):
"""Uses :py:func:`pvsyst_celltemp` to calculate module temperatures
based on ``self.racking_model`` and the input parameters.
Parameters
----------
See pvsystem.pvsyst_celltemp for details
Returns
-------
See pvsystem.pvsyst_celltemp for details
"""
kwargs = _build_kwargs(['eta_m', 'alpha_absorption'],
self.module_parameters)
return pvsyst_celltemp(poa_global, temp_air, wind_speed,
model_params=self.racking_model, **kwargs) |
python | def filter(self, data):
"""
Filters the dataset(s). When providing a list, this can be used to create compatible train/test sets,
since the filter only gets initialized with the first dataset and all subsequent datasets get transformed
using the same setup.
NB: inputformat(Instances) must have been called beforehand.
:param data: the Instances to filter
:type data: Instances or list of Instances
:return: the filtered Instances object(s)
:rtype: Instances or list of Instances
"""
if isinstance(data, list):
result = []
for d in data:
result.append(Instances(javabridge.static_call(
"Lweka/filters/Filter;", "useFilter",
"(Lweka/core/Instances;Lweka/filters/Filter;)Lweka/core/Instances;",
d.jobject, self.jobject)))
return result
else:
return Instances(javabridge.static_call(
"Lweka/filters/Filter;", "useFilter",
"(Lweka/core/Instances;Lweka/filters/Filter;)Lweka/core/Instances;",
data.jobject, self.jobject)) |
java | private Artifact getArtifactFromMavenCoordinates(final String artifact) throws MojoFailureException {
String[] parts = StringUtils.split(artifact, ":");
String version;
String packaging = null;
String classifier = null;
switch (parts.length) {
case 3:
// groupId:artifactId:version
version = parts[2];
break;
case 4:
// groupId:artifactId:packaging:version
packaging = parts[2];
version = parts[3];
break;
case 5:
// groupId:artifactId:packaging:classifier:version
packaging = parts[2];
classifier = parts[3];
version = parts[4];
break;
default:
throw new MojoFailureException("Invalid artifact: " + artifact);
}
String groupId = parts[0];
String artifactId = parts[1];
return createArtifact(artifactId, groupId, version, packaging, classifier);
} |
python | def import_json():
'''
Import a json module, starting with the quick ones and going down the list.
'''
for fast_json in ('ujson', 'yajl', 'json'):
try:
mod = __import__(fast_json)
log.trace('loaded %s json lib', fast_json)
return mod
except ImportError:
continue |
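A small usage sketch for the loader above; it falls back through ujson and yajl to the stdlib json module, and returns None if none of them import.

json_mod = import_json()
if json_mod is not None:
    print(json_mod.dumps({"ok": True}))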
java | private Page createPageWithExtractedDictionary(Page page)
{
Block[] blocks = new Block[page.getChannelCount()];
Block dictionary = ((DictionaryBlock) page.getBlock(channels[0])).getDictionary();
// extract data dictionary
blocks[channels[0]] = dictionary;
// extract hash dictionary
if (inputHashChannel.isPresent()) {
blocks[inputHashChannel.get()] = ((DictionaryBlock) page.getBlock(inputHashChannel.get())).getDictionary();
}
return new Page(dictionary.getPositionCount(), blocks);
} |
java | @Override
protected boolean parseCmdLineArgs(String[] args) throws IllegalArgumentException {
if (action.equals(ACTION_HELP)) {
if (args.length == 0)
helpAction(null);
else
helpAction(args[0]);
return true;
}
if (action.equals(ACTION_VIEW)) {
parseOptions(args, viewActionOptions);
return false;
} else if (action.equals(ACTION_LISTINSTANCES)) {
parseOptions(args, listInstancesActionOptions);
setListInstances(true);
return false;
} else if (action.equals(ACTION_COPY)) {
parseOptions(args, copyActionOptions);
return false;
}
return true;
} |