language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
---|---|
java | public OvhNetbootOption serviceName_boot_bootId_option_option_GET(String serviceName, Long bootId, net.minidev.ovh.api.dedicated.server.OvhBootOptionEnum option) throws IOException {
String qPath = "/dedicated/server/{serviceName}/boot/{bootId}/option/{option}";
StringBuilder sb = path(qPath, serviceName, bootId, option);
String resp = exec(qPath, "GET", sb.toString(), null);
return convertTo(resp, OvhNetbootOption.class);
} |
java | public static <T extends Comparable<T>> List<T> sorted(Iterable<T> items) {
List<T> result = toList(items);
Collections.sort(result);
return result;
} |
/**
 * Builds the configuration form for this component: the default name
 * field, a URL field, an optional "user" text field (max length 16) and
 * an optional "password" field (max length 40).
 *
 * @return the assembled form
 */
java | public static Form form() {
return Form.create()
.with(defaultNameField())
.url()
.with(Text.of("user").label("User").length(16).optional())
.with(Password.of("password").label("Password").length(40).optional());
} |
java | @Override
public final Iterable<NoteDocument> findAll(
final RootDocument rootDocument) {
final Iterable<NoteDocument> noteDocuments =
findAll(rootDocument.getFilename());
if (noteDocuments == null) {
return null;
}
for (final NoteDocument noteDocument : noteDocuments) {
final Note note = noteDocument.getGedObject();
note.setParent(rootDocument.getGedObject());
}
return noteDocuments;
} |
def _apply_nested_privacy(self, data):
    """Apply privacy filtering to nested documents.

    :param data: Dict of data to which privacy is already applied.
    :return: The same dict, with any nested document values (or lists of
        documents) replaced by their privacy-filtered versions.
    """
    options = {
        'is_admin': self.is_admin,
        'drop_hidden': self.drop_hidden,
    }
    for field, value in data.items():
        if is_document(value):
            data[field] = apply_privacy(self.request)(result=value, **options)
        elif isinstance(value, list) and value and is_document(value[0]):
            data[field] = [apply_privacy(self.request)(result=item, **options)
                           for item in value]
    return data
def criteria_extract(crit_file='criteria.txt', output_file='criteria.xls',
                     output_dir_path='.', input_dir_path='', latex=False):
    """
    Extracts criteria from a MagIC 3.0 format criteria.txt file.
    Default output format is an Excel file; with latex=True a .tex table
    is written instead, which you can typeset with latex on your own computer.

    Parameters
    ___________
    crit_file : str, default "criteria.txt"
        input file name
    output_file : str, default "criteria.xls"
        output file name
    output_dir_path : str, default "."
        output file directory
    input_dir_path : str, default ""
        path for input file if different from output_dir_path (default is same)
    latex : boolean, default False
        if True, output file should be latex formatted table with a .tex ending

    Return :
        [True,False], data table error type : True if successful
    Effects :
        writes xls or latex formatted tables for use in publications
    """
    input_dir_path, output_dir_path = pmag.fix_directories(input_dir_path, output_dir_path)
    try:
        fname = pmag.resolve_file_name(crit_file, input_dir_path)
    except IOError:
        print("bad criteria file name")
        return False, "bad criteria file name"
    crit_df = pd.read_csv(fname, sep='\t', header=1)
    if not len(crit_df):
        # previously fell through to `return True, [out_file]` with
        # `out_file` unbound, raising NameError
        print("No criteria for output.")
        return False, "no criteria for output"
    out_file = pmag.resolve_file_name(output_file, output_dir_path)
    # "table_column" is e.g. "specimens.int_n": split into table / column.
    s = crit_df['table_column'].str.split(pat='.', expand=True)
    crit_df['table'] = s[0]
    crit_df['column'] = s[1]
    crit_df = crit_df[['table', 'column',
                       'criterion_value', 'criterion_operation']]
    crit_df.columns = ['Table', 'Statistic', 'Threshold', 'Operation']
    if latex:
        if out_file.endswith('.xls'):
            # replace only the final extension (rsplit('.') without a
            # maxsplit would truncate names that contain extra dots)
            out_file = out_file.rsplit('.', 1)[0] + ".tex"
        # spell the comparison operators out as words in the latex table
        # (rewrite the 'Operation' column in place; the original code
        # accidentally created a new, extra 'operation' column)
        crit_df.loc[crit_df['Operation'].str.contains(
            '<'), 'Operation'] = 'maximum'
        crit_df.loc[crit_df['Operation'].str.contains(
            '>'), 'Operation'] = 'minimum'
        crit_df.loc[crit_df['Operation'] == '=', 'Operation'] = 'equal to'
        with open(out_file, 'w+', errors="backslashreplace") as info_out:
            # backslashes must be escaped in these literals, otherwise
            # '\d' / '\e' are written verbatim only by accident
            info_out.write('\\documentclass{article}\n')
            info_out.write('\\usepackage{booktabs}\n')
            # T1 will ensure that symbols like '<' are formatted correctly
            info_out.write("\\usepackage[T1]{fontenc}\n")
            info_out.write('\\begin{document}')
            info_out.write(crit_df.to_latex(index=False, longtable=False,
                                            escape=True, multicolumn=False))
            info_out.write('\\end{document}\n')
    else:
        crit_df.to_excel(out_file, index=False)
    return True, [out_file]
// Implements @throws tag inheritance: scans the candidate method's throws
// tags for ones documenting the requested exception (matched by simple or
// qualified name) or a subclass of it, recording matches in `output`.
java | public void inherit(DocFinder.Input input, DocFinder.Output output) {
ClassDoc exception;
// First invocation: derive the exception id from the tag itself, preferring
// the resolved qualified name and falling back to the source-text name.
if (input.tagId == null) {
ThrowsTag throwsTag = (ThrowsTag) input.tag;
exception = throwsTag.exception();
input.tagId = exception == null ?
throwsTag.exceptionName() :
throwsTag.exception().qualifiedName();
} else {
// Subsequent invocations: resolve the previously recorded id relative to
// the containing class (may yield null if unresolvable).
exception = input.element.containingClass().findClass(input.tagId);
}
ThrowsTag[] tags = ((MethodDoc)input.element).throwsTags();
for (int i = 0; i < tags.length; i++) {
// Exact match by simple name or by resolved qualified name: this tag
// becomes the inherited documentation holder.
if (input.tagId.equals(tags[i].exceptionName()) ||
(tags[i].exception() != null &&
(input.tagId.equals(tags[i].exception().qualifiedName())))) {
output.holder = input.element;
output.holderTag = tags[i];
output.inlineTags = input.isFirstSentence ?
tags[i].firstSentenceTags() : tags[i].inlineTags();
output.tagList.add(tags[i]);
} else if (exception != null && tags[i].exception() != null &&
tags[i].exception().subclassOf(exception)) {
// Subclass of the requested exception: collect it, but do not make it
// the primary holder.
output.tagList.add(tags[i]);
}
}
} |
// Stores a multi-valued property; delegates to the generic setProperty,
// which accepts the List value as-is.
java | @Override
public void setListProperty(String propertyName, List<String> values) {
setProperty(propertyName, values);
} |
// ANTLR-generated parser rule for XConstructorCall:
//   'new' QualifiedName ('<' typeArgs '>')? ('(' args? ')')? trailingClosure?
// Generated code — structure and statement order must not be altered; only
// explanatory comments have been added.
java | public final EObject ruleXConstructorCall() throws RecognitionException {
EObject current = null;
Token otherlv_1=null;
Token otherlv_3=null;
Token otherlv_5=null;
Token otherlv_7=null;
Token lv_explicitConstructorCall_8_0=null;
Token otherlv_11=null;
Token otherlv_13=null;
EObject lv_typeArguments_4_0 = null;
EObject lv_typeArguments_6_0 = null;
EObject lv_arguments_9_0 = null;
EObject lv_arguments_10_0 = null;
EObject lv_arguments_12_0 = null;
EObject lv_arguments_14_0 = null;
enterRule();
try {
// InternalXbase.g:4425:2: ( ( () otherlv_1= 'new' ( ( ruleQualifiedName ) ) ( ( ( '<' )=>otherlv_3= '<' ) ( (lv_typeArguments_4_0= ruleJvmArgumentTypeReference ) ) (otherlv_5= ',' ( (lv_typeArguments_6_0= ruleJvmArgumentTypeReference ) ) )* otherlv_7= '>' )? ( ( ( ( '(' ) )=> (lv_explicitConstructorCall_8_0= '(' ) ) ( ( ( ( () ( ( ( ruleJvmFormalParameter ) ) ( ',' ( ( ruleJvmFormalParameter ) ) )* )? ( ( '|' ) ) ) )=> (lv_arguments_9_0= ruleXShortClosure ) ) | ( ( (lv_arguments_10_0= ruleXExpression ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleXExpression ) ) )* ) )? otherlv_13= ')' )? ( ( ( () '[' ) )=> (lv_arguments_14_0= ruleXClosure ) )? ) )
// InternalXbase.g:4426:2: ( () otherlv_1= 'new' ( ( ruleQualifiedName ) ) ( ( ( '<' )=>otherlv_3= '<' ) ( (lv_typeArguments_4_0= ruleJvmArgumentTypeReference ) ) (otherlv_5= ',' ( (lv_typeArguments_6_0= ruleJvmArgumentTypeReference ) ) )* otherlv_7= '>' )? ( ( ( ( '(' ) )=> (lv_explicitConstructorCall_8_0= '(' ) ) ( ( ( ( () ( ( ( ruleJvmFormalParameter ) ) ( ',' ( ( ruleJvmFormalParameter ) ) )* )? ( ( '|' ) ) ) )=> (lv_arguments_9_0= ruleXShortClosure ) ) | ( ( (lv_arguments_10_0= ruleXExpression ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleXExpression ) ) )* ) )? otherlv_13= ')' )? ( ( ( () '[' ) )=> (lv_arguments_14_0= ruleXClosure ) )? )
{
// InternalXbase.g:4426:2: ( () otherlv_1= 'new' ( ( ruleQualifiedName ) ) ( ( ( '<' )=>otherlv_3= '<' ) ( (lv_typeArguments_4_0= ruleJvmArgumentTypeReference ) ) (otherlv_5= ',' ( (lv_typeArguments_6_0= ruleJvmArgumentTypeReference ) ) )* otherlv_7= '>' )? ( ( ( ( '(' ) )=> (lv_explicitConstructorCall_8_0= '(' ) ) ( ( ( ( () ( ( ( ruleJvmFormalParameter ) ) ( ',' ( ( ruleJvmFormalParameter ) ) )* )? ( ( '|' ) ) ) )=> (lv_arguments_9_0= ruleXShortClosure ) ) | ( ( (lv_arguments_10_0= ruleXExpression ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleXExpression ) ) )* ) )? otherlv_13= ')' )? ( ( ( () '[' ) )=> (lv_arguments_14_0= ruleXClosure ) )? )
// InternalXbase.g:4427:3: () otherlv_1= 'new' ( ( ruleQualifiedName ) ) ( ( ( '<' )=>otherlv_3= '<' ) ( (lv_typeArguments_4_0= ruleJvmArgumentTypeReference ) ) (otherlv_5= ',' ( (lv_typeArguments_6_0= ruleJvmArgumentTypeReference ) ) )* otherlv_7= '>' )? ( ( ( ( '(' ) )=> (lv_explicitConstructorCall_8_0= '(' ) ) ( ( ( ( () ( ( ( ruleJvmFormalParameter ) ) ( ',' ( ( ruleJvmFormalParameter ) ) )* )? ( ( '|' ) ) ) )=> (lv_arguments_9_0= ruleXShortClosure ) ) | ( ( (lv_arguments_10_0= ruleXExpression ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleXExpression ) ) )* ) )? otherlv_13= ')' )? ( ( ( () '[' ) )=> (lv_arguments_14_0= ruleXClosure ) )?
{
// InternalXbase.g:4427:3: ()
// InternalXbase.g:4428:4:
{
if ( state.backtracking==0 ) {
current = forceCreateModelElement(
grammarAccess.getXConstructorCallAccess().getXConstructorCallAction_0(),
current);
}
}
otherlv_1=(Token)match(input,74,FOLLOW_61); if (state.failed) return current;
if ( state.backtracking==0 ) {
newLeafNode(otherlv_1, grammarAccess.getXConstructorCallAccess().getNewKeyword_1());
}
// InternalXbase.g:4438:3: ( ( ruleQualifiedName ) )
// InternalXbase.g:4439:4: ( ruleQualifiedName )
{
// InternalXbase.g:4439:4: ( ruleQualifiedName )
// InternalXbase.g:4440:5: ruleQualifiedName
{
if ( state.backtracking==0 ) {
if (current==null) {
current = createModelElement(grammarAccess.getXConstructorCallRule());
}
}
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXConstructorCallAccess().getConstructorJvmConstructorCrossReference_2_0());
}
pushFollow(FOLLOW_65);
ruleQualifiedName();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
afterParserOrEnumRuleCall();
}
}
}
// Optional type-argument section: '<' JvmArgumentTypeReference (',' ...)* '>'
// InternalXbase.g:4454:3: ( ( ( '<' )=>otherlv_3= '<' ) ( (lv_typeArguments_4_0= ruleJvmArgumentTypeReference ) ) (otherlv_5= ',' ( (lv_typeArguments_6_0= ruleJvmArgumentTypeReference ) ) )* otherlv_7= '>' )?
int alt76=2;
alt76 = dfa76.predict(input);
switch (alt76) {
case 1 :
// InternalXbase.g:4455:4: ( ( '<' )=>otherlv_3= '<' ) ( (lv_typeArguments_4_0= ruleJvmArgumentTypeReference ) ) (otherlv_5= ',' ( (lv_typeArguments_6_0= ruleJvmArgumentTypeReference ) ) )* otherlv_7= '>'
{
// InternalXbase.g:4455:4: ( ( '<' )=>otherlv_3= '<' )
// InternalXbase.g:4456:5: ( '<' )=>otherlv_3= '<'
{
otherlv_3=(Token)match(input,19,FOLLOW_25); if (state.failed) return current;
if ( state.backtracking==0 ) {
newLeafNode(otherlv_3, grammarAccess.getXConstructorCallAccess().getLessThanSignKeyword_3_0());
}
}
// InternalXbase.g:4462:4: ( (lv_typeArguments_4_0= ruleJvmArgumentTypeReference ) )
// InternalXbase.g:4463:5: (lv_typeArguments_4_0= ruleJvmArgumentTypeReference )
{
// InternalXbase.g:4463:5: (lv_typeArguments_4_0= ruleJvmArgumentTypeReference )
// InternalXbase.g:4464:6: lv_typeArguments_4_0= ruleJvmArgumentTypeReference
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXConstructorCallAccess().getTypeArgumentsJvmArgumentTypeReferenceParserRuleCall_3_1_0());
}
pushFollow(FOLLOW_26);
lv_typeArguments_4_0=ruleJvmArgumentTypeReference();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
if (current==null) {
current = createModelElementForParent(grammarAccess.getXConstructorCallRule());
}
add(
current,
"typeArguments",
lv_typeArguments_4_0,
"org.eclipse.xtext.xbase.Xtype.JvmArgumentTypeReference");
afterParserOrEnumRuleCall();
}
}
}
// InternalXbase.g:4481:4: (otherlv_5= ',' ( (lv_typeArguments_6_0= ruleJvmArgumentTypeReference ) ) )*
loop75:
do {
int alt75=2;
int LA75_0 = input.LA(1);
if ( (LA75_0==48) ) {
alt75=1;
}
switch (alt75) {
case 1 :
// InternalXbase.g:4482:5: otherlv_5= ',' ( (lv_typeArguments_6_0= ruleJvmArgumentTypeReference ) )
{
otherlv_5=(Token)match(input,48,FOLLOW_25); if (state.failed) return current;
if ( state.backtracking==0 ) {
newLeafNode(otherlv_5, grammarAccess.getXConstructorCallAccess().getCommaKeyword_3_2_0());
}
// InternalXbase.g:4486:5: ( (lv_typeArguments_6_0= ruleJvmArgumentTypeReference ) )
// InternalXbase.g:4487:6: (lv_typeArguments_6_0= ruleJvmArgumentTypeReference )
{
// InternalXbase.g:4487:6: (lv_typeArguments_6_0= ruleJvmArgumentTypeReference )
// InternalXbase.g:4488:7: lv_typeArguments_6_0= ruleJvmArgumentTypeReference
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXConstructorCallAccess().getTypeArgumentsJvmArgumentTypeReferenceParserRuleCall_3_2_1_0());
}
pushFollow(FOLLOW_26);
lv_typeArguments_6_0=ruleJvmArgumentTypeReference();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
if (current==null) {
current = createModelElementForParent(grammarAccess.getXConstructorCallRule());
}
add(
current,
"typeArguments",
lv_typeArguments_6_0,
"org.eclipse.xtext.xbase.Xtype.JvmArgumentTypeReference");
afterParserOrEnumRuleCall();
}
}
}
}
break;
default :
break loop75;
}
} while (true);
otherlv_7=(Token)match(input,20,FOLLOW_63); if (state.failed) return current;
if ( state.backtracking==0 ) {
newLeafNode(otherlv_7, grammarAccess.getXConstructorCallAccess().getGreaterThanSignKeyword_3_3());
}
}
break;
}
// Optional explicit argument list: '(' (XShortClosure | XExpression (',' XExpression)*)? ')'
// InternalXbase.g:4511:3: ( ( ( ( '(' ) )=> (lv_explicitConstructorCall_8_0= '(' ) ) ( ( ( ( () ( ( ( ruleJvmFormalParameter ) ) ( ',' ( ( ruleJvmFormalParameter ) ) )* )? ( ( '|' ) ) ) )=> (lv_arguments_9_0= ruleXShortClosure ) ) | ( ( (lv_arguments_10_0= ruleXExpression ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleXExpression ) ) )* ) )? otherlv_13= ')' )?
int alt79=2;
alt79 = dfa79.predict(input);
switch (alt79) {
case 1 :
// InternalXbase.g:4512:4: ( ( ( '(' ) )=> (lv_explicitConstructorCall_8_0= '(' ) ) ( ( ( ( () ( ( ( ruleJvmFormalParameter ) ) ( ',' ( ( ruleJvmFormalParameter ) ) )* )? ( ( '|' ) ) ) )=> (lv_arguments_9_0= ruleXShortClosure ) ) | ( ( (lv_arguments_10_0= ruleXExpression ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleXExpression ) ) )* ) )? otherlv_13= ')'
{
// InternalXbase.g:4512:4: ( ( ( '(' ) )=> (lv_explicitConstructorCall_8_0= '(' ) )
// InternalXbase.g:4513:5: ( ( '(' ) )=> (lv_explicitConstructorCall_8_0= '(' )
{
// InternalXbase.g:4517:5: (lv_explicitConstructorCall_8_0= '(' )
// InternalXbase.g:4518:6: lv_explicitConstructorCall_8_0= '('
{
lv_explicitConstructorCall_8_0=(Token)match(input,49,FOLLOW_28); if (state.failed) return current;
if ( state.backtracking==0 ) {
newLeafNode(lv_explicitConstructorCall_8_0, grammarAccess.getXConstructorCallAccess().getExplicitConstructorCallLeftParenthesisKeyword_4_0_0());
}
if ( state.backtracking==0 ) {
if (current==null) {
current = createModelElement(grammarAccess.getXConstructorCallRule());
}
setWithLastConsumed(current, "explicitConstructorCall", true, "(");
}
}
}
// InternalXbase.g:4530:4: ( ( ( ( () ( ( ( ruleJvmFormalParameter ) ) ( ',' ( ( ruleJvmFormalParameter ) ) )* )? ( ( '|' ) ) ) )=> (lv_arguments_9_0= ruleXShortClosure ) ) | ( ( (lv_arguments_10_0= ruleXExpression ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleXExpression ) ) )* ) )?
int alt78=3;
alt78 = dfa78.predict(input);
switch (alt78) {
case 1 :
// InternalXbase.g:4531:5: ( ( ( () ( ( ( ruleJvmFormalParameter ) ) ( ',' ( ( ruleJvmFormalParameter ) ) )* )? ( ( '|' ) ) ) )=> (lv_arguments_9_0= ruleXShortClosure ) )
{
// InternalXbase.g:4531:5: ( ( ( () ( ( ( ruleJvmFormalParameter ) ) ( ',' ( ( ruleJvmFormalParameter ) ) )* )? ( ( '|' ) ) ) )=> (lv_arguments_9_0= ruleXShortClosure ) )
// InternalXbase.g:4532:6: ( ( () ( ( ( ruleJvmFormalParameter ) ) ( ',' ( ( ruleJvmFormalParameter ) ) )* )? ( ( '|' ) ) ) )=> (lv_arguments_9_0= ruleXShortClosure )
{
// InternalXbase.g:4557:6: (lv_arguments_9_0= ruleXShortClosure )
// InternalXbase.g:4558:7: lv_arguments_9_0= ruleXShortClosure
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXConstructorCallAccess().getArgumentsXShortClosureParserRuleCall_4_1_0_0());
}
pushFollow(FOLLOW_29);
lv_arguments_9_0=ruleXShortClosure();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
if (current==null) {
current = createModelElementForParent(grammarAccess.getXConstructorCallRule());
}
add(
current,
"arguments",
lv_arguments_9_0,
"org.eclipse.xtext.xbase.Xbase.XShortClosure");
afterParserOrEnumRuleCall();
}
}
}
}
break;
case 2 :
// InternalXbase.g:4576:5: ( ( (lv_arguments_10_0= ruleXExpression ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleXExpression ) ) )* )
{
// InternalXbase.g:4576:5: ( ( (lv_arguments_10_0= ruleXExpression ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleXExpression ) ) )* )
// InternalXbase.g:4577:6: ( (lv_arguments_10_0= ruleXExpression ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleXExpression ) ) )*
{
// InternalXbase.g:4577:6: ( (lv_arguments_10_0= ruleXExpression ) )
// InternalXbase.g:4578:7: (lv_arguments_10_0= ruleXExpression )
{
// InternalXbase.g:4578:7: (lv_arguments_10_0= ruleXExpression )
// InternalXbase.g:4579:8: lv_arguments_10_0= ruleXExpression
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXConstructorCallAccess().getArgumentsXExpressionParserRuleCall_4_1_1_0_0());
}
pushFollow(FOLLOW_30);
lv_arguments_10_0=ruleXExpression();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
if (current==null) {
current = createModelElementForParent(grammarAccess.getXConstructorCallRule());
}
add(
current,
"arguments",
lv_arguments_10_0,
"org.eclipse.xtext.xbase.Xbase.XExpression");
afterParserOrEnumRuleCall();
}
}
}
// InternalXbase.g:4596:6: (otherlv_11= ',' ( (lv_arguments_12_0= ruleXExpression ) ) )*
loop77:
do {
int alt77=2;
int LA77_0 = input.LA(1);
if ( (LA77_0==48) ) {
alt77=1;
}
switch (alt77) {
case 1 :
// InternalXbase.g:4597:7: otherlv_11= ',' ( (lv_arguments_12_0= ruleXExpression ) )
{
otherlv_11=(Token)match(input,48,FOLLOW_4); if (state.failed) return current;
if ( state.backtracking==0 ) {
newLeafNode(otherlv_11, grammarAccess.getXConstructorCallAccess().getCommaKeyword_4_1_1_1_0());
}
// InternalXbase.g:4601:7: ( (lv_arguments_12_0= ruleXExpression ) )
// InternalXbase.g:4602:8: (lv_arguments_12_0= ruleXExpression )
{
// InternalXbase.g:4602:8: (lv_arguments_12_0= ruleXExpression )
// InternalXbase.g:4603:9: lv_arguments_12_0= ruleXExpression
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXConstructorCallAccess().getArgumentsXExpressionParserRuleCall_4_1_1_1_1_0());
}
pushFollow(FOLLOW_30);
lv_arguments_12_0=ruleXExpression();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
if (current==null) {
current = createModelElementForParent(grammarAccess.getXConstructorCallRule());
}
add(
current,
"arguments",
lv_arguments_12_0,
"org.eclipse.xtext.xbase.Xbase.XExpression");
afterParserOrEnumRuleCall();
}
}
}
}
break;
default :
break loop77;
}
} while (true);
}
}
break;
}
otherlv_13=(Token)match(input,50,FOLLOW_64); if (state.failed) return current;
if ( state.backtracking==0 ) {
newLeafNode(otherlv_13, grammarAccess.getXConstructorCallAccess().getRightParenthesisKeyword_4_2());
}
}
break;
}
// Optional trailing closure argument: '[' ... ']' parsed as XClosure
// InternalXbase.g:4628:3: ( ( ( () '[' ) )=> (lv_arguments_14_0= ruleXClosure ) )?
int alt80=2;
alt80 = dfa80.predict(input);
switch (alt80) {
case 1 :
// InternalXbase.g:4629:4: ( ( () '[' ) )=> (lv_arguments_14_0= ruleXClosure )
{
// InternalXbase.g:4635:4: (lv_arguments_14_0= ruleXClosure )
// InternalXbase.g:4636:5: lv_arguments_14_0= ruleXClosure
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXConstructorCallAccess().getArgumentsXClosureParserRuleCall_5_0());
}
pushFollow(FOLLOW_2);
lv_arguments_14_0=ruleXClosure();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
if (current==null) {
current = createModelElementForParent(grammarAccess.getXConstructorCallRule());
}
add(
current,
"arguments",
lv_arguments_14_0,
"org.eclipse.xtext.xbase.Xbase.XClosure");
afterParserOrEnumRuleCall();
}
}
}
break;
}
}
}
if ( state.backtracking==0 ) {
leaveRule();
}
}
catch (RecognitionException re) {
recover(input,re);
appendSkippedTokens();
}
finally {
}
return current;
} |
java | public JarSignerRequest createVerifyRequest( File jarFile, boolean certs )
{
JarSignerVerifyRequest request = new JarSignerVerifyRequest();
request.setCerts( certs );
request.setWorkingDirectory( workDirectory );
request.setMaxMemory( getMaxMemory() );
request.setVerbose( isVerbose() );
request.setArchive( jarFile );
return request;
} |
java | private static String getUserInfo(String url) {
String userInfo = null;
int startIndex = Integer.MIN_VALUE;
int nextSlashIndex = Integer.MIN_VALUE;
int endIndex = Integer.MIN_VALUE;
try {
// The user info start index should be the first index of "//".
startIndex = url.indexOf("//");
if (startIndex != -1) {
startIndex += 2;
// The user info should be found before the next '/' index.
nextSlashIndex = url.indexOf('/', startIndex);
if (nextSlashIndex == -1) {
nextSlashIndex = url.length();
}
// The user info ends at the last index of '@' from the previously
// computed subsequence.
endIndex = url.substring(startIndex, nextSlashIndex).lastIndexOf('@');
if (endIndex != -1) {
userInfo = url.substring(startIndex, startIndex + endIndex);
}
}
} catch (StringIndexOutOfBoundsException ex) {
System.err.println("String index out of bounds for:|" + url + "|");
System.err.println("Start index: " + startIndex);
System.err.println("Next slash index " + nextSlashIndex);
System.err.println("End index :" + endIndex);
System.err.println("User info :|" + userInfo + "|");
ex.printStackTrace();
}
return userInfo;
} |
def segments(self):
    """
    A list of `Segment` objects.

    The list starts with the *non-zero* label.  The returned list has a
    length equal to the number of labels and matches the order of the
    ``labels`` attribute.
    """
    return [Segment(self.data, label, slc, self.get_area(label))
            for label, slc in zip(self.labels, self.slices)]
// Copies all bytes from inputStream to outputStream using a freshly
// allocated BUFFER_SIZE transfer buffer; delegates to the three-argument
// copy overload. Neither stream is closed here.
java | public static void copy(InputStream inputStream, OutputStream outputStream) throws IOException {
LOGGER.trace("copy(InputStream, OutputStream)");
copy(inputStream, outputStream, new byte[BUFFER_SIZE]);
} |
def setModel(self, model):
    """Sets the model for the auto parameters.

    :param model: The data structure for this editor to provide access to
    :type model: :class:`QAutoParameterModel<sparkle.gui.stim.qauto_parameter_model.QAutoParameterModel>`
    """
    self.paramList.setModel(model)
    model.hintRequested.connect(self.hintRequested)
    # keep the title in sync whenever rows come or go
    for row_signal in (model.rowsInserted, model.rowsRemoved):
        row_signal.connect(self.updateTitle)
    self.updateTitle()
// Copies every entry of the given map into the map under construction and
// returns this builder so calls can be chained.
java | public MapBuilder<K, V> putAll(Map<K, V> map) {
this.map.putAll(map);
return this;
} |
java | public int getNumWeights() {
if (weights == null) return 0;
int numWeights = 0;
for (double[] wts : weights) {
numWeights += wts.length;
}
return numWeights;
} |
def _aix_loadavg():
    '''
    Return the load average on AIX
    '''
    # uptime output ends with e.g.:
    # "... 2 users, load average: 0.28, 0.47, 0.69"
    uptime = __salt__['cmd.run']('uptime')
    averages = uptime.split('load average')[1].split()
    return {'1-min': averages[1].strip(','),
            '5-min': averages[2].strip(','),
            '15-min': averages[3]}
// Hadoop InputFormat hook: every split is read with a MasterTextRecordReader;
// the split and context are handled by the reader's own initialize step.
java | @Override
public RecordReader<Key, Value>
createRecordReader(InputSplit split,
TaskAttemptContext context) throws IOException, InterruptedException {
return new MasterTextRecordReader();
} |
// Persists the given INDArray under the supplied id; delegates the actual
// serialization/storage to doSave.
java | @Override
public void save(INDArray save, String id) throws SQLException, IOException {
doSave(save, id);
} |
java | private void put(Object obj, XmlParser.Node node) throws NoSuchMethodException,
ClassNotFoundException, InvocationTargetException, IllegalAccessException
{
if (!(obj instanceof Map))
throw new IllegalArgumentException("Object for put is not a Map: " + obj);
Map map = (Map) obj;
String name = node.getAttribute("name");
Object value = value(obj, node);
map.put(name, value);
if (log.isDebugEnabled()) log.debug(obj + ".put(" + name + "+" + value + ")");
} |
java | private void createColumnFamilies(List<TableInfo> tableInfos, KsDef ksDef) throws Exception
{
for (TableInfo tableInfo : tableInfos)
{
if (isCql3Enabled(tableInfo))
{
createOrUpdateUsingCQL3(tableInfo, ksDef);
createIndexUsingCql(tableInfo);
}
else
{
createOrUpdateColumnFamily(tableInfo, ksDef);
}
// Create Inverted Indexed Table if required.
createInvertedIndexTable(tableInfo, ksDef);
}
} |
java | protected void removeFromCache(String cacheName, String key) {
try {
ICache cache = getCache(cacheName);
if (cache != null) {
cache.delete(key);
}
} catch (CacheException e) {
LOGGER.warn(e.getMessage(), e);
}
} |
// Launches the given class with the supplied arguments from the current
// working directory; delegates to the directory-aware overload.
java | public ProcessAdapter execute(Class<?> type, String... args) {
return execute(FileSystemUtils.WORKING_DIRECTORY, type, args);
} |
def obsolete_client(func):
    """This is a decorator which can be used to mark Client classes as
    obsolete. It will result in an error being emitted when the class is
    instantiated."""
    @functools.wraps(func)
    def new_func(*args, **kwargs):
        message = (
            "{} has been removed from this version of the library. "
            "Please refer to current documentation for guidance."
        )
        raise ObsoleteException(message.format(func.__name__))
    return new_func
# NOTE: Python 2 code (uses xrange); order-sensitive index arithmetic —
# left byte-identical, comments only.
python | def group_by_day(self):
"""Return a dictionary of this collection's values grouped by each day of year.
Key values are between 1-365.
"""
# pre-seed every day of the year with an empty list so days outside the
# analysis period are still present in the result
hourly_data_by_day = OrderedDict()
for d in xrange(1, 366):
hourly_data_by_day[d] = []
a_per = self.header.analysis_period
# number of stored values per day at this timestep
indx_per_day = 24 * a_per.timestep
# day-of-year on which the analysis period starts
start_doy = sum(a_per._num_of_days_each_month[:a_per.st_time.month-1]) \
+ a_per.st_time.day
if not a_per.is_reversed:
# contiguous period: slice the values one day at a time
for i in range(0, len(self._values), indx_per_day):
hourly_data_by_day[start_doy] = self._values[i:i + indx_per_day]
start_doy += 1
else:
# reversed (wrap-around) period: fill from start_doy to day 365 first,
# then wrap to day 1 for the remaining values
end_ind = 24 * a_per.timestep * (365 - start_doy)
for i in range(0, end_ind + 1, indx_per_day):
hourly_data_by_day[start_doy] = self._values[i:i + indx_per_day]
start_doy += 1
start_doy = 1
for i in range(end_ind, len(self._values), indx_per_day):
hourly_data_by_day[start_doy] = self._values[i:i + indx_per_day]
start_doy += 1
return hourly_data_by_day |
// Householder helper on a block-format submatrix: divides row `row` of Y
// (from colStart to the end) by `max` IN PLACE while accumulating the
// 2-norm of the scaled row, and returns that norm with the sign of the
// leading element (so the reflector can avoid cancellation).
// Left byte-identical: the in-place `dataY[index] /= max` side effects and
// block-index arithmetic are order-sensitive.
java | public static double computeTauAndDivideRow( final int blockLength ,
final DSubmatrixD1 Y ,
final int row , int colStart , final double max ) {
final int height = Math.min(blockLength , Y.row1-Y.row0);
final double dataY[] = Y.original.data;
double top=0;
double norm2 = 0;
// first inner block containing colStart; colStart becomes the offset
// within that block
int startJ = Y.col0 + colStart - colStart%blockLength;
colStart = colStart%blockLength;
for( int j = startJ; j < Y.col1; j += blockLength ) {
int width = Math.min( blockLength , Y.col1 - j );
// start of this row segment inside the block-ordered backing array
int index = Y.row0*Y.original.numCols + height*j + row*width;
if( j == startJ ) {
index += colStart;
// save this value so that the sign can be determined later on
top = dataY[index] /= max;
norm2 += top*top;
index++;
for( int k = colStart+1; k < width; k++ ) {
double val = dataY[index++] /= max;
norm2 += val*val;
}
} else {
for( int k = 0; k < width; k++ ) {
double val = dataY[index++] /= max;
norm2 += val*val;
}
}
}
norm2 = Math.sqrt(norm2);
// give the norm the sign of the leading element to avoid cancellation
if( top < 0 )
norm2 = -norm2;
return norm2;
} |
def process_list_arg(arg):
    """ Parse a string into a list separated by commas with whitespace stripped """
    # NOTE: `basestring` means this module targets Python 2; any other input
    # type falls through and returns None, matching the original behavior.
    if isinstance(arg, list):
        return arg
    elif isinstance(arg, basestring):
        return [part.strip() for part in arg.split(",")]
java | protected void buildFieldList(final String field, final List<String> fieldList) {
if(Utils.isEmpty(field)) {
return;
}
if(!fieldList.contains(field)) {
fieldList.add(field);
}
String plainField = String.valueOf(field);
int keyIndex = plainField.lastIndexOf('[');
while(keyIndex >= 0) {
int endKeyIndex = plainField.indexOf(']', keyIndex);
if(endKeyIndex >= 0) {
plainField = plainField.substring(0, keyIndex) + plainField.substring(endKeyIndex + 1);
if(!fieldList.contains(plainField)) {
fieldList.add(plainField);
}
keyIndex = plainField.lastIndexOf('[');
} else {
keyIndex = -1;
}
}
} |
def index_open(index, allow_no_indices=True, expand_wildcards='closed', ignore_unavailable=True, hosts=None, profile=None):
    '''
    .. versionadded:: 2017.7.0

    Open specified index.

    index
        Index to be opened
    allow_no_indices
        Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes _all string or when no indices have been specified)
    expand_wildcards
        Whether to expand wildcard expression to concrete indices that are open, closed or both., default ‘closed’, valid choices are: ‘open’, ‘closed’, ‘none’, ‘all’
    ignore_unavailable
        Whether specified concrete indices should be ignored when unavailable (missing or closed)

    CLI example::

        salt myminion elasticsearch.index_open testindex
    '''
    es = _get_instance(hosts, profile)
    try:
        response = es.indices.open(index=index,
                                   allow_no_indices=allow_no_indices,
                                   expand_wildcards=expand_wildcards,
                                   ignore_unavailable=ignore_unavailable)
        return response.get('acknowledged', False)
    except elasticsearch.TransportError as e:
        raise CommandExecutionError("Cannot open index {0}, server returned code {1} with message {2}".format(index, e.status_code, e.error))
def code(item):
    """
    Turn a NameID class instance into a quoted string of comma separated
    attribute,value pairs. The attribute names are replaced with digits.
    Depends on knowledge on the specific order of the attributes for the
    class that is used.

    :param item: The class instance
    :return: A quoted string
    """
    _res = []
    # The digit must be the attribute's POSITION in ATTR, advancing even
    # when an attribute is unset; otherwise the index cannot be mapped back
    # to the right attribute when the string is decoded.  (The previous
    # version only incremented the counter for attributes that were set.)
    for i, attr in enumerate(ATTR):
        val = getattr(item, attr)
        if val:
            _res.append("%d=%s" % (i, quote(val)))
    return ",".join(_res)
// Lists sync members of a sync group as an observable of pages; unwraps
// each ServiceResponse to its body. Generated Azure SDK pattern.
java | public Observable<Page<SyncMemberInner>> listBySyncGroupAsync(final String resourceGroupName, final String serverName, final String databaseName, final String syncGroupName) {
return listBySyncGroupWithServiceResponseAsync(resourceGroupName, serverName, databaseName, syncGroupName)
.map(new Func1<ServiceResponse<Page<SyncMemberInner>>, Page<SyncMemberInner>>() {
@Override
public Page<SyncMemberInner> call(ServiceResponse<Page<SyncMemberInner>> response) {
return response.body();
}
});
} |
def syncItems(self, client=None, clientId=None):
    """ Returns an instance of :class:`plexapi.sync.SyncList` for specified client.

    Parameters:
        client (:class:`~plexapi.myplex.MyPlexDevice`): a client to query SyncItems for.
        clientId (str): an identifier of a client to query SyncItems for.

    If both `client` and `clientId` provided the client would be preferred.
    If neither `client` nor `clientId` provided the clientId would be set to current clients`s identifier.
    """
    target_id = clientId
    if client:
        target_id = client.clientIdentifier
    elif target_id is None:
        target_id = X_PLEX_IDENTIFIER
    data = self.query(SyncList.key.format(clientId=target_id))
    return SyncList(self, data)
python | def __protocolize(base_url):
"""Internal add-protocol-to-url helper"""
if not base_url.startswith("http://") and not base_url.startswith("https://"):
base_url = "https://" + base_url
# Some API endpoints can't handle extra /'s in path requests
base_url = base_url.rstrip("/")
return base_url |
def fetchmany(self, size=-1):
    """ return a sequential set of records. This is guaranteed by locking,
    so that no other thread can grab a few records while a set is fetched.
    this has the side effect that other threads may have to wait for
    an arbitrary long time for the completion of the current request.

    :param size: number of records to fetch; negative or values above
        ``self.MAX_BLOCK_SIZE`` fall back to ``self.arraysize``.
    :return: list of records as produced by ``self.fetchone()``.
    """
    self._cursorLock.acquire()
    try:
        # default value (or just checking that someone did not put a
        # ridiculous size)
        if size < 0 or size > self.MAX_BLOCK_SIZE:
            size = self.arraysize
        recs = []
        for _ in range(size):
            recs.append(self.fetchone())
        return recs
    finally:
        # Release even when fetchone() raises; previously an exception left
        # the lock held forever, deadlocking every other consumer.
        self._cursorLock.release()
/**
 * Indicates whether this WMS layer can be rotated natively by the server.
 *
 * @return true only when native angles are enabled in the layer parameters
 *         and the backing server is MapServer or GeoServer (the two server
 *         types this code treats as supporting the angle parameter).
 */
@Override
public boolean supportsNativeRotation() {
    return this.params.useNativeAngle &&
            (this.params.serverType == WmsLayerParam.ServerType.MAPSERVER ||
             this.params.serverType == WmsLayerParam.ServerType.GEOSERVER);
}
def extractRecord(resolver, name, answers, level=10):
    '''
    This method is copy-pasted from twisted.names.common.
    The difference with the original is, that it favors IPv4 responses over
    IPv6. This is motivated by the problem of resolving "maps.googleapis.com"
    name, which has both types of entries.
    The logic in twisted.internet.tcp.Connector assumes the IPv4 type of
    address, and it fails to connect if IPv6 address is given.
    This problem only occurs with Twisted 10.2. In 12.1 the Connector
    implementation can already handle both types of addresses.
    '''
    if not level:
        return None
    # Prefer IPv4 (A) answers over everything else.
    for r in answers:
        if r.name == name and r.type == dns.A:
            return socket.inet_ntop(socket.AF_INET, r.payload.address)
    # Follow CNAME chains, decrementing the recursion budget.
    for r in answers:
        if r.name == name and r.type == dns.CNAME:
            result = extractRecord(
                resolver, r.payload.name, answers, level - 1)
            if not result:
                return resolver.getHostByName(
                    str(r.payload.name), effort=level - 1)
            return result
    if hasattr(socket, 'inet_ntop'):
        for r in answers:
            if r.name == name and r.type == dns.A6:
                return socket.inet_ntop(socket.AF_INET6, r.payload.address)
        for r in answers:
            if r.name == name and r.type == dns.AAAA:
                return socket.inet_ntop(socket.AF_INET6, r.payload.address)
    # No answers, but maybe there's a hint at who we should be asking about
    # this
    for r in answers:
        if r.type == dns.NS:
            r = Resolver(servers=[(str(r.payload.name), dns.PORT)])
            d = r.lookupAddress(str(name))
            # BUG FIX: tuple-parameter lambdas (``lambda (ans, auth, add):``)
            # are Python 2-only syntax (removed by PEP 3113); unpack the
            # (answers, authority, additional) triple by index instead.
            d.addCallback(lambda res:
                          extractRecord(r, name, res[0] + res[1] + res[2], level - 1))
            return d
def stop_video(self, tick):
    """Stop video if tick is more than the end, only for last file.

    Parameters
    ----------
    tick : int
        time in ms from the beginning of the file
        useless?
    """
    # Only react while the last video file is playing.
    if self.cnt_video == self.n_video:
        # Past the computed end offset: reset the button label and stop playback.
        if tick >= self.end_diff:
            self.idx_button.setText('Start')
            self.video.stop()
/**
 * Builds a canonical string form of a URI according to the spider's
 * parameter-handling option, so that URIs differing only in ignored
 * parameter parts map to the same representation.
 *
 * @param uri the URI to normalise
 * @param handleParameters how query parameters should be treated
 * @param handleODataParametersVisited whether OData-style parameters in the
 *            path should also be cleaned
 * @return the canonical URI string
 * @throws URIException if the URI cannot be processed
 */
public static String buildCleanedParametersURIRepresentation(org.apache.commons.httpclient.URI uri,
        SpiderParam.HandleParametersOption handleParameters, boolean handleODataParametersVisited) throws URIException {
    // If the option is set to use all the information, just use the default string representation
    if (handleParameters.equals(HandleParametersOption.USE_ALL)) {
        return uri.toString();
    }
    // If the option is set to ignore parameters completely, ignore the query completely
    if (handleParameters.equals(HandleParametersOption.IGNORE_COMPLETELY)) {
        return createBaseUriWithCleanedPath(uri, handleParameters, handleODataParametersVisited);
    }
    // If the option is set to ignore the value, we get the parameters and we only add their name to the
    // query
    if (handleParameters.equals(HandleParametersOption.IGNORE_VALUE)) {
        StringBuilder retVal = new StringBuilder(
                createBaseUriWithCleanedPath(uri, handleParameters, handleODataParametersVisited));
        String cleanedQuery = getCleanedQuery(uri.getEscapedQuery());
        // Add the parameters' names to the uri representation.
        if(cleanedQuery.length()>0) {
            retVal.append('?').append(cleanedQuery);
        }
        return retVal.toString();
    }
    // Should not be reached
    return uri.toString();
}
def stop_service(self, instance, service):
    """
    Stops a single service.

    :param str instance: A Yamcs instance name.
    :param str service: The name of the service.
    """
    # Issue a PATCH that sets the service state to 'stopped';
    # errors surface through patch_proto.
    req = rest_pb2.EditServiceRequest()
    req.state = 'stopped'
    url = '/services/{}/{}'.format(instance, service)
    self.patch_proto(url, data=req.SerializeToString())
java | public static String getRandomFilename(final String prefix) {
final Random rnd = new Random();
final StringBuilder stringBuilder = new StringBuilder(prefix);
for (int i = 0; i < RANDOM_FILE_NAME_LENGTH; i++) {
stringBuilder.append(ALPHABET[rnd.nextInt(ALPHABET.length)]);
}
return stringBuilder.toString();
} |
/**
 * Applies a marshaled context across all managed contexts, then optionally
 * commits the result.
 *
 * @param marshaledContext the context items to apply
 * @param commit if true, contexts are committed once the survey completes;
 *            acceptance depends on whether any context rejected the change
 * @param callback survey callback supplied by the caller (not invoked here;
 *            the iterating overload drives its own completion handler)
 */
void setMarshaledContext(ContextItems marshaledContext, boolean commit, ISurveyCallback callback) {
    ISurveyResponse response = new SurveyResponse();
    Iterator<IManagedContext<?>> iter = managedContexts.iterator();
    setMarshaledContext(marshaledContext, iter, response, __ -> {
        if (commit) {
            // Accept only when no managed context rejected the marshaled data.
            commitContexts(!response.rejected(), false);
        }
    });
}
def _writable_required(self, path):
    # type: (Text) -> FS
    """Return the writable filesystem for ``path``.

    :raises errors.ResourceReadOnly: when no write filesystem is configured.
    """
    fs = self.write_fs
    if fs is None:
        raise errors.ResourceReadOnly(path)
    return fs
/**
 * Derives the single permutation selected by the compile, together with its
 * binding-property selectors and the artifact files belonging to it.
 *
 * @param logger the tree logger for diagnostics
 * @param context the linker context supplying selection properties
 * @param artifacts the artifact set scanned for SelectionInformation entries
 * @return the permutation, or null when no SelectionInformation artifacts exist
 * @throws UnableToCompleteException if the artifacts reference more than one
 *             distinct strong name (i.e. multiple permutations)
 */
@Nullable
final Permutation calculatePermutation( @Nonnull final TreeLogger logger,
                                        @Nonnull final LinkerContext context,
                                        @Nonnull final ArtifactSet artifacts )
    throws UnableToCompleteException
{
    Permutation permutation = null;
    for ( final SelectionInformation result : artifacts.find( SelectionInformation.class ) )
    {
        final String strongName = result.getStrongName();
        // All SelectionInformation artifacts must agree on a single strong name.
        if ( null != permutation && !permutation.getPermutationName().equals( strongName ) )
        {
            throw new UnableToCompleteException();
        }
        if ( null == permutation )
        {
            // First artifact seen: create the permutation and attach its files.
            permutation = new Permutation( strongName );
            final Set<String> artifactsForCompilation = getArtifactsForCompilation( context, artifacts );
            permutation.getPermutationFiles().addAll( artifactsForCompilation );
        }
        // Collect the non-derived binding properties for this selection.
        final List<BindingProperty> list = new ArrayList<>();
        for ( final SelectionProperty property : context.getProperties() )
        {
            if ( !property.isDerived() )
            {
                final String name = property.getName();
                final String value = result.getPropMap().get( name );
                if ( null != value )
                {
                    list.add( new BindingProperty( name, value ) );
                }
            }
        }
        final SelectionDescriptor selection = new SelectionDescriptor( strongName, list );
        final List<SelectionDescriptor> selectors = permutation.getSelectors();
        // De-duplicate identical selections across artifacts.
        if ( !selectors.contains( selection ) )
        {
            selectors.add( selection );
        }
    }
    if ( null != permutation )
    {
        logger.log( Type.DEBUG, "Calculated Permutation: " + permutation.getPermutationName() +
                                " Selectors: " + permutation.getSelectors() );
    }
    return permutation;
}
java | @Override
public void set(int index, byte[] data, long scn) throws Exception {
if(data == null) {
set(index, data, 0, 0, scn);
} else {
set(index, data, 0, data.length, scn);
}
} |
/**
 * Rolls back the current broker transaction: fires the before/after rollback
 * events, clears registration state and, when a local connection transaction
 * is still open, performs the local rollback. A no-op when no transaction is
 * in progress.
 *
 * @throws TransactionNotInProgressException declared by the interface
 */
public synchronized void abortTransaction() throws TransactionNotInProgressException
{
    if(isInTransaction())
    {
        fireBrokerEvent(BEFORE_ROLLBACK_EVENT);
        setInTransaction(false);
        clearRegistrationLists();
        referencesBroker.removePrefetchingListeners();
        /*
        arminw:
        check if we in local tx, before do local rollback
        Necessary, because ConnectionManager may do a rollback by itself
        or in managed environments the used connection is already be closed
        */
        if(connectionManager.isInLocalTransaction()) this.connectionManager.localRollback();
        fireBrokerEvent(AFTER_ROLLBACK_EVENT);
    }
}
/**
 * Resolves a resource path through the cache, rethrowing any
 * MalformedURLException that was recorded when the cache entry was computed.
 *
 * @param path the resource path to resolve
 * @return the resolved URL
 * @throws MalformedURLException if resolution failed during (re)computation
 */
public URL getResource(String path) throws MalformedURLException {
    // The cached Result carries either a value or the exception from refresh.
    Result<URL,MalformedURLException> result = getResourceCache.get(path, getResourceRefresher);
    MalformedURLException exception = result.getException();
    if(exception != null) throw exception;
    return result.getValue();
}
def remove_negativescores_nodes(self):
    """\
    if there are elements inside our top node
    that have a negative gravity score,
    let's give em the boot
    """
    # Select every element under the top node carrying a gravityScore attribute.
    gravity_items = self.parser.css_select(self.top_node, "*[gravityScore]")
    for item in gravity_items:
        score = self.parser.getAttribute(item, 'gravityScore')
        # base 0 lets int() accept plain decimal as well as 0x/0o prefixed strings
        score = int(score, 0)
        # NOTE(review): despite the docstring, nodes scoring exactly 0 are
        # removed too, since the test is `< 1` rather than `< 0`.
        if score < 1:
            item.getparent().remove(item)
/**
 * Removes the attribute only when it is currently mapped to the given value.
 *
 * @param key attribute key
 * @param value expected current value
 * @return true if the attribute was removed
 */
@Override
public final boolean removeAttribute(Object key, Object value) {
    // Delegates to the shared attribute store, passing this object as owner.
    return attributes.removeAttribute(this, key, value);
}
def distance(self, host):
    """
    Checks if ``predicate(host)``, then returns
    :attr:`~HostDistance.IGNORED` if falsey, and defers to the child policy
    otherwise.
    """
    # Guard clause: hosts failing the predicate are ignored outright.
    if not self.predicate(host):
        return HostDistance.IGNORED
    return self._child_policy.distance(host)
def load_all_yamls(cls, directories):
    """Loads yaml files from all given directories.

    Args:
        directories: list of directories to search
    Returns:
        dict of {fullpath: loaded_yaml_structure}
    """
    paths = []
    loaded = {}
    for directory in directories:
        # Search roots under /home are created on demand.
        if directory.startswith('/home') and not os.path.exists(directory):
            os.makedirs(directory)
        for dirname, _subdirs, files in os.walk(directory):
            paths.extend(os.path.join(dirname, name)
                         for name in files if name.endswith('.yaml'))
    for path in paths:
        loaded[path] = cls.load_yaml_by_path(path)
    return loaded
java | @Override
public Device get(String deviceId) {
DBCursor devices = devicesCollection().find(new BasicDBObject(ImmutableMap.of("deviceId", deviceId)));
if(devices.hasNext()) {
return dbObjectToDevice(devices.next());
}
return null;
} |
/**
 * Creates a JCA session on the underlying JCA connection, if one exists.
 *
 * @param transacted whether the session should be transacted
 * @return the new JmsJcaSession, or null when there is no JCA connection
 * @throws JMSException wrapping any failure raised by the resource adapter
 */
protected JmsJcaSession createJcaSession(boolean transacted) throws JMSException {
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.entry(this, tc, "createJcaSession", transacted);
    JmsJcaSession jcaSess = null;
    // If we have a JCA connection, then make a JCA session
    if (jcaConnection != null) {
        try {
            jcaSess = jcaConnection.createSession(transacted);
        } catch (Exception e) { // ResourceE, IllegalStateE, SIE, SIErrorE
            // No FFDC code needed
            // d238447 Generate FFDC for these cases.
            throw (JMSException) JmsErrorUtils.newThrowable(
                JMSException.class,
                "JCA_CREATE_SESS_CWSIA0024",
                null,
                e,
                "JmsConnectionImpl.createSession#1",
                this,
                tc);
        }
    }
    else {
        // A null connection is a legal state (e.g. managed environments);
        // callers must tolerate a null session.
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            SibTr.debug(this, tc, "jcaConnection is null, returning null jcaSess");
    }
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.exit(this, tc, "createJcaSession", jcaSess);
    return jcaSess;
}
/**
 * Registers an extra button to be shown on the tool strip for the given layer.
 *
 * @param layerId id of the layer the button belongs to
 * @param button the button widget to add
 * @param position the position at which the button should be inserted
 */
@Api
public void addButton(String layerId, ToolStripButton button, int position) {
    extraButtons.add(new ExtraButton(constructIdSaveLayerId(layerId), button, position));
}
def readinto(self, data):
    """Read data from the ring buffer into a user-provided buffer.

    This advances the read index after reading;
    calling :meth:`advance_read_index` is *not* necessary.

    :param data: The memory where the data should be stored.
    :type data: CData pointer or buffer
    :returns: The number of elements read, which may be less than
        the size of *data*.
    :rtype: int
    """
    try:
        data = self._ffi.from_buffer(data)
    except TypeError:
        pass  # input is not a buffer
    # Size is measured in ring-buffer elements, not bytes; reject buffers
    # that are not a whole number of elements.
    size, rest = divmod(self._ffi.sizeof(data), self.elementsize)
    if rest:
        raise ValueError('data size must be multiple of elementsize')
    return self._lib.PaUtil_ReadRingBuffer(self._ptr, data, size)
def create_session(
    self,
    kind: SessionKind,
    proxy_user: str = None,
    jars: List[str] = None,
    py_files: List[str] = None,
    files: List[str] = None,
    driver_memory: str = None,
    driver_cores: int = None,
    executor_memory: str = None,
    executor_cores: int = None,
    num_executors: int = None,
    archives: List[str] = None,
    queue: str = None,
    name: str = None,
    spark_conf: Dict[str, Any] = None,
) -> Session:
    """Create a new session in Livy.

    The py_files, files, jars and archives arguments are lists of URLs,
    e.g. ["s3://bucket/object", "hdfs://path/to/file", ...] and must be
    reachable by the Spark driver process. If the provided URL has no
    scheme, it's considered to be relative to the default file system
    configured in the Livy server.

    URLs in the py_files argument are copied to a temporary staging area
    and inserted into Python's sys.path ahead of the standard library
    paths. This allows you to import .py, .zip and .egg files in Python.

    URLs for jars, py_files, files and archives arguments are all copied
    to the same working directory on the Spark cluster.

    The driver_memory and executor_memory arguments have the same format
    as JVM memory strings with a size unit suffix ("k", "m", "g" or "t")
    (e.g. 512m, 2g).

    See https://spark.apache.org/docs/latest/configuration.html for more
    information on Spark configuration properties.

    :param kind: The kind of session to create.
    :param proxy_user: User to impersonate when starting the session.
    :param jars: URLs of jars to be used in this session.
    :param py_files: URLs of Python files to be used in this session.
    :param files: URLs of files to be used in this session.
    :param driver_memory: Amount of memory to use for the driver process
        (e.g. '512m').
    :param driver_cores: Number of cores to use for the driver process.
    :param executor_memory: Amount of memory to use per executor process
        (e.g. '512m').
    :param executor_cores: Number of cores to use for each executor.
    :param num_executors: Number of executors to launch for this session.
    :param archives: URLs of archives to be used in this session.
    :param queue: The name of the YARN queue to which submitted.
    :param name: The name of this session.
    :param spark_conf: Spark configuration properties.
    """
    # Older Livy servers accept a narrower set of session kinds.
    if self.legacy_server():
        valid_kinds = VALID_LEGACY_SESSION_KINDS
    else:
        valid_kinds = VALID_SESSION_KINDS
    if kind not in valid_kinds:
        raise ValueError(
            f"{kind} is not a valid session kind for a Livy server of "
            f"this version (should be one of {valid_kinds})"
        )
    # Only non-None options are included in the request payload.
    optional = [
        ("proxyUser", proxy_user),
        ("jars", jars),
        ("pyFiles", py_files),
        ("files", files),
        ("driverMemory", driver_memory),
        ("driverCores", driver_cores),
        ("executorMemory", executor_memory),
        ("executorCores", executor_cores),
        ("numExecutors", num_executors),
        ("archives", archives),
        ("queue", queue),
        ("name", name),
        ("conf", spark_conf),
    ]
    body = {"kind": kind.value}
    body.update((key, value) for key, value in optional if value is not None)
    data = self._client.post("/sessions", data=body)
    return Session.from_json(data)
def fetch(self, url, path):
    """ Downloads the given url into directory ``path``.

    If a file with the same name already exists locally and its size matches
    the remote file, the download is skipped.

    :param url:
        The url to be downloaded.
    :type url:
        String
    :param path:
        The directory path to where the image should be stored
    :type path:
        String
    :returns:
        The full local path of the (possibly pre-existing) file.
    """
    segments = url.split('/')
    filename = segments[-1]
    # remove query parameters from the filename
    filename = filename.split('?')[0]
    self.output('Downloading: %s' % filename, normal=True, arrow=True)
    target = join(path, filename)
    if exists(target) and getsize(target) == self.get_remote_file_size(url):
        # Same name and same size: assume identical content and skip.
        self.output('%s already exists on your system' % filename, normal=True, color='green', indent=1)
    else:
        # BUG FIX: the original only downloaded when a same-named file was
        # already present with a different size; a missing file was never
        # fetched at all.
        fetch(url, path)
        self.output('stored at %s' % path, normal=True, color='green', indent=1)
    return target
def run_models(self):
    """ Run all models.

    Returns
    -------
    model
        Best model
    dict
        Metrics of the models
    """
    # Train each candidate; every call is expected to append its score to
    # self.max_scores and its fitted model to self.models in the same order
    # as self.model_names.
    self.linear_regression()
    self.lasso_regression()
    self.ridge_regression()
    self.elastic_net_regression()
    self.random_forest()
    self.ann()
    # Index of the model with max score
    best_model_index = self.max_scores.index(max(self.max_scores))
    # Store name of the optimal model
    self.best_model_name = self.model_names[best_model_index]
    # Store optimal model
    self.best_model = self.models[best_model_index]
    # NOTE(review): despite the docstring, only the metrics dict is returned;
    # the best model is exposed via self.best_model instead.
    return self.metrics
java | @SuppressWarnings("unchecked")
public static <T> Parcelable wrap(T input) {
if(input == null){
return null;
}
return wrap(input.getClass(), input);
} |
def connect(self, devicelist, calibration=True):
    """Establish a connection to one or more SK8 devices.

    Given a list of 1 or more :class:`ScanResult` objects, this method will attempt
    to create a connection to each SK8 in sequence. It will return when
    all connections have been attempted, although they may not all have succeeded. In
    addition, the dongle has a limit on simultaneous connections, which you can
    retrieve by calling :meth:`get_supported_connections`. If the number of
    supplied device names exceeds this value then the method will abort
    immediately.

    Args:
        devicelist (list): a list of :class:`ScanResult` instances, one for each
            SK8 you wish to create a connection to.
        calibration (bool): True if calibration data should be loaded post-connection,
            for each device (if available).

    Returns:
        tuple (`result`, `devices`), where `result` is a bool indicating if
        connections were successfully made to all given devices. If True,
        `devices` will contain a list of :class:`SK8` instances representing
        the connected SK8 devices. If False, `devices` will contain a smaller
        number of :class:`SK8` instances depending on the number of connections
        that succeeded (possibly 0).
    """
    if not isinstance(devicelist, list):
        devicelist = [devicelist]
    logger.debug('Connecting to {} devices'.format(len(devicelist)))
    if len(devicelist) > self.supported_connections:
        # BUG FIX: this used the root logger via `logging.error`, bypassing
        # the module's configured `logger`; use the module logger like every
        # other message in this method.
        logger.error('Dongle firmware supports max {} connections, {} device connections requested!'.format(self.supported_connections, len(devicelist)))
        return (False, [])
    # TODO check number of active connections and fail if exceeds max
    connected_devices = []
    all_connected = True
    for dev in devicelist:
        logger.info('Connecting to {} (name={})...'.format(dev.addr, dev.name))
        self._set_state(self._STATE_CONNECTING)
        self.api.ble_cmd_gap_connect_direct(dev.raw_addr, 0, 6, 14, 100, 50)
        self._wait_for_state(self._STATE_CONNECTING, 5)
        if self.state != self._STATE_CONNECTED:
            # `Logger.warn` is a deprecated alias of `warning`.
            logger.warning('Connection failed!')
            # send end procedure to cancel connection attempt
            self._set_state(self._STATE_GAP_END)
            self.api.ble_cmd_gap_end_procedure()
            self._wait_for_state(self._STATE_GAP_END)
            all_connected = False
            continue
        conn_handle = self.conn_handles[-1]
        logger.info('Connection OK, handle is 0x{:02X}'.format(conn_handle))
        sk8 = SK8(self, conn_handle, dev, calibration)
        self._add_device(sk8)
        connected_devices.append(sk8)
        sk8._discover_services()
        time.sleep(0.1)  # TODO
    return (all_connected, connected_devices)
async def take(self, tube, timeout=None):
    """
    Get a task from queue for execution.

    Waits `timeout` seconds until a READY task appears in the queue.
    If `timeout` is `None` - waits forever.

    Returns tarantool tuple object.
    """
    cmd = tube.cmd("take")
    args = ()
    # The server-side take call accepts an optional timeout argument;
    # omitting it means "wait forever".
    if timeout is not None:
        args += (timeout,)
    res = await self.tnt.call(cmd, args)
    return res
/**
 * Injects all data exporter beans found in the application context; the
 * dependency is optional, so this setter is skipped when no exporters exist.
 *
 * @param dataExporters every IDataExporter bean discovered by Spring
 */
@Autowired(required = false)
public void setDataExporters(Collection<IDataExporter<? extends Object>> dataExporters) {
    this.dataExporters = dataExporters;
}
/**
 * Creates an asset in the given space and environment. When the asset
 * carries an id, that id is used for creation; otherwise the API assigns one.
 * The asset's system metadata is detached before the request (so it is not
 * sent to the API) and restored afterwards, even on failure.
 *
 * @param spaceId id of the space to create the asset in; must not be null
 * @param environmentId id of the environment; must not be null
 * @param asset the asset payload; must not be null
 * @return the created asset as returned by the API
 */
public CMAAsset create(String spaceId, String environmentId, CMAAsset asset) {
    assertNotNull(spaceId, "spaceId");
    assertNotNull(environmentId, "environmentId");
    assertNotNull(asset, "asset");
    final String assetId = asset.getId();
    final CMASystem sys = asset.getSystem();
    asset.setSystem(null);
    try {
        if (assetId == null) {
            return service.create(spaceId, environmentId, asset).blockingFirst();
        } else {
            return service.create(spaceId, environmentId, assetId, asset).blockingFirst();
        }
    } finally {
        // Restore the detached system metadata regardless of outcome.
        asset.setSystem(sys);
    }
}
/**
 * Applies a fold followed by a process window function to this windowed
 * stream, choosing an evicting or plain window operator depending on whether
 * an evictor is configured.
 *
 * @param initialValue initial accumulator value for the fold
 * @param foldFunction the fold applied to each incoming element (must not be a RichFunction)
 * @param windowFunction the window function applied to the folded result
 * @param foldResultType type information of the fold accumulator
 * @param windowResultType type information of the final result
 * @return the transformed stream
 * @deprecated fold is deprecated; it is also unsupported with merging window assigners.
 */
@Deprecated
@Internal
public <R, ACC> SingleOutputStreamOperator<R> fold(
        ACC initialValue,
        FoldFunction<T, ACC> foldFunction,
        ProcessWindowFunction<ACC, R, K, W> windowFunction,
        TypeInformation<ACC> foldResultType,
        TypeInformation<R> windowResultType) {
    if (foldFunction instanceof RichFunction) {
        throw new UnsupportedOperationException("FoldFunction can not be a RichFunction.");
    }
    if (windowAssigner instanceof MergingWindowAssigner) {
        throw new UnsupportedOperationException("Fold cannot be used with a merging WindowAssigner.");
    }
    //clean the closures
    windowFunction = input.getExecutionEnvironment().clean(windowFunction);
    foldFunction = input.getExecutionEnvironment().clean(foldFunction);
    final String opName = generateOperatorName(windowAssigner, trigger, evictor, foldFunction, windowFunction);
    KeySelector<T, K> keySel = input.getKeySelector();
    OneInputStreamOperator<T, R> operator;
    if (evictor != null) {
        // Evicting path: window contents are kept as raw stream records so
        // the evictor can inspect them; the fold runs inside the window function.
        @SuppressWarnings({"unchecked", "rawtypes"})
        TypeSerializer<StreamRecord<T>> streamRecordSerializer =
                (TypeSerializer<StreamRecord<T>>) new StreamElementSerializer(input.getType().createSerializer(getExecutionEnvironment().getConfig()));
        ListStateDescriptor<StreamRecord<T>> stateDesc =
                new ListStateDescriptor<>("window-contents", streamRecordSerializer);
        operator =
            new EvictingWindowOperator<>(windowAssigner,
                windowAssigner.getWindowSerializer(getExecutionEnvironment().getConfig()),
                keySel,
                input.getKeyType().createSerializer(getExecutionEnvironment().getConfig()),
                stateDesc,
                new InternalIterableProcessWindowFunction<>(new FoldApplyProcessWindowFunction<>(initialValue, foldFunction, windowFunction, foldResultType)),
                trigger,
                evictor,
                allowedLateness,
                lateDataOutputTag);
    } else {
        // Non-evicting path: fold incrementally into folding state.
        FoldingStateDescriptor<T, ACC> stateDesc = new FoldingStateDescriptor<>("window-contents",
            initialValue,
            foldFunction,
            foldResultType.createSerializer(getExecutionEnvironment().getConfig()));
        operator =
            new WindowOperator<>(windowAssigner,
                windowAssigner.getWindowSerializer(getExecutionEnvironment().getConfig()),
                keySel,
                input.getKeyType().createSerializer(getExecutionEnvironment().getConfig()),
                stateDesc,
                new InternalSingleValueProcessWindowFunction<>(windowFunction),
                trigger,
                allowedLateness,
                lateDataOutputTag);
    }
    return input.transform(opName, windowResultType, operator);
}
def remove_listener(self, event, callback, single=None, priority=None):
    """ Remove the event listener matching the same signature used for
    adding it. This will remove AT MOST one entry meeting the signature
    requirements. """
    listeners = self._events[event]
    for entry in listeners:
        if entry['callback'] != callback:
            continue
        if single is not None and entry['single'] != single:
            continue
        if priority is not None and entry['priority'] != priority:
            continue
        # First full match wins; remove it and stop.
        listeners.remove(entry)
        return
    raise KeyError('Listener not found for "%s": %s' % (event,
                   callback))
/**
 * Adds one or more members to a chat room.
 *
 * @param roomId id of the chat room; must be positive
 * @param members usernames to add; must be non-empty
 * @return the raw HTTP response wrapper from the PUT request
 * @throws APIConnectionException on connection failure
 * @throws APIRequestException when the API rejects the request
 */
public ResponseWrapper addChatRoomMember(long roomId, String... members)
        throws APIConnectionException, APIRequestException {
    Preconditions.checkArgument(roomId > 0, "room id is invalid");
    Preconditions.checkArgument(members != null && members.length > 0, "member should not be empty");
    // The API expects a JSON array of usernames as the request body.
    JsonArray array = new JsonArray();
    for (String username : members) {
        array.add(new JsonPrimitive(username));
    }
    return _httpClient.sendPut(_baseUrl + mChatRoomPath + "/" + roomId + "/members", array.toString());
}
/**
 * Rewrites in-content links of the given file. XML contents are validated
 * first and auto-corrected when the structure is broken; the (possibly
 * corrected) content then has its links rewritten and is written back.
 *
 * @param file the file whose content should be rewritten
 * @param relations the relations of the file (currently not consulted here)
 * @throws CmsException if reading, unmarshalling or writing the file fails
 */
protected void rewriteContent(CmsFile file, Collection<CmsRelation> relations) throws CmsException {
    LOG.info("Rewriting in-content links for " + file.getRootPath());
    CmsPair<String, String> contentAndEncoding = decode(file);
    String content = "";
    if (OpenCms.getResourceManager().getResourceType(file) instanceof CmsResourceTypeXmlContent) {
        CmsXmlContent contentXml = CmsXmlContentFactory.unmarshal(m_cms, file);
        try {
            contentXml.validateXmlStructure(new CmsXmlEntityResolver(m_cms));
        } catch (CmsException e) {
            // Broken XML structure: enable auto-correction and re-marshal.
            LOG.info("XML content was corrected automatically for resource " + file.getRootPath());
            contentXml.setAutoCorrectionEnabled(true);
            contentXml.correctXmlStructure(m_cms);
            try {
                content = new String(contentXml.marshal(), contentAndEncoding.getSecond());
            } catch (UnsupportedEncodingException e1) {
                //
            }
        }
    }
    // Fall back to the decoded original content when no correction happened.
    if (content.isEmpty()) {
        content = contentAndEncoding.getFirst();
    }
    String encodingForSave = contentAndEncoding.getSecond();
    String newContent = rewriteContentString(content);
    byte[] newContentBytes;
    try {
        newContentBytes = newContent.getBytes(encodingForSave);
    } catch (UnsupportedEncodingException e) {
        // Last resort: platform default encoding.
        newContentBytes = newContent.getBytes();
    }
    file.setContents(newContentBytes);
    m_cms.writeFile(file);
}
def next(self, day_of_week=None):
    """
    Modify to the next occurrence of a given day of the week.

    If no day_of_week is provided, modify to the next occurrence
    of the current day of the week. Use the supplied consts
    to indicate the desired day_of_week, ex. pendulum.MONDAY.

    :param day_of_week: The next day of week to reset to.
    :type day_of_week: int or None

    :rtype: Date
    """
    target = self.day_of_week if day_of_week is None else day_of_week
    if not SUNDAY <= target <= SATURDAY:
        raise ValueError("Invalid day of week")
    # Start from tomorrow and advance until the weekday matches.
    candidate = self.add(days=1)
    while candidate.day_of_week != target:
        candidate = candidate.add(days=1)
    return candidate
def parse(self, contents):
    """Parse the document.

    :param contents: The text contents of the document.
    :rtype: a *generator* of tokenized text.
    """
    # Split on the configured delimiter; blank segments are skipped.
    # (The original kept an unused segment counter `i`; removed.)
    for text in contents.split(self.delim):
        if not len(text.strip()):
            continue
        words = text.split()
        # Character offset of each word within the normalised text
        # (words re-joined with single spaces below).
        char_offsets = [0] + [
            int(_) for _ in np.cumsum([len(x) + 1 for x in words])[:-1]
        ]
        text = " ".join(words)
        yield {
            "text": text,
            "words": words,
            "pos_tags": [""] * len(words),
            "ner_tags": [""] * len(words),
            "lemmas": [""] * len(words),
            "dep_parents": [0] * len(words),
            "dep_labels": [""] * len(words),
            "char_offsets": char_offsets,
            "abs_char_offsets": char_offsets,
        }
/**
 * Begins a new MIME multipart part: terminates the previous part (if any)
 * with a CRLF, then writes the boundary line and the Content-Type header
 * followed by the blank line that precedes the part body.
 *
 * @param contentType the MIME type of the part about to be written
 * @throws IOException if writing to the underlying stream fails
 */
public void startPart(String contentType) throws IOException {
    // Close the previous part before emitting a new boundary.
    if (inPart)
        out.write(__CRLF);
    out.write(__DASHDASH);
    out.write(boundary);
    out.write(__CRLF);
    out.write("Content-Type: ");
    out.write(contentType);
    out.write(__CRLF);
    out.write(__CRLF);
    inPart = true;
}
def migrate_flow_collection(apps, schema_editor):
    """Migrate 'flow_collection' field to 'entity_type'.

    Copies each process's ``flow_collection`` value into both
    ``entity_type`` and ``entity_descriptor_schema``, failing fast when the
    referenced descriptor schema does not exist.
    """
    Process = apps.get_model('flow', 'Process')
    DescriptorSchema = apps.get_model('flow', 'DescriptorSchema')
    for process in Process.objects.all():
        process.entity_type = process.flow_collection
        process.entity_descriptor_schema = process.flow_collection
        if (process.entity_descriptor_schema is not None and
                not DescriptorSchema.objects.filter(slug=process.entity_descriptor_schema).exists()):
            # BUG FIX: corrected typo "Descriptow" in the error message.
            raise LookupError(
                "Descriptor schema '{}' referenced in 'entity_descriptor_schema' not "
                "found.".format(process.entity_descriptor_schema)
            )
        process.save()
def _get_address_override(endpoint_type=PUBLIC):
    """Returns any address overrides that the user has defined based on the
    endpoint type.

    Note: this function allows for the service name to be inserted into the
    address if the user specifies {service_name}.somehost.org.

    :param endpoint_type: the type of endpoint to retrieve the override
                          value for.
    :returns: any endpoint address or hostname that the user has overridden
              or None if an override is not present.
    """
    addr_override = config(ADDRESS_MAP[endpoint_type]['override'])
    if addr_override:
        # Substitute the running service's name into any {service_name} slot.
        return addr_override.format(service_name=service_name())
    return None
/**
 * Sorts each local partition of this data sink's output on the given
 * (possibly nested) field expression; repeated calls append additional
 * sort keys after the existing ones.
 *
 * @param fieldExpression the field expression to sort on
 * @param order the sort order applied to all flattened key fields
 * @return this DataSink, for chaining
 * @deprecated local output sorting on data sinks is deprecated.
 */
@Deprecated
@PublicEvolving
public DataSink<T> sortLocalOutput(String fieldExpression, Order order) {
    int numFields;
    int[] fields;
    Order[] orders;
    // compute flat field positions for (nested) sorting fields
    Keys.ExpressionKeys<T> ek = new Keys.ExpressionKeys<>(fieldExpression, this.type);
    fields = ek.computeLogicalKeyPositions();
    if (!Keys.ExpressionKeys.isSortKey(fieldExpression, this.type)) {
        throw new InvalidProgramException("Selected sort key is not a sortable type");
    }
    numFields = fields.length;
    orders = new Order[numFields];
    // The same order applies to every flattened field of the expression.
    Arrays.fill(orders, order);
    if (this.sortKeyPositions == null) {
        // set sorting info
        this.sortKeyPositions = fields;
        this.sortOrders = orders;
    } else {
        // append sorting info to existing info
        int oldLength = this.sortKeyPositions.length;
        int newLength = oldLength + numFields;
        this.sortKeyPositions = Arrays.copyOf(this.sortKeyPositions, newLength);
        this.sortOrders = Arrays.copyOf(this.sortOrders, newLength);
        for (int i = 0; i < numFields; i++) {
            this.sortKeyPositions[oldLength + i] = fields[i];
            this.sortOrders[oldLength + i] = orders[i];
        }
    }
    return this;
}
java | public static void addTopologyMaster(Map stormConf, StormTopology ret) {
// generate outputs
HashMap<String, StreamInfo> outputs = new HashMap<>();
List<String> list = JStormUtils.mk_list(TopologyMaster.FILED_CTRL_EVENT);
outputs.put(TOPOLOGY_MASTER_CONTROL_STREAM_ID, Thrift.outputFields(list));
list = JStormUtils.mk_list(TopologyMaster.FIELD_METRIC_WORKER, TopologyMaster.FIELD_METRIC_METRICS);
outputs.put(TOPOLOGY_MASTER_METRICS_STREAM_ID, Thrift.outputFields(list));
// TM --> heartbeat
list = JStormUtils.mk_list(TopologyMaster.FILED_HEARBEAT_EVENT);
outputs.put(TOPOLOGY_MASTER_HB_STREAM_ID, Thrift.outputFields(list));
// TM --> register metrics
list = JStormUtils.mk_list(TopologyMaster.FIELD_REGISTER_METRICS);
outputs.put(TOPOLOGY_MASTER_REGISTER_METRICS_STREAM_ID, Thrift.outputFields(list));
// TM --> register metrics resp
list = JStormUtils.mk_list(TopologyMaster.FIELD_REGISTER_METRICS_RESP);
outputs.put(TOPOLOGY_MASTER_REGISTER_METRICS_RESP_STREAM_ID, Thrift.outputFields(list));
list = JStormUtils.mk_list(TopologyMaster.FILED_UDF_STREAM_EVENT);
outputs.put(TopologyMaster.USER_DEFINED_STREAM, Thrift.outputFields(list));
IBolt topologyMaster = new TopologyMaster();
// generate inputs
Map<GlobalStreamId, Grouping> inputs = topoMasterInputs(ret);
// generate topology master which will be stored in topology
Bolt topologyMasterBolt = Thrift.mkBolt(inputs, topologyMaster, outputs, 1);
// add output stream to spout/bolt
for (Entry<String, Bolt> e : ret.get_bolts().entrySet()) {
Bolt bolt = e.getValue();
ComponentCommon common = bolt.get_common();
List<String> fields = JStormUtils.mk_list(TopologyMaster.FIELD_METRIC_WORKER, TopologyMaster.FIELD_METRIC_METRICS);
common.put_to_streams(TOPOLOGY_MASTER_METRICS_STREAM_ID, Thrift.directOutputFields(fields));
fields = JStormUtils.mk_list(TopologyMaster.FILED_HEARBEAT_EVENT);
common.put_to_streams(TOPOLOGY_MASTER_HB_STREAM_ID, Thrift.directOutputFields(fields));
fields = JStormUtils.mk_list(TopologyMaster.FILED_CTRL_EVENT);
common.put_to_streams(TOPOLOGY_MASTER_CONTROL_STREAM_ID, Thrift.directOutputFields(fields));
fields = JStormUtils.mk_list(TopologyMaster.FIELD_REGISTER_METRICS);
common.put_to_streams(TOPOLOGY_MASTER_REGISTER_METRICS_STREAM_ID, Thrift.directOutputFields(fields));
fields = JStormUtils.mk_list(TopologyMaster.FIELD_REGISTER_METRICS_RESP);
common.put_to_streams(TOPOLOGY_MASTER_REGISTER_METRICS_RESP_STREAM_ID, Thrift.directOutputFields(fields));
fields = JStormUtils.mk_list(TopologyMaster.FILED_UDF_STREAM_EVENT);
common.put_to_streams(TopologyMaster.USER_DEFINED_STREAM, Thrift.directOutputFields(fields));
GlobalStreamId stream = new GlobalStreamId(TOPOLOGY_MASTER_COMPONENT_ID, TOPOLOGY_MASTER_CONTROL_STREAM_ID);
common.put_to_inputs(stream, Thrift.mkDirectGrouping());
bolt.set_common(common);
}
for (Entry<String, SpoutSpec> kv : ret.get_spouts().entrySet()) {
SpoutSpec spout = kv.getValue();
ComponentCommon common = spout.get_common();
List<String> fields = JStormUtils.mk_list(TopologyMaster.FIELD_METRIC_WORKER, TopologyMaster.FIELD_METRIC_METRICS);
common.put_to_streams(TOPOLOGY_MASTER_METRICS_STREAM_ID, Thrift.directOutputFields(fields));
fields = JStormUtils.mk_list(TopologyMaster.FILED_HEARBEAT_EVENT);
common.put_to_streams(TOPOLOGY_MASTER_HB_STREAM_ID, Thrift.directOutputFields(fields));
fields = JStormUtils.mk_list(TopologyMaster.FILED_CTRL_EVENT);
common.put_to_streams(TOPOLOGY_MASTER_CONTROL_STREAM_ID, Thrift.directOutputFields(fields));
fields = JStormUtils.mk_list(TopologyMaster.FIELD_REGISTER_METRICS);
common.put_to_streams(TOPOLOGY_MASTER_REGISTER_METRICS_STREAM_ID, Thrift.directOutputFields(fields));
fields = JStormUtils.mk_list(TopologyMaster.FIELD_REGISTER_METRICS_RESP);
common.put_to_streams(TOPOLOGY_MASTER_REGISTER_METRICS_RESP_STREAM_ID, Thrift.directOutputFields(fields));
fields = JStormUtils.mk_list(TopologyMaster.FILED_UDF_STREAM_EVENT);
common.put_to_streams(TopologyMaster.USER_DEFINED_STREAM, Thrift.directOutputFields(fields));
GlobalStreamId stream = new GlobalStreamId(TOPOLOGY_MASTER_COMPONENT_ID, TOPOLOGY_MASTER_CONTROL_STREAM_ID);
common.put_to_inputs(stream, Thrift.mkDirectGrouping());
spout.set_common(common);
}
ret.put_to_bolts(TOPOLOGY_MASTER_COMPONENT_ID, topologyMasterBolt);
} |
def connection(self, shareable=False):
    """Return the thread-local steady DB-API 2 connection.

    The ``shareable`` parameter exists only for signature compatibility
    with ``PooledDB.connection()``; persistent connections are never
    shared between threads.
    """
    if hasattr(self.thread, 'connection'):
        con = self.thread.connection
    else:
        # First request on this thread: open a steady connection and
        # cache it on the thread-local storage.
        con = self.steady_connection()
        if not con.threadsafety():
            raise NotSupportedError("Database module is not thread-safe.")
        self.thread.connection = con
    con._ping_check()
    return con
java | public CalledRemoteApiCounter increment(String facadeName) {
if (facadeName == null) {
throw new IllegalArgumentException("The argument 'facadeName' should not be null.");
}
if (facadeCountMap == null) {
facadeCountMap = new LinkedHashMap<String, Integer>();
}
final Integer count = facadeCountMap.get(facadeName);
if (count != null) {
facadeCountMap.put(facadeName, count + 1);
} else {
facadeCountMap.put(facadeName, 1);
}
return this;
} |
def port_get_tag(port):
    '''
    Lists tags of the port.

    Args:
        port: A string - port name.
    Returns:
        List of tags (or empty list), False on failure.

    .. versionadded:: 2016.3.0

    CLI Example:
    .. code-block:: bash

        salt '*' openvswitch.port_get_tag tap0
    '''
    result = __salt__['cmd.run_all']('ovs-vsctl get port {0} tag'.format(port))
    return _stdout_list_split(result['retcode'], result['stdout'])
def check_available(self):
    """
    Check for availability of a service and provide run metrics.

    Probes ``self.url`` according to ``self.type``, harvests whatever
    metadata the endpoint exposes (title, abstract, keywords, bounding
    geometry), persists it on the Service row via queryset ``.update()``
    (which bypasses save signals and thus avoids recursion), then records
    a Check row with the success flag, response time and error message.
    """
    success = True
    start_time = datetime.datetime.utcnow()
    message = ''
    LOGGER.debug('Checking service id %s' % self.id)
    try:
        # Metadata collected from the remote endpoint; each field stays
        # None/empty when the service type does not expose it.
        title = None
        abstract = None
        keywords = []
        wkt_geometry = None
        srs = '4326'
        if self.type == 'OGC:CSW':
            ows = CatalogueServiceWeb(self.url)
            title = ows.identification.title
            abstract = ows.identification.abstract
            keywords = ows.identification.keywords
        if self.type == 'OGC:WMS':
            ows = get_wms_version_negotiate(self.url)
            title = ows.identification.title
            abstract = ows.identification.abstract
            keywords = ows.identification.keywords
            # Use the bounding box of the first root-level (parentless)
            # layer as the service geometry.
            for c in ows.contents:
                if ows.contents[c].parent is None:
                    wkt_geometry = bbox2wktpolygon(ows.contents[c].boundingBoxWGS84)
                    break
        if self.type == 'OGC:WMTS':
            ows = WebMapTileService(self.url)
            title = ows.identification.title
            abstract = ows.identification.abstract
            keywords = ows.identification.keywords
        if self.type == 'OSGeo:TMS':
            ows = TileMapService(self.url)
            title = ows.identification.title
            abstract = ows.identification.abstract
            keywords = ows.identification.keywords
        if self.type == 'ESRI:ArcGIS:MapServer':
            esri = ArcMapService(self.url)
            # get_esri_extent also reports the extent's spatial reference.
            extent, srs = get_esri_extent(esri)
            title = esri.mapName
            if len(title) == 0:
                # Fall back to deriving a name from the URL.
                title = get_esri_service_name(self.url)
            wkt_geometry = bbox2wktpolygon([
                extent['xmin'],
                extent['ymin'],
                extent['xmax'],
                extent['ymax']
            ])
        if self.type == 'ESRI:ArcGIS:ImageServer':
            esri = ArcImageService(self.url)
            extent, srs = get_esri_extent(esri)
            title = esri._json_struct['name']
            if len(title) == 0:
                title = get_esri_service_name(self.url)
            wkt_geometry = bbox2wktpolygon([
                extent['xmin'],
                extent['ymin'],
                extent['xmax'],
                extent['ymax']
            ])
        # The Hypermap types are only probed for reachability; a failed
        # urlopen raises and is caught below.
        if self.type == 'Hypermap:WorldMap':
            urllib2.urlopen(self.url)
        if self.type == 'Hypermap:WorldMapLegacy':
            urllib2.urlopen(self.url)
            title = 'Harvard WorldMap Legacy'
        if self.type == 'Hypermap:WARPER':
            urllib2.urlopen(self.url)
        # update title without raising a signal and recursion
        if title:
            self.title = title
            Service.objects.filter(id=self.id).update(title=title)
        if abstract:
            self.abstract = abstract
            Service.objects.filter(id=self.id).update(abstract=abstract)
        if keywords:
            for kw in keywords:
                # FIXME: persist keywords to Django model
                self.keywords.add(kw)
        if wkt_geometry:
            self.wkt_geometry = wkt_geometry
            Service.objects.filter(id=self.id).update(wkt_geometry=wkt_geometry)
        # Build the CSW metadata record and searchable anytext blob.
        xml = create_metadata_record(
            identifier=self.id_string,
            source=self.url,
            links=[[self.type, self.url]],
            format=self.type,
            type='service',
            title=title,
            abstract=abstract,
            keywords=keywords,
            wkt_geometry=self.wkt_geometry,
            srs=srs
        )
        anytexts = gen_anytext(title, abstract, keywords)
        Service.objects.filter(id=self.id).update(anytext=anytexts, xml=xml, csw_type='service')
    except Exception, e:
        # Any probe/parse failure marks the check as failed but is not
        # re-raised: the Check row below records the outcome either way.
        LOGGER.error(e, exc_info=True)
        message = str(e)
        success = False
    end_time = datetime.datetime.utcnow()
    delta = end_time - start_time
    response_time = '%s.%s' % (delta.seconds, delta.microseconds)
    check = Check(
        content_object=self,
        success=success,
        response_time=response_time,
        message=message
    )
    check.save()
    LOGGER.debug('Service checked in %s seconds, status is %s' % (response_time, success))
def advection(scalar, wind, deltas):
    r"""Calculate the advection of a scalar field by the wind.

    The dimension order of the arrays must match the order in which the
    wind components are given: for winds [u, v], the scalar and wind
    arrays are indexed x,y (x as rows, not columns).

    Parameters
    ----------
    scalar : N-dimensional array
        Quantity to be advected.
    wind : sequence of arrays
        Length-M sequence of N-dimensional arrays, one flow component per
        dimension (e.g. [u, v] for horizontal advection).
    deltas : sequence of float or ndarray
        Length-M sequence of grid spacings, one per dimension. When
        arrays are used, each must have one fewer item than ``scalar``
        along the corresponding axis.

    Returns
    -------
    N-dimensional array
        The advection at every grid point.
    """
    # Accept either a list of components or a pre-stacked array.
    components = _stack(wind)
    # With more than one component, reverse along the first dimension so
    # the wind components line up with the gradients taken from the
    # ..., y, x ordered array.
    if components.ndim > scalar.ndim:
        components = components[::-1]
    # gradient() yields one derivative per dimension; reverse the deltas
    # to match the dimension order, then stack into a single array.
    derivs = _stack(gradient(scalar, deltas=deltas[::-1]))
    # Promote to at least 2-D so the 1-D case survives the multiply/sum.
    derivs, components = atleast_2d(derivs, components)
    return (-derivs * components).sum(axis=0)
java | public static boolean startsWith(final boolean caseSensitive, final CharSequence text, final CharSequence prefix) {
if (text == null) {
throw new IllegalArgumentException("Text cannot be null");
}
if (prefix == null) {
throw new IllegalArgumentException("Prefix cannot be null");
}
if (text instanceof String && prefix instanceof String) {
return (caseSensitive ? ((String)text).startsWith((String)prefix) : startsWith(caseSensitive, text, 0, text.length(), prefix, 0, prefix.length()));
}
return startsWith(caseSensitive, text, 0, text.length(), prefix, 0, prefix.length());
} |
def main():
    """
    Example of self documenting (of sorts) code, via aikif.
    Calling the document_* helpers below builds an overview whose
    metadata is updated automatically.
    """
    fldr = mod_cfg.fldrs['program_path']
    programs = mod_prg.Programs('AIKIF Programs', fldr)
    for document in (document_core_programs, document_agents, document_examples):
        document(programs)
    # programs.list() would print every registered program
    programs.save(os.sep.join([fldr, 'examples', 'document_AIKIF.csv']))
    programs.collect_program_info('progress.md')
def read(self, filenames, encoding=None):
    """Read and parse a filename or a list of filenames.

    Files that cannot be opened are silently skipped, so callers may pass
    a list of candidate configuration locations (current directory, the
    user's home directory, a system-wide directory) and have every
    existing file parsed. A single filename is also accepted.

    Return the list of files that were successfully read.
    """
    # Order matters: on Python 2, str *is* bytes, so the bytes check must
    # run before the plain-str check to emit the deprecation warning.
    if PY2 and isinstance(filenames, bytes):
        # A little unholy magic for Python 2 so that people not using
        # unicode_literals can still use the library conveniently.
        warnings.warn(
            "You passed a bytestring as `filenames`. This will not work"
            " on Python 3. Use `cp.read_file()` or switch to using Unicode"
            " strings across the board.",
            DeprecationWarning,
            stacklevel=2,
        )
        filenames = [filenames]
    elif isinstance(filenames, str):
        filenames = [filenames]
    read_ok = []
    for name in filenames:
        try:
            with open(name, encoding=encoding) as stream:
                self._read(stream, name)
        except IOError:
            continue
        read_ok.append(name)
    return read_ok
java | public int update(@NotNull SqlQuery query) {
return withCurrentTransaction(query, tx -> {
logQuery(query);
try (PreparedStatement ps = tx.getConnection().prepareStatement(query.getSql())) {
prepareStatementFromQuery(ps, query);
long startTime = currentTimeMillis();
int count = ps.executeUpdate();
logQueryExecution(query, currentTimeMillis() - startTime);
return count;
}
});
} |
java | public String getLocalizedResource(String namespace, String resourceId) throws Exception {
String resourceValue = "";
Class r = Class.forName(namespace + "$string");
Field f = r.getField(resourceId);
resourceValue = getCurrentActivity().getResources().getString(f.getInt(f));
return resourceValue;
} |
java | @Nullable
public Map<Sha256Hash, Integer> getAppearsInHashes() {
return appearsInHashes != null ? ImmutableMap.copyOf(appearsInHashes) : null;
} |
def trigger(self, identifier, force=True):
    """Trigger an upgrade task, optionally forcing it via a query param."""
    self.debug(identifier)
    url = "%s/%s" % (self.local_base_url, identifier)
    # Only send the 'force' parameter when it is truthy.
    params = {'force': force} if force else {}
    query = urllib.urlencode(params)
    if query:
        url = url + "?" + query
    return self.core.update(url, {})
java | @Override
public Collection<Person> findBySurname(final FinderObject owner,
final String surname) {
final Root root = (Root) owner;
final RootVisitor visitor = new RootVisitor();
root.accept(visitor);
final List<Person> matches = new ArrayList<>();
for (final Person person : visitor.getPersons()) {
final String personSurname = person.getSurname();
if (personSurname.equals(surname)) {
matches.add(person);
}
}
Collections.sort(matches, new PersonComparator());
return matches;
} |
java | public void writeAttribute(String attributeName, String value) throws IOException {
this.attribute(null, attributeName, value);
} |
def _main():
    """CLI entry point: dispatch to the advertise or seek subcommand."""
    import sys
    if len(sys.argv) < 2:
        _usage('Expected at least one parameter!')
    sc = sys.argv[1]
    options = sys.argv[2:]
    if sc == 'a' or sc == 'advertise':
        # advertise takes: stype port [advertisername [sname [slocation]]]
        if len(options) > 5 or len(options) < 2:
            _usage()
        stype, port = options[:2]
        advertisername = options[2] if len(options) > 2 else None
        sname = options[3] if len(options) > 3 else ''
        slocation = options[4] if len(options) > 4 else ''
        service = Service(stype, port, sname, slocation)
        advertiser = Advertiser([service], advertisername)
        advertiser.run()
    elif sc == 's' or sc == 'seek':
        # seek takes: [aname [stype [sname]]]
        if len(options) > 4:
            _usage()
        aname = options[0] if len(options) > 0 else ''
        stype = options[1] if len(options) > 1 else ''
        sname = options[2] if len(options) > 2 else ''
        se = Seeker(aname, stype, sname, find_callback=_print_result, error_callback=_print_error)
        se.run()
    else:
        # Bug fix: report the offending subcommand (sys.argv[1]), not the
        # program name (sys.argv[0]) as the original did.
        _usage('Unknown subcommand "' + sc + '"')
java | public static void permutationVector( DMatrixSparseCSC P , int[] vector) {
if( P.numCols != P.numRows ) {
throw new MatrixDimensionException("Expected a square matrix");
} else if( P.nz_length != P.numCols ) {
throw new IllegalArgumentException("Expected N non-zero elements in permutation matrix");
} else if( vector.length < P.numCols ) {
throw new IllegalArgumentException("vector is too short");
}
int M = P.numCols;
for (int i = 0; i < M; i++) {
if( P.col_idx[i+1] != i+1 )
throw new IllegalArgumentException("Unexpected number of elements in a column");
vector[P.nz_rows[i]] = i;
}
} |
java | public String getDisplayName(ULocale locale)
{
String dispName = name;
try {
ResourceBundle bundle = UResourceBundle.getBundleInstance("android.icu.impl.data.HolidayBundle", locale);
dispName = bundle.getString(name);
}
catch (MissingResourceException e) {
}
return dispName;
} |
def is_rpm_installed():
    """Return True if the ``rpm`` command is available on this system."""
    try:
        result = subprocess.run(["rpm", "--usage"],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
    except FileNotFoundError:
        # The rpm executable is not on PATH at all.
        return False
    # Exit code 0 means rpm ran successfully.
    return result.returncode == 0
/**
 * Sets the weekyear of the given instant while preserving the
 * week-of-weekyear (capped to the target year's week count) and the
 * day-of-week.
 *
 * @param instant the millisecond instant to adjust
 * @param year    the weekyear to set; its absolute value is bounds-checked
 * @return the adjusted millisecond instant
 */
public long set(long instant, int year) {
    FieldUtils.verifyValueBounds(this, Math.abs(year),
        iChronology.getMinYear(), iChronology.getMaxYear());
    //
    // Do nothing if no real change is requested.
    //
    int thisWeekyear = get( instant );
    if ( thisWeekyear == year ) {
        return instant;
    }
    //
    // Calculate the DayOfWeek (to be preserved).
    //
    int thisDow = iChronology.getDayOfWeek(instant);
    //
    // Calculate the maximum weeks in the target year.
    //
    int weeksInFromYear = iChronology.getWeeksInYear( thisWeekyear );
    int weeksInToYear = iChronology.getWeeksInYear( year );
    int maxOutWeeks = (weeksInToYear < weeksInFromYear) ?
        weeksInToYear : weeksInFromYear;
    //
    // Get the current week of the year. This will be preserved in
    // the output unless it is greater than the maximum possible
    // for the target weekyear. In that case it is adjusted
    // to the maximum possible.
    //
    int setToWeek = iChronology.getWeekOfWeekyear(instant);
    if ( setToWeek > maxOutWeeks ) {
        setToWeek = maxOutWeeks;
    }
    //
    // Get a working copy of the current date-time.
    // This can be a convenience for debugging.
    //
    long workInstant = instant; // Get a copy
    //
    // Attempt to get close to the proper weekyear.
    // Note - we cannot currently call ourself, so we just call
    // set for the year. This at least gets us close.
    //
    workInstant = iChronology.setYear( workInstant, year );
    //
    // Calculate the weekyear number for the get close to value
    // (which might not be equal to the year just set).
    //
    int workWoyYear = get( workInstant );
    //
    // At most we are off by one year, which can be "fixed" by
    // adding/subtracting a week.
    //
    if ( workWoyYear < year ) {
        workInstant += DateTimeConstants.MILLIS_PER_WEEK;
    } else if ( workWoyYear > year ) {
        workInstant -= DateTimeConstants.MILLIS_PER_WEEK;
    }
    //
    // Set the proper week in the current weekyear.
    //
    // BEGIN: possible set WeekOfWeekyear logic.
    int currentWoyWeek = iChronology.getWeekOfWeekyear(workInstant);
    // No range check required (we already know it is OK).
    workInstant = workInstant + (setToWeek - currentWoyWeek)
        * (long)DateTimeConstants.MILLIS_PER_WEEK;
    // END: possible set WeekOfWeekyear logic.
    //
    // Reset DayOfWeek to previous value.
    //
    // Note: This works fine, but it ideally shouldn't invoke other
    // fields from within a field.
    workInstant = iChronology.dayOfWeek().set( workInstant, thisDow );
    //
    // Return result.
    //
    return workInstant;
}
java | public void setImage(String src) {
if (src == null) {
throw new RuntimeException("Cannot set image to null. Call removeImage() if you wanted to remove the image");
}
if (imageElem == null) {
imageElem = Document.get().createImageElement();
// must be first child according to jquery.mobile-1.4.x.css
if (anchor != null) anchor.insertFirst(imageElem);
else insertFirstChild(imageElem);
}
imageElem.setAttribute("src", src);
getElement().addClassName(STYLE_UI_LI_HAS_THUMB);
} |
/**
 * Validates that every argument after the task name is a known option and
 * carries a value.
 *
 * @param args the full command-line argument list; {@code args[0]} is the
 *             task name and is skipped
 * @throws IllegalArgumentException if an argument is unknown or lacks a
 *                                  value
 */
protected void validateArgumentList(String[] args) {
    checkRequiredArguments(args);
    // Skip args[0]: it is the task name, not an option.
    // Each remaining token is expected to be a single "name=value" pair.
    // NOTE(review): the original comment described stepping "in pairs" with
    // a special interactive -password case, but the loop below advances one
    // token at a time and requires every argument to carry "=value" —
    // confirm whether -password prompting is handled elsewhere.
    for (int i = 1; i < args.length; i++) {
        String argPair = args[i];
        String arg = null;
        String value = null;
        if (argPair.contains("=")) {
            arg = argPair.split("=")[0];
            value = getValue(argPair);
        } else {
            arg = argPair;
        }
        if (!isKnownArgument(arg)) {
            throw new IllegalArgumentException(getMessage("invalidArg", arg));
        } else {
            // An argument without "=value" (value still null) is rejected.
            if (value == null) {
                throw new IllegalArgumentException(getMessage("missingValue", arg));
            }
        }
    }
}
/**
 * Grammar rule matching any single inline information field. Tries each
 * individual field rule in order and labels the match {@code InlineField}.
 */
Rule InlineField() {
    return FirstOf(IfieldArea(), IfieldBook(), IfieldComposer(),
        IfieldDiscography(), IfieldGroup(), IfieldHistory(), IfieldLength(),
        IfieldMeter(), IfieldNotes(), IfieldOrigin(), IfieldPart(),
        IfieldTempo(), IfieldRhythm(), IfieldSource(), IfieldTitle(),
        IfieldVoice(), IfieldWords(), IfieldLyrics(), IfieldTranscription(),
        IfieldKey(), IfieldUserdefPlay(), IfieldUserdefPrint(),
        IfieldInstruction(), IUnusedField()
    ).label(InlineField);
}
java | private void executeRequestInterceptors(final IntuitMessage intuitMessage) throws FMSException {
Iterator<Interceptor> itr = requestInterceptors.iterator();
while (itr.hasNext()) {
Interceptor interceptor = itr.next();
interceptor.execute(intuitMessage);
}
} |
/**
 * Fetches the commerce tier price entry matching the given UUID within the
 * given group. Delegates directly to the persistence layer's
 * {@code fetchByUUID_G}; presumably returns {@code null} when no match
 * exists (fetch-by convention) — confirm against the persistence API.
 */
@Override
public CommerceTierPriceEntry fetchCommerceTierPriceEntryByUuidAndGroupId(
    String uuid, long groupId) {
    return commerceTierPriceEntryPersistence.fetchByUUID_G(uuid, groupId);
}
def entry_detail(request, slug, template='djournal/entry_detail.html'):
    '''Returns a response of an individual entry, for the given slug.'''
    # 404 when no public entry matches the slug.
    entry = get_object_or_404(Entry.public, slug=slug)
    return render_to_response(
        template,
        {'entry': entry},
        context_instance=RequestContext(request),
    )
java | @Pure
public static double toESRI_m(double measure) {
if (Double.isInfinite(measure) || Double.isNaN(measure)) {
return ESRI_NAN;
}
return measure;
} |
java | private void updateComplex(String string)
{
if (string != null && StringUtils.getExpressionKey(string) != null
&& !string.equals(StringUtils.getExpressionKey(string)))
{
complex = true;
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.