Unnamed: 0 (int64, 0-6.45k) | func (string, lengths 37-161k) | target (class label, 2 classes) | project (string, lengths 33-167) |
---|---|---|---|
615 |
public class BroadleafThymeleafServletContextTemplateResolver extends ServletContextTemplateResolver {
protected String templateFolder = "";
@Override
protected String computeResourceName(final TemplateProcessingParameters templateProcessingParameters) {
String themePath = null;
Theme theme = BroadleafRequestContext.getBroadleafRequestContext().getTheme();
if (theme != null && theme.getPath() != null) {
themePath = theme.getPath();
}
checkInitialized();
final String templateName = templateProcessingParameters.getTemplateName();
Validate.notNull(templateName, "Template name cannot be null");
String unaliasedName = this.getTemplateAliases().get(templateName);
if (unaliasedName == null) {
unaliasedName = templateName;
}
final StringBuilder resourceName = new StringBuilder();
String prefix = this.getPrefix();
if (prefix != null && ! prefix.trim().equals("")) {
if (themePath != null) {
resourceName.append(prefix).append(themePath).append('/').append(templateFolder);
}
}
resourceName.append(unaliasedName);
String suffix = this.getSuffix();
if (suffix != null && ! suffix.trim().equals("")) {
resourceName.append(suffix);
}
return resourceName.toString();
}
public String getTemplateFolder() {
return templateFolder;
}
public void setTemplateFolder(String templateFolder) {
this.templateFolder = templateFolder;
}
}
| 0true | common_src_main_java_org_broadleafcommerce_common_web_BroadleafThymeleafServletContextTemplateResolver.java |
5,966 |
public final class SuggestUtils {
public static Comparator<SuggestWord> LUCENE_FREQUENCY = new SuggestWordFrequencyComparator();
public static Comparator<SuggestWord> SCORE_COMPARATOR = SuggestWordQueue.DEFAULT_COMPARATOR;
private SuggestUtils() {
// utils!!
}
public static DirectSpellChecker getDirectSpellChecker(DirectSpellcheckerSettings suggestion) {
DirectSpellChecker directSpellChecker = new DirectSpellChecker();
directSpellChecker.setAccuracy(suggestion.accuracy());
Comparator<SuggestWord> comparator;
switch (suggestion.sort()) {
case SCORE:
comparator = SCORE_COMPARATOR;
break;
case FREQUENCY:
comparator = LUCENE_FREQUENCY;
break;
default:
throw new ElasticsearchIllegalArgumentException("Illegal suggest sort: " + suggestion.sort());
}
directSpellChecker.setComparator(comparator);
directSpellChecker.setDistance(suggestion.stringDistance());
directSpellChecker.setMaxEdits(suggestion.maxEdits());
directSpellChecker.setMaxInspections(suggestion.maxInspections());
directSpellChecker.setMaxQueryFrequency(suggestion.maxTermFreq());
directSpellChecker.setMinPrefix(suggestion.prefixLength());
directSpellChecker.setMinQueryLength(suggestion.minWordLength());
directSpellChecker.setThresholdFrequency(suggestion.minDocFreq());
directSpellChecker.setLowerCaseTerms(false);
return directSpellChecker;
}
public static BytesRef join(BytesRef separator, BytesRef result, BytesRef... toJoin) {
int len = separator.length * toJoin.length - 1;
for (BytesRef br : toJoin) {
len += br.length;
}
result.grow(len);
return joinPreAllocated(separator, result, toJoin);
}
public static BytesRef joinPreAllocated(BytesRef separator, BytesRef result, BytesRef... toJoin) {
result.length = 0;
result.offset = 0;
for (int i = 0; i < toJoin.length - 1; i++) {
BytesRef br = toJoin[i];
System.arraycopy(br.bytes, br.offset, result.bytes, result.offset, br.length);
result.offset += br.length;
System.arraycopy(separator.bytes, separator.offset, result.bytes, result.offset, separator.length);
result.offset += separator.length;
}
final BytesRef br = toJoin[toJoin.length-1];
System.arraycopy(br.bytes, br.offset, result.bytes, result.offset, br.length);
result.length = result.offset + br.length;
result.offset = 0;
return result;
}
public static abstract class TokenConsumer {
protected CharTermAttribute charTermAttr;
protected PositionIncrementAttribute posIncAttr;
protected OffsetAttribute offsetAttr;
public void reset(TokenStream stream) {
charTermAttr = stream.addAttribute(CharTermAttribute.class);
posIncAttr = stream.addAttribute(PositionIncrementAttribute.class);
offsetAttr = stream.addAttribute(OffsetAttribute.class);
}
protected BytesRef fillBytesRef(BytesRef spare) {
spare.offset = 0;
spare.length = spare.bytes.length;
char[] source = charTermAttr.buffer();
UnicodeUtil.UTF16toUTF8(source, 0, charTermAttr.length(), spare);
return spare;
}
public abstract void nextToken() throws IOException;
public void end() {}
}
public static int analyze(Analyzer analyzer, BytesRef toAnalyze, String field, TokenConsumer consumer, CharsRef spare) throws IOException {
UnicodeUtil.UTF8toUTF16(toAnalyze, spare);
return analyze(analyzer, spare, field, consumer);
}
public static int analyze(Analyzer analyzer, CharsRef toAnalyze, String field, TokenConsumer consumer) throws IOException {
TokenStream ts = analyzer.tokenStream(
field, new FastCharArrayReader(toAnalyze.chars, toAnalyze.offset, toAnalyze.length)
);
return analyze(ts, consumer);
}
public static int analyze(TokenStream stream, TokenConsumer consumer) throws IOException {
stream.reset();
consumer.reset(stream);
int numTokens = 0;
while (stream.incrementToken()) {
consumer.nextToken();
numTokens++;
}
consumer.end();
stream.close();
return numTokens;
}
public static SuggestMode resolveSuggestMode(String suggestMode) {
suggestMode = suggestMode.toLowerCase(Locale.US);
if ("missing".equals(suggestMode)) {
return SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX;
} else if ("popular".equals(suggestMode)) {
return SuggestMode.SUGGEST_MORE_POPULAR;
} else if ("always".equals(suggestMode)) {
return SuggestMode.SUGGEST_ALWAYS;
} else {
throw new ElasticsearchIllegalArgumentException("Illegal suggest mode " + suggestMode);
}
}
public static Suggest.Suggestion.Sort resolveSort(String sortVal) {
if ("score".equals(sortVal)) {
return Suggest.Suggestion.Sort.SCORE;
} else if ("frequency".equals(sortVal)) {
return Suggest.Suggestion.Sort.FREQUENCY;
} else {
throw new ElasticsearchIllegalArgumentException("Illegal suggest sort " + sortVal);
}
}
public static StringDistance resolveDistance(String distanceVal) {
if ("internal".equals(distanceVal)) {
return DirectSpellChecker.INTERNAL_LEVENSHTEIN;
} else if ("damerau_levenshtein".equals(distanceVal) || "damerauLevenshtein".equals(distanceVal)) {
return new LuceneLevenshteinDistance();
} else if ("levenstein".equals(distanceVal)) {
return new LevensteinDistance();
//TODO Jaro and Winkler are 2 people - so apply same naming logic as damerau_levenshtein
} else if ("jarowinkler".equals(distanceVal)) {
return new JaroWinklerDistance();
} else if ("ngram".equals(distanceVal)) {
return new NGramDistance();
} else {
throw new ElasticsearchIllegalArgumentException("Illegal distance option " + distanceVal);
}
}
public static class Fields {
public static final ParseField STRING_DISTANCE = new ParseField("string_distance");
public static final ParseField SUGGEST_MODE = new ParseField("suggest_mode");
public static final ParseField MAX_EDITS = new ParseField("max_edits");
public static final ParseField MAX_INSPECTIONS = new ParseField("max_inspections");
// TODO some of these constants are the same as MLT constants and
// could be moved to a shared class for maintaining consistency across
// the platform
public static final ParseField MAX_TERM_FREQ = new ParseField("max_term_freq");
public static final ParseField PREFIX_LENGTH = new ParseField("prefix_length", "prefix_len");
public static final ParseField MIN_WORD_LENGTH = new ParseField("min_word_length", "min_word_len");
public static final ParseField MIN_DOC_FREQ = new ParseField("min_doc_freq");
public static final ParseField SHARD_SIZE = new ParseField("shard_size");
}
public static boolean parseDirectSpellcheckerSettings(XContentParser parser, String fieldName,
DirectSpellcheckerSettings suggestion) throws IOException {
if ("accuracy".equals(fieldName)) {
suggestion.accuracy(parser.floatValue());
} else if (Fields.SUGGEST_MODE.match(fieldName)) {
suggestion.suggestMode(SuggestUtils.resolveSuggestMode(parser.text()));
} else if ("sort".equals(fieldName)) {
suggestion.sort(SuggestUtils.resolveSort(parser.text()));
} else if (Fields.STRING_DISTANCE.match(fieldName)) {
suggestion.stringDistance(SuggestUtils.resolveDistance(parser.text()));
} else if (Fields.MAX_EDITS.match(fieldName)) {
suggestion.maxEdits(parser.intValue());
if (suggestion.maxEdits() < 1 || suggestion.maxEdits() > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE) {
throw new ElasticsearchIllegalArgumentException("Illegal max_edits value " + suggestion.maxEdits());
}
} else if (Fields.MAX_INSPECTIONS.match(fieldName)) {
suggestion.maxInspections(parser.intValue());
} else if (Fields.MAX_TERM_FREQ.match(fieldName)) {
suggestion.maxTermFreq(parser.floatValue());
} else if (Fields.PREFIX_LENGTH.match(fieldName)) {
suggestion.prefixLength(parser.intValue());
} else if (Fields.MIN_WORD_LENGTH.match(fieldName)) {
suggestion.minQueryLength(parser.intValue());
} else if (Fields.MIN_DOC_FREQ.match(fieldName)) {
suggestion.minDocFreq(parser.floatValue());
} else {
return false;
}
return true;
}
public static boolean parseSuggestContext(XContentParser parser, MapperService mapperService, String fieldName,
SuggestionSearchContext.SuggestionContext suggestion) throws IOException {
if ("analyzer".equals(fieldName)) {
String analyzerName = parser.text();
Analyzer analyzer = mapperService.analysisService().analyzer(analyzerName);
if (analyzer == null) {
throw new ElasticsearchIllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists");
}
suggestion.setAnalyzer(analyzer);
} else if ("field".equals(fieldName)) {
suggestion.setField(parser.text());
} else if ("size".equals(fieldName)) {
suggestion.setSize(parser.intValue());
} else if (Fields.SHARD_SIZE.match(fieldName)) {
suggestion.setShardSize(parser.intValue());
} else {
return false;
}
return true;
}
public static void verifySuggestion(MapperService mapperService, BytesRef globalText, SuggestionContext suggestion) {
// Verify options and set defaults
if (suggestion.getField() == null) {
throw new ElasticsearchIllegalArgumentException("The required field option is missing");
}
if (suggestion.getText() == null) {
if (globalText == null) {
throw new ElasticsearchIllegalArgumentException("The required text option is missing");
}
suggestion.setText(globalText);
}
if (suggestion.getAnalyzer() == null) {
suggestion.setAnalyzer(mapperService.searchAnalyzer());
}
if (suggestion.getShardSize() == -1) {
suggestion.setShardSize(Math.max(suggestion.getSize(), 5));
}
}
public static ShingleTokenFilterFactory.Factory getShingleFilterFactory(Analyzer analyzer) {
if (analyzer instanceof NamedAnalyzer) {
analyzer = ((NamedAnalyzer)analyzer).analyzer();
}
if (analyzer instanceof CustomAnalyzer) {
final CustomAnalyzer a = (CustomAnalyzer) analyzer;
final TokenFilterFactory[] tokenFilters = a.tokenFilters();
for (TokenFilterFactory tokenFilterFactory : tokenFilters) {
if (tokenFilterFactory instanceof ShingleTokenFilterFactory) {
return ((ShingleTokenFilterFactory)tokenFilterFactory).getInnerFactory();
} else if (tokenFilterFactory instanceof ShingleTokenFilterFactory.Factory) {
return (ShingleTokenFilterFactory.Factory) tokenFilterFactory;
}
}
}
return null;
}
}
| 1no label | src_main_java_org_elasticsearch_search_suggest_SuggestUtils.java |
3,427 |
private class ProxyEventProcessor
implements StripedRunnable {
final EventType type;
final String serviceName;
final DistributedObject object;
private ProxyEventProcessor(EventType eventType, String serviceName, DistributedObject object) {
this.type = eventType;
this.serviceName = serviceName;
this.object = object;
}
@Override
public void run() {
DistributedObjectEvent event = new DistributedObjectEvent(type, serviceName, object);
for (DistributedObjectListener listener : listeners.values()) {
if (EventType.CREATED.equals(type)) {
listener.distributedObjectCreated(event);
} else if (EventType.DESTROYED.equals(type)) {
listener.distributedObjectDestroyed(event);
}
}
}
@Override
public int getKey() {
return object.getId().hashCode();
}
}
| 1no label | hazelcast_src_main_java_com_hazelcast_spi_impl_ProxyServiceImpl.java |
163 |
public interface URLHandlerDao {
public URLHandler findURLHandlerByURI(String uri);
/**
* Gets all the URL handlers configured in the system
* @return
*/
public List<URLHandler> findAllURLHandlers();
public URLHandler saveURLHandler(URLHandler handler);
}
| 0true | admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_url_dao_URLHandlerDao.java |
112 |
public class SimpleTxHook implements Synchronization
{
private volatile boolean gotBefore, gotAfter;
@Override
public void beforeCompletion()
{
gotBefore = true;
}
@Override
public void afterCompletion( int status )
{
gotAfter = true;
}
}
| 0true | community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_TestJtaCompliance.java |
2,236 |
static class PRNG {
private static final long multiplier = 0x5DEECE66DL;
private static final long addend = 0xBL;
private static final long mask = (1L << 48) - 1;
final long originalSeed;
long seed;
PRNG(long seed) {
this.originalSeed = seed;
this.seed = (seed ^ multiplier) & mask;
}
public float random(int doc) {
if (doc == 0) {
doc = 0xCAFEBAB;
}
long rand = doc;
rand |= rand << 32;
rand ^= rand;
return nextFloat(rand);
}
public float nextFloat(long rand) {
seed = (seed * multiplier + addend) & mask;
rand ^= seed;
double result = rand / (double)(1L << 54);
return (float) result;
}
}
| 1no label | src_main_java_org_elasticsearch_common_lucene_search_function_RandomScoreFunction.java |
1,158 |
public class OSQLMethodSize extends OAbstractSQLMethod {
public static final String NAME = "size";
public OSQLMethodSize() {
super(NAME);
}
@Override
public Object execute(final OIdentifiable iCurrentRecord, final OCommandContext iContext, final Object ioResult,
final Object[] iMethodParams) {
final Number size;
if (ioResult != null) {
if (ioResult instanceof ORecord<?>)
size = 1;
else
size = OMultiValue.getSize(ioResult);
} else
size = 0;
return size;
}
}
| 1no label | core_src_main_java_com_orientechnologies_orient_core_sql_method_misc_OSQLMethodSize.java |
6,323 |
public static class Names {
public static final String SAME = "same";
public static final String GENERIC = "generic";
public static final String GET = "get";
public static final String INDEX = "index";
public static final String BULK = "bulk";
public static final String SEARCH = "search";
public static final String SUGGEST = "suggest";
public static final String PERCOLATE = "percolate";
public static final String MANAGEMENT = "management";
public static final String FLUSH = "flush";
public static final String MERGE = "merge";
public static final String REFRESH = "refresh";
public static final String WARMER = "warmer";
public static final String SNAPSHOT = "snapshot";
public static final String OPTIMIZE = "optimize";
}
| 1no label | src_main_java_org_elasticsearch_threadpool_ThreadPool.java |
728 |
public class CollectionAddAllRequest extends CollectionRequest {
protected List<Data> valueList;
public CollectionAddAllRequest() {
}
public CollectionAddAllRequest(String name, List<Data> valueList) {
super(name);
this.valueList = valueList;
}
@Override
protected Operation prepareOperation() {
return new CollectionAddAllOperation(name, valueList);
}
@Override
public int getClassId() {
return CollectionPortableHook.COLLECTION_ADD_ALL;
}
public void write(PortableWriter writer) throws IOException {
super.write(writer);
final ObjectDataOutput out = writer.getRawDataOutput();
out.writeInt(valueList.size());
for (Data value : valueList) {
value.writeData(out);
}
}
public void read(PortableReader reader) throws IOException {
super.read(reader);
final ObjectDataInput in = reader.getRawDataInput();
final int size = in.readInt();
valueList = new ArrayList<Data>(size);
for (int i = 0; i < size; i++) {
final Data value = new Data();
value.readData(in);
valueList.add(value);
}
}
@Override
public String getRequiredAction() {
return ActionConstants.ACTION_ADD;
}
}
| 0true | hazelcast_src_main_java_com_hazelcast_collection_client_CollectionAddAllRequest.java |
373 |
class AnnotationInformationControl extends AbstractInformationControl
implements IInformationControlExtension2 {
private final DefaultMarkerAnnotationAccess fMarkerAnnotationAccess;
private Control fFocusControl;
private AnnotationInfo fInput;
private Composite fParent;
AnnotationInformationControl(Shell parentShell,
String statusFieldText) {
super(parentShell, statusFieldText);
fMarkerAnnotationAccess = new DefaultMarkerAnnotationAccess();
create();
}
AnnotationInformationControl(Shell parentShell,
ToolBarManager toolBarManager) {
super(parentShell, toolBarManager);
fMarkerAnnotationAccess = new DefaultMarkerAnnotationAccess();
create();
}
@Override
public void setInformation(String information) {
//replaced by IInformationControlExtension2#setInput
}
@Override
public void setInput(Object input) {
Assert.isLegal(input instanceof AnnotationInfo);
fInput = (AnnotationInfo) input;
disposeDeferredCreatedContent();
deferredCreateContent();
}
@Override
public boolean hasContents() {
return fInput != null;
}
private AnnotationInfo getAnnotationInfo() {
return fInput;
}
@Override
public void setFocus() {
super.setFocus();
if (fFocusControl != null) {
fFocusControl.setFocus();
}
}
@Override
public final void setVisible(boolean visible) {
if (!visible) {
disposeDeferredCreatedContent();
}
super.setVisible(visible);
}
protected void disposeDeferredCreatedContent() {
Control[] children = fParent.getChildren();
for (int i=0; i<children.length; i++) {
children[i].dispose();
}
ToolBarManager toolBarManager= getToolBarManager();
if (toolBarManager != null)
toolBarManager.removeAll();
}
@Override
protected void createContent(Composite parent) {
fParent = parent;
GridLayout layout = new GridLayout(1, false);
layout.verticalSpacing = 0;
layout.marginWidth = 0;
layout.marginHeight = 0;
fParent.setLayout(layout);
}
@Override
public Point computeSizeHint() {
Point preferedSize = getShell()
.computeSize(SWT.DEFAULT, SWT.DEFAULT, true);
Point constrains = getSizeConstraints();
if (constrains == null) {
return preferedSize;
}
int trimWidth = getShell().computeTrim(0, 0, 0, 0).width;
Point constrainedSize = getShell()
.computeSize(constrains.x - trimWidth, SWT.DEFAULT, true);
int width = Math.min(preferedSize.x, constrainedSize.x);
int height = Math.max(preferedSize.y, constrainedSize.y);
return new Point(width, height);
}
/**
* Fills the toolbar actions, if a toolbar is available. This
* is called after the input has been set.
*/
protected void fillToolbar() {
ToolBarManager toolBarManager = getToolBarManager();
if (toolBarManager != null) {
fInput.fillToolBar(toolBarManager, this);
toolBarManager.update(true);
}
}
/**
* Create content of the hover. This is called after
* the input has been set.
*/
protected void deferredCreateContent() {
fillToolbar();
createAnnotationInformation(fParent);
setColorAndFont(fParent, fParent.getForeground(),
fParent.getBackground(),
CeylonEditor.getHoverFont());
ICompletionProposal[] proposals =
getAnnotationInfo().getCompletionProposals();
if (proposals.length > 0) {
createCompletionProposalsControl(fParent, proposals);
}
fParent.layout(true);
}
private void setColorAndFont(Control control, Color foreground,
Color background, Font font) {
control.setForeground(foreground);
control.setBackground(background);
control.setFont(font);
if (control instanceof Composite) {
Control[] children = ((Composite) control).getChildren();
for (int i=0; i<children.length; i++) {
setColorAndFont(children[i], foreground, background, font);
}
}
}
private void createAnnotationInformation(Composite parent) {
Composite composite = new Composite(parent, SWT.NONE);
composite.setLayoutData(new GridData(SWT.FILL, SWT.TOP, true, false));
GridLayout layout = new GridLayout(2, false);
layout.marginHeight = 15;
layout.marginWidth = 15;
layout.horizontalSpacing = 4;
layout.marginRight = 5;
composite.setLayout(layout);
Annotation[] annotations = getAnnotationInfo()
.getAnnotationPositions().keySet()
.toArray(new Annotation[0]);
Arrays.sort(annotations, createAnnotationComparator());
for (final Annotation annotation: annotations) {
final Canvas canvas =
new Canvas(composite, SWT.NO_FOCUS);
GridData gridData =
new GridData(SWT.BEGINNING, SWT.BEGINNING, false, false);
gridData.widthHint = 17;
gridData.heightHint = 16;
canvas.setLayoutData(gridData);
canvas.addPaintListener(new PaintListener() {
@Override
public void paintControl(PaintEvent e) {
e.gc.setFont(null);
fMarkerAnnotationAccess.paint(annotation, e.gc, canvas,
new Rectangle(0, 0, 16, 16));
}
});
GridData data =
new GridData(SWT.FILL, SWT.FILL, true, true);
if (annotation instanceof RefinementAnnotation) {
Link link = new Link(composite, SWT.NONE);
String text = annotation.getText().replaceFirst(" ", " <a>") + "</a>";
link.setText(text);
link.addSelectionListener(new SelectionListener() {
@Override
public void widgetSelected(SelectionEvent e) {
RefinementAnnotation ra = (RefinementAnnotation) annotation;
ra.gotoRefinedDeclaration(getAnnotationInfo().getEditor());
}
@Override
public void widgetDefaultSelected(SelectionEvent e) {}
});
}
else {
StyledText text =
new StyledText(composite, SWT.MULTI | SWT.WRAP | SWT.READ_ONLY);
text.setLayoutData(data);
String annotationText = annotation.getText();
if (annotationText!=null && !annotationText.isEmpty()) {
annotationText = Character.toUpperCase(annotationText.charAt(0)) +
annotationText.substring(1);
StyledString styled =
Highlights.styleProposal(annotationText, true, true);
text.setText(styled.getString());
text.setStyleRanges(styled.getStyleRanges());
}
}
}
}
Comparator<Annotation> createAnnotationComparator() {
return new AnnotationComparator();
}
private void createCompletionProposalsControl(Composite parent,
ICompletionProposal[] proposals) {
Composite composite = new Composite(parent, SWT.NONE);
composite.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true));
GridLayout layout2 = new GridLayout(1, false);
layout2.marginHeight = 0;
layout2.marginWidth = 10;
layout2.verticalSpacing = 2;
composite.setLayout(layout2);
// Label separator = new Label(composite, SWT.SEPARATOR | SWT.HORIZONTAL);
// GridData gridData = new GridData(SWT.FILL, SWT.CENTER, true, false);
// separator.setLayoutData(gridData);
Label quickFixLabel = new Label(composite, SWT.NONE);
GridData layoutData =
new GridData(SWT.BEGINNING, SWT.CENTER, false, false);
layoutData.horizontalIndent = 4;
quickFixLabel.setLayoutData(layoutData);
String text;
if (proposals.length == 1) {
text = "1 quick fix available:";
}
else {
text = proposals.length + " quick fixes available:";
}
quickFixLabel.setText(text);
setColorAndFont(composite, parent.getForeground(),
parent.getBackground(),
CeylonEditor.getHoverFont());
createCompletionProposalsList(composite, proposals);
}
private void createCompletionProposalsList(Composite parent,
ICompletionProposal[] proposals) {
final ScrolledComposite scrolledComposite =
new ScrolledComposite(parent, SWT.V_SCROLL | SWT.H_SCROLL);
GridData gridData = new GridData(SWT.FILL, SWT.FILL, true, true);
scrolledComposite.setLayoutData(gridData);
scrolledComposite.setExpandVertical(false);
scrolledComposite.setExpandHorizontal(false);
Composite composite = new Composite(scrolledComposite, SWT.NONE);
composite.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true));
GridLayout layout = new GridLayout(3, false);
layout.verticalSpacing = 2;
composite.setLayout(layout);
final Link[] links = new Link[proposals.length];
for (int i=0; i<proposals.length; i++) {
Label indent = new Label(composite, SWT.NONE);
GridData gridData1 =
new GridData(SWT.BEGINNING, SWT.CENTER, false, false);
gridData1.widthHint = 0;
indent.setLayoutData(gridData1);
links[i] = createCompletionProposalLink(composite, proposals[i]);
}
scrolledComposite.setContent(composite);
setColorAndFont(scrolledComposite, parent.getForeground(),
parent.getBackground(),
JFaceResources.getDialogFont());
Point contentSize = composite.computeSize(SWT.DEFAULT, SWT.DEFAULT);
composite.setSize(contentSize);
Point constraints = getSizeConstraints();
if (constraints != null && contentSize.x < constraints.x) {
ScrollBar horizontalBar = scrolledComposite.getHorizontalBar();
int scrollBarHeight;
if (horizontalBar == null) {
Point scrollSize =
scrolledComposite.computeSize(SWT.DEFAULT, SWT.DEFAULT);
scrollBarHeight = scrollSize.y - contentSize.y;
}
else {
scrollBarHeight = horizontalBar.getSize().y;
}
gridData.heightHint = contentSize.y - scrollBarHeight;
}
fFocusControl = links[0];
for (int i=0; i<links.length; i++) {
final int index = i;
final Link link = links[index];
link.addKeyListener(new KeyListener() {
@Override
public void keyPressed(KeyEvent e) {
switch (e.keyCode) {
case SWT.ARROW_DOWN:
if (index + 1 < links.length) {
links[index + 1].setFocus();
}
break;
case SWT.ARROW_UP:
if (index > 0) {
links[index - 1].setFocus();
}
break;
default:
break;
}
}
@Override
public void keyReleased(KeyEvent e) {}
});
link.addFocusListener(new FocusListener() {
@Override
public void focusGained(FocusEvent e) {
int currentPosition = scrolledComposite.getOrigin().y;
int hight = scrolledComposite.getSize().y;
int linkPosition = link.getLocation().y;
if (linkPosition < currentPosition) {
if (linkPosition < 10) {
linkPosition= 0;
}
scrolledComposite.setOrigin(0, linkPosition);
}
else if (linkPosition + 20 > currentPosition + hight) {
scrolledComposite.setOrigin(0,
linkPosition - hight + link.getSize().y);
}
}
@Override
public void focusLost(FocusEvent e) {}
});
}
}
private Link createCompletionProposalLink(Composite parent,
final ICompletionProposal proposal) {
Label proposalImage = new Label(parent, SWT.NONE);
proposalImage.setLayoutData(new GridData(SWT.BEGINNING, SWT.CENTER, false, false));
Image image = proposal.getImage();
if (image != null) {
proposalImage.setImage(image);
proposalImage.addMouseListener(new MouseListener() {
@Override
public void mouseDoubleClick(MouseEvent e) {}
@Override
public void mouseDown(MouseEvent e) {}
@Override
public void mouseUp(MouseEvent e) {
if (e.button == 1) {
apply(proposal, fInput.getViewer());
}
}
});
}
Link proposalLink = new Link(parent, SWT.WRAP);
proposalLink.setLayoutData(new GridData(SWT.BEGINNING, SWT.CENTER, false, false));
proposalLink.setText("<a>" + proposal.getDisplayString().replace("&", "&&") + "</a>");
proposalLink.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
apply(proposal, fInput.getViewer());
}
});
return proposalLink;
}
private void apply(ICompletionProposal p, ITextViewer viewer/*, int offset*/) {
//Focus needs to be in the text viewer, otherwise linked mode does not work
dispose();
IRewriteTarget target = null;
try {
IDocument document = viewer.getDocument();
if (viewer instanceof ITextViewerExtension) {
ITextViewerExtension extension = (ITextViewerExtension) viewer;
target = extension.getRewriteTarget();
}
if (target != null)
target.beginCompoundChange();
// if (p instanceof ICompletionProposalExtension2) {
// ICompletionProposalExtension2 e= (ICompletionProposalExtension2) p;
// e.apply(viewer, (char) 0, SWT.NONE, offset);
// } else if (p instanceof ICompletionProposalExtension) {
// ICompletionProposalExtension e= (ICompletionProposalExtension) p;
// e.apply(document, (char) 0, offset);
// } else {
p.apply(document);
// }
Point selection = p.getSelection(document);
if (selection != null) {
viewer.setSelectedRange(selection.x, selection.y);
viewer.revealRange(selection.x, selection.y);
}
}
finally {
if (target != null)
target.endCompoundChange();
}
}
}
| 1no label | plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_hover_AnnotationInformationControl.java |
97 |
class ConvertToInterpolationProposal extends CorrectionProposal {
private ConvertToInterpolationProposal(String name, Change change) {
super(name, change, null);
}
static void addConvertToInterpolationProposal(Collection<ICompletionProposal> proposals,
IFile file, Tree.CompilationUnit cu, final Node node, IDocument doc) {
class ConcatenationVisitor extends Visitor {
Tree.SumOp result;
@Override
public void visit(Tree.SumOp that) {
if (that.getStartIndex()<=node.getStartIndex() &&
that.getStopIndex()>=node.getStopIndex()) {
Tree.Term lt = that.getLeftTerm();
Tree.Term rt = that.getRightTerm();
if ((lt instanceof Tree.StringLiteral ||
lt instanceof Tree.StringTemplate) &&
rt instanceof Tree.SumOp &&
(((Tree.SumOp) rt).getRightTerm()
instanceof Tree.StringLiteral ||
((Tree.SumOp) rt).getRightTerm()
instanceof Tree.StringTemplate)) {
result = that;
}
if ((rt instanceof Tree.StringLiteral ||
rt instanceof Tree.StringTemplate) &&
lt instanceof Tree.SumOp &&
(((Tree.SumOp) lt).getLeftTerm()
instanceof Tree.StringLiteral ||
((Tree.SumOp) lt).getLeftTerm()
instanceof Tree.StringTemplate)) {
result = that;
}
}
super.visit(that);
}
}
ConcatenationVisitor tv = new ConcatenationVisitor();
tv.visit(cu);
Tree.SumOp op = tv.result;
if (op!=null) {
TextFileChange change = new TextFileChange("Convert to Interpolation", file);
change.setEdit(new MultiTextEdit());
Tree.Term rt = op.getRightTerm();
Tree.Term lt = op.getLeftTerm();
if (rt instanceof Tree.StringLiteral ||
rt instanceof Tree.StringTemplate) {
change.addEdit(new ReplaceEdit(lt.getStopIndex()+1,
rt.getStartIndex()-lt.getStopIndex(),
"``"));
}
else {
Tree.SumOp rop = (Tree.SumOp) rt;
change.addEdit(new ReplaceEdit(rop.getLeftTerm().getStopIndex()+1,
rop.getRightTerm().getStartIndex()-rop.getLeftTerm().getStopIndex(),
"``"));
if (rop.getLeftTerm() instanceof Tree.QualifiedMemberExpression) {
Tree.QualifiedMemberExpression rlt =
(Tree.QualifiedMemberExpression) rop.getLeftTerm();
if (rlt.getDeclaration().getName().equals("string")) {
int from = rlt.getMemberOperator().getStartIndex();
int to = rlt.getIdentifier().getStartIndex();
change.addEdit(new DeleteEdit(from, to-from));
}
}
}
if (lt instanceof Tree.StringLiteral ||
lt instanceof Tree.StringTemplate) {
change.addEdit(new ReplaceEdit(lt.getStopIndex(),
rt.getStartIndex()-lt.getStopIndex(),
"``"));
}
else {
Tree.SumOp lop = (Tree.SumOp) lt;
change.addEdit(new ReplaceEdit(lop.getLeftTerm().getStopIndex(),
lop.getRightTerm().getStartIndex()-lop.getLeftTerm().getStopIndex(),
"``"));
if (lop.getRightTerm() instanceof Tree.QualifiedMemberExpression) {
Tree.QualifiedMemberExpression lrt =
(Tree.QualifiedMemberExpression) lop.getRightTerm();
if (lrt.getDeclaration().getName().equals("string")) {
int from = lrt.getMemberOperator().getStartIndex();
int to = lrt.getIdentifier().getStopIndex()+1;
change.addEdit(new DeleteEdit(from, to-from));
}
}
}
proposals.add(new ConvertToInterpolationProposal("Convert to string interpolation", change));
}
}
}
| 0true | plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_ConvertToInterpolationProposal.java |
131 |
public interface SchemaInspector {
/* ---------------------------------------------------------------
* Schema
* ---------------------------------------------------------------
*/
/**
* Checks whether a type with the specified name exists.
*
* @param name name of the type
* @return true, if a type with the given name exists, else false
*/
public boolean containsRelationType(String name);
/**
* Returns the type with the given name.
* Note, that type names must be unique.
*
* @param name name of the type to return
* @return The type with the given name, or null if such does not exist
* @see RelationType
*/
public RelationType getRelationType(String name);
/**
* Checks whether a property key of the given name has been defined in the Titan schema.
*
* @param name name of the property key
* @return true, if the property key exists, else false
*/
public boolean containsPropertyKey(String name);
/**
* Returns the property key with the given name. If automatic type making is enabled, it will make the property key
* using the configured default type maker if a key with the given name does not exist.
*
* @param name name of the property key to return
* @return the property key with the given name
* @throws IllegalArgumentException if a property key with the given name does not exist or if the
* type with the given name is not a property key
* @see PropertyKey
*/
public PropertyKey getOrCreatePropertyKey(String name);
/**
* Returns the property key with the given name. If it does not exist, NULL is returned
*
* @param name
* @return
*/
public PropertyKey getPropertyKey(String name);
/**
* Checks whether an edge label of the given name has been defined in the Titan schema.
*
* @param name name of the edge label
* @return true, if the edge label exists, else false
*/
public boolean containsEdgeLabel(String name);
/**
* Returns the edge label with the given name. If automatic type making is enabled, it will make the edge label
* using the configured default type maker if a label with the given name does not exist.
*
* @param name name of the edge label to return
* @return the edge label with the given name
* @throws IllegalArgumentException if an edge label with the given name does not exist or if the
* type with the given name is not an edge label
* @see EdgeLabel
*/
public EdgeLabel getOrCreateEdgeLabel(String name);
/**
* Returns the edge label with the given name. If it does not exist, NULL is returned
* @param name
* @return
*/
public EdgeLabel getEdgeLabel(String name);
/**
* Whether a vertex label with the given name exists in the graph.
*
* @param name
* @return
*/
public boolean containsVertexLabel(String name);
/**
* Returns the vertex label with the given name. If a vertex label with this name does not exist, the label is
* automatically created through the registered {@link com.thinkaurelius.titan.core.schema.DefaultSchemaMaker}.
* <p />
* Attempting to automatically create a vertex label might cause an exception depending on the configuration.
*
* @param name
* @return
*/
public VertexLabel getVertexLabel(String name);
}
| 0true | titan-core_src_main_java_com_thinkaurelius_titan_core_schema_SchemaInspector.java |
218 |
public class ClientWriteHandler extends ClientAbstractSelectionHandler implements Runnable {
private final Queue<SocketWritable> writeQueue = new ConcurrentLinkedQueue<SocketWritable>();
private final AtomicBoolean informSelector = new AtomicBoolean(true);
private final ByteBuffer buffer;
private boolean ready;
private SocketWritable lastWritable;
private volatile long lastHandle;
// private boolean initialized = false;
public ClientWriteHandler(ClientConnection connection, IOSelector ioSelector, int bufferSize) {
super(connection, ioSelector);
buffer = ByteBuffer.allocate(bufferSize);
}
@Override
public void handle() {
lastHandle = Clock.currentTimeMillis();
if (!connection.live()) {
return;
}
// if (!initialized) {
// initialized = true;
// buffer.put(Protocols.CLIENT_BINARY.getBytes());
// buffer.put(ClientTypes.JAVA.getBytes());
// registerWrite();
// }
if (lastWritable == null && (lastWritable = poll()) == null && buffer.position() == 0) {
ready = true;
return;
}
try {
while (buffer.hasRemaining() && lastWritable != null) {
boolean complete = lastWritable.writeTo(buffer);
if (complete) {
lastWritable = poll();
} else {
break;
}
}
if (buffer.position() > 0) {
buffer.flip();
try {
socketChannel.write(buffer);
} catch (Exception e) {
lastWritable = null;
handleSocketException(e);
return;
}
if (buffer.hasRemaining()) {
buffer.compact();
} else {
buffer.clear();
}
}
} catch (Throwable t) {
logger.severe("Fatal Error at WriteHandler for endPoint: " + connection.getEndPoint(), t);
} finally {
ready = false;
registerWrite();
}
}
public void enqueueSocketWritable(SocketWritable socketWritable) {
writeQueue.offer(socketWritable);
if (informSelector.compareAndSet(true, false)) {
// we don't have to call wake up if this WriteHandler is
// already in the task queue.
// we can have a counter to check this later on.
// for now, wake up regardless.
ioSelector.addTask(this);
ioSelector.wakeup();
}
}
private SocketWritable poll() {
return writeQueue.poll();
}
@Override
public void run() {
informSelector.set(true);
if (ready) {
handle();
} else {
registerWrite();
}
ready = false;
}
private void registerWrite() {
registerOp(SelectionKey.OP_WRITE);
}
@Override
public void shutdown() {
writeQueue.clear();
while (poll() != null) {
}
}
long getLastHandle() {
return lastHandle;
}
}
| 0true | hazelcast-client_src_main_java_com_hazelcast_client_connection_nio_ClientWriteHandler.java |
89 |
public interface ObjectToDouble<A> { double apply(A a); }
| 0true | src_main_java_jsr166e_ConcurrentHashMapV8.java |
2,959 |
public class SortedIndexStore implements IndexStore {
private static final float LOAD_FACTOR = 0.75f;
private final ConcurrentMap<Comparable, ConcurrentMap<Data, QueryableEntry>> mapRecords
= new ConcurrentHashMap<Comparable, ConcurrentMap<Data, QueryableEntry>>(1000);
private final NavigableSet<Comparable> sortedSet = new ConcurrentSkipListSet<Comparable>();
@Override
public void getSubRecordsBetween(MultiResultSet results, Comparable from, Comparable to) {
Set<Comparable> values = sortedSet.subSet(from, to);
for (Comparable value : values) {
ConcurrentMap<Data, QueryableEntry> records = mapRecords.get(value);
if (records != null) {
results.addResultSet(records);
}
}
// to wasn't included so include now
ConcurrentMap<Data, QueryableEntry> records = mapRecords.get(to);
if (records != null) {
results.addResultSet(records);
}
}
@Override
public void getSubRecords(MultiResultSet results, ComparisonType comparisonType, Comparable searchedValue) {
Set<Comparable> values;
boolean notEqual = false;
switch (comparisonType) {
case LESSER:
values = sortedSet.headSet(searchedValue, false);
break;
case LESSER_EQUAL:
values = sortedSet.headSet(searchedValue, true);
break;
case GREATER:
values = sortedSet.tailSet(searchedValue, false);
break;
case GREATER_EQUAL:
values = sortedSet.tailSet(searchedValue, true);
break;
case NOT_EQUAL:
values = sortedSet;
notEqual = true;
break;
default:
throw new IllegalArgumentException("Unrecognized comparisonType:" + comparisonType);
}
for (Comparable value : values) {
if (notEqual && searchedValue.equals(value)) {
// skip this value if predicateType is NOT_EQUAL
continue;
}
ConcurrentMap<Data, QueryableEntry> records = mapRecords.get(value);
if (records != null) {
results.addResultSet(records);
}
}
}
@Override
public void newIndex(Comparable newValue, QueryableEntry record) {
ConcurrentMap<Data, QueryableEntry> records = mapRecords.get(newValue);
if (records == null) {
records = new ConcurrentHashMap<Data, QueryableEntry>(1, LOAD_FACTOR, 1);
mapRecords.put(newValue, records);
if (!(newValue instanceof IndexImpl.NullObject)) {
sortedSet.add(newValue);
}
}
records.put(record.getIndexKey(), record);
}
@Override
public ConcurrentMap<Data, QueryableEntry> getRecordMap(Comparable indexValue) {
return mapRecords.get(indexValue);
}
@Override
public void clear() {
mapRecords.clear();
sortedSet.clear();
}
@Override
public void removeIndex(Comparable oldValue, Data indexKey) {
ConcurrentMap<Data, QueryableEntry> records = mapRecords.get(oldValue);
if (records != null) {
records.remove(indexKey);
if (records.size() == 0) {
mapRecords.remove(oldValue);
sortedSet.remove(oldValue);
}
}
}
@Override
public Set<QueryableEntry> getRecords(Comparable value) {
return new SingleResultSet(mapRecords.get(value));
}
@Override
public void getRecords(MultiResultSet results, Set<Comparable> values) {
for (Comparable value : values) {
ConcurrentMap<Data, QueryableEntry> records = mapRecords.get(value);
if (records != null) {
results.addResultSet(records);
}
}
}
@Override
public String toString() {
return "SortedIndexStore{"
+ "mapRecords=" + mapRecords.size()
+ '}';
}
}
| 1no label | hazelcast_src_main_java_com_hazelcast_query_impl_SortedIndexStore.java |
4,197 |
public class BlobStoreIndexShardSnapshot {
/**
* Information about snapshotted file
*/
public static class FileInfo {
private final String name;
private final String physicalName;
private final long length;
private final String checksum;
private final ByteSizeValue partSize;
private final long partBytes;
private final long numberOfParts;
/**
* Constructs a new instance of file info
*
* @param name file name as stored in the blob store
* @param physicalName original file name
* @param length total length of the file
* @param partSize size of the single chunk
* @param checksum checksum for the file
*/
public FileInfo(String name, String physicalName, long length, ByteSizeValue partSize, String checksum) {
this.name = name;
this.physicalName = physicalName;
this.length = length;
this.checksum = checksum;
long partBytes = Long.MAX_VALUE;
if (partSize != null) {
partBytes = partSize.bytes();
}
long totalLength = length;
long numberOfParts = totalLength / partBytes;
if (totalLength % partBytes > 0) {
numberOfParts++;
}
if (numberOfParts == 0) {
numberOfParts++;
}
this.numberOfParts = numberOfParts;
this.partSize = partSize;
this.partBytes = partBytes;
}
/**
* Returns the base file name
*
* @return file name
*/
public String name() {
return name;
}
/**
* Returns part name if file is stored as multiple parts
*
* @param part part number
* @return part name
*/
public String partName(long part) {
if (numberOfParts > 1) {
return name + ".part" + part;
} else {
return name;
}
}
/**
* Returns base file name from part name
*
* @param blobName part name
* @return base file name
*/
public static String canonicalName(String blobName) {
if (blobName.contains(".part")) {
return blobName.substring(0, blobName.indexOf(".part"));
}
return blobName;
}
/**
* Returns original file name
*
* @return original file name
*/
public String physicalName() {
return this.physicalName;
}
/**
* File length
*
* @return file length
*/
public long length() {
return length;
}
/**
* Returns part size
*
* @return part size
*/
public ByteSizeValue partSize() {
return partSize;
}
/**
* Return maximum number of bytes in a part
*
* @return maximum number of bytes in a part
*/
public long partBytes() {
return partBytes;
}
/**
* Returns number of parts
*
* @return number of parts
*/
public long numberOfParts() {
return numberOfParts;
}
/**
* Returns file md5 checksum provided by {@link org.elasticsearch.index.store.Store}
*
* @return file checksum
*/
@Nullable
public String checksum() {
return checksum;
}
/**
* Checks if a file in a store is the same file
*
* @param md file in a store
* @return true if the file in the store and this file have the same checksum and length
*/
public boolean isSame(StoreFileMetaData md) {
if (checksum == null || md.checksum() == null) {
return false;
}
return length == md.length() && checksum.equals(md.checksum());
}
static final class Fields {
static final XContentBuilderString NAME = new XContentBuilderString("name");
static final XContentBuilderString PHYSICAL_NAME = new XContentBuilderString("physical_name");
static final XContentBuilderString LENGTH = new XContentBuilderString("length");
static final XContentBuilderString CHECKSUM = new XContentBuilderString("checksum");
static final XContentBuilderString PART_SIZE = new XContentBuilderString("part_size");
}
/**
* Serializes file info into JSON
*
* @param file file info
* @param builder XContent builder
* @param params parameters
* @throws IOException
*/
public static void toXContent(FileInfo file, XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();
builder.field(Fields.NAME, file.name);
builder.field(Fields.PHYSICAL_NAME, file.physicalName);
builder.field(Fields.LENGTH, file.length);
if (file.checksum != null) {
builder.field(Fields.CHECKSUM, file.checksum);
}
if (file.partSize != null) {
builder.field(Fields.PART_SIZE, file.partSize.bytes());
}
builder.endObject();
}
/**
* Parses JSON that represents file info
*
* @param parser parser
* @return file info
* @throws IOException
*/
public static FileInfo fromXContent(XContentParser parser) throws IOException {
XContentParser.Token token = parser.currentToken();
String name = null;
String physicalName = null;
long length = -1;
String checksum = null;
ByteSizeValue partSize = null;
if (token == XContentParser.Token.START_OBJECT) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
String currentFieldName = parser.currentName();
token = parser.nextToken();
if (token.isValue()) {
if ("name".equals(currentFieldName)) {
name = parser.text();
} else if ("physical_name".equals(currentFieldName)) {
physicalName = parser.text();
} else if ("length".equals(currentFieldName)) {
length = parser.longValue();
} else if ("checksum".equals(currentFieldName)) {
checksum = parser.text();
} else if ("part_size".equals(currentFieldName)) {
partSize = new ByteSizeValue(parser.longValue());
} else {
throw new ElasticsearchParseException("unknown parameter [" + currentFieldName + "]");
}
} else {
throw new ElasticsearchParseException("unexpected token [" + token + "]");
}
} else {
throw new ElasticsearchParseException("unexpected token [" + token + "]");
}
}
}
// TODO: Verify???
return new FileInfo(name, physicalName, length, partSize, checksum);
}
}
private final String snapshot;
private final long indexVersion;
private final ImmutableList<FileInfo> indexFiles;
/**
* Constructs new shard snapshot metadata from snapshot metadata
*
* @param snapshot snapshot id
* @param indexVersion index version
* @param indexFiles list of files in the shard
*/
public BlobStoreIndexShardSnapshot(String snapshot, long indexVersion, List<FileInfo> indexFiles) {
assert snapshot != null;
assert indexVersion >= 0;
this.snapshot = snapshot;
this.indexVersion = indexVersion;
this.indexFiles = ImmutableList.copyOf(indexFiles);
}
/**
* Returns index version
*
* @return index version
*/
public long indexVersion() {
return indexVersion;
}
/**
* Returns snapshot id
*
* @return snapshot id
*/
public String snapshot() {
return snapshot;
}
/**
* Returns list of files in the shard
*
* @return list of files
*/
public ImmutableList<FileInfo> indexFiles() {
return indexFiles;
}
/**
* Serializes shard snapshot metadata info into JSON
*
* @param snapshot shard snapshot metadata
* @param builder XContent builder
* @param params parameters
* @throws IOException
*/
public static void toXContent(BlobStoreIndexShardSnapshot snapshot, XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();
builder.field("name", snapshot.snapshot);
builder.field("index-version", snapshot.indexVersion);
builder.startArray("files");
for (FileInfo fileInfo : snapshot.indexFiles) {
FileInfo.toXContent(fileInfo, builder, params);
}
builder.endArray();
builder.endObject();
}
/**
* Parses shard snapshot metadata
*
* @param parser parser
* @return shard snapshot metadata
* @throws IOException
*/
public static BlobStoreIndexShardSnapshot fromXContent(XContentParser parser) throws IOException {
String snapshot = null;
long indexVersion = -1;
List<FileInfo> indexFiles = newArrayList();
XContentParser.Token token = parser.currentToken();
if (token == XContentParser.Token.START_OBJECT) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
String currentFieldName = parser.currentName();
token = parser.nextToken();
if (token.isValue()) {
if ("name".equals(currentFieldName)) {
snapshot = parser.text();
} else if ("index-version".equals(currentFieldName)) {
indexVersion = parser.longValue();
} else {
throw new ElasticsearchParseException("unknown parameter [" + currentFieldName + "]");
}
} else if (token == XContentParser.Token.START_ARRAY) {
while ((parser.nextToken()) != XContentParser.Token.END_ARRAY) {
indexFiles.add(FileInfo.fromXContent(parser));
}
} else {
throw new ElasticsearchParseException("unexpected token [" + token + "]");
}
} else {
throw new ElasticsearchParseException("unexpected token [" + token + "]");
}
}
}
return new BlobStoreIndexShardSnapshot(snapshot, indexVersion, ImmutableList.<FileInfo>copyOf(indexFiles));
}
/**
* Returns true if this snapshot contains a file with a given original name
*
* @param physicalName original file name
* @return true if the file was found, false otherwise
*/
public boolean containPhysicalIndexFile(String physicalName) {
return findPhysicalIndexFile(physicalName) != null;
}
public FileInfo findPhysicalIndexFile(String physicalName) {
for (FileInfo file : indexFiles) {
if (file.physicalName().equals(physicalName)) {
return file;
}
}
return null;
}
/**
* Returns true if this snapshot contains a file with a given name
*
* @param name file name
* @return true if file was found, false otherwise
*/
public FileInfo findNameFile(String name) {
for (FileInfo file : indexFiles) {
if (file.name().equals(name)) {
return file;
}
}
return null;
}
}
| 1no label | src_main_java_org_elasticsearch_index_snapshots_blobstore_BlobStoreIndexShardSnapshot.java |
1,541 |
@Service("bli18nUpdateCartServiceExtensionHandler")
public class i18nUpdateCartServiceExtensionHandler extends AbstractUpdateCartServiceExtensionHandler
implements UpdateCartServiceExtensionHandler {
protected static final Log LOG = LogFactory.getLog(i18nUpdateCartServiceExtensionHandler.class);
@Value("${clearCartOnLocaleSwitch}")
protected boolean clearCartOnLocaleSwitch = false;
@Resource(name = "blCatalogService")
protected CatalogService catalogService;
@Resource(name = "blUpdateCartServiceExtensionManager")
protected UpdateCartServiceExtensionManager extensionManager;
@PostConstruct
public void init() {
if (isEnabled()) {
extensionManager.getHandlers().add(this);
}
}
/**
* If the locale of the cart does not match the current locale, then this extension handler will
* attempt to translate the order items.
*
* The property "clearCartOnLocaleSwitch" can be set to true if the implementation desires to
* create a new cart when the locale is switched (3.0.6 and prior behavior).
*
* @param cart
* @param resultHolder
* @return
*/
public ExtensionResultStatusType updateAndValidateCart(Order cart, ExtensionResultHolder resultHolder) {
if (BroadleafRequestContext.hasLocale()) {
BroadleafRequestContext brc = BroadleafRequestContext.getBroadleafRequestContext();
if (!brc.getLocale().getLocaleCode().matches(cart.getLocale().getLocaleCode())) {
if (LOG.isDebugEnabled()) {
LOG.debug("The cart Locale [" + cart.getLocale().getLocaleCode() +
"] does not match the current locale [" + brc.getLocale().getLocaleCode() + "]");
}
if (clearCartOnLocaleSwitch) {
resultHolder.getContextMap().put("clearCart", Boolean.TRUE);
} else {
fixTranslations(cart);
cart.setLocale(brc.getLocale());
resultHolder.getContextMap().put("saveCart", Boolean.TRUE);
}
}
}
return ExtensionResultStatusType.HANDLED_CONTINUE;
}
protected void fixTranslations(Order cart) {
for (DiscreteOrderItem orderItem : cart.getDiscreteOrderItems()) {
Sku sku = orderItem.getSku();
translateOrderItem(orderItem, sku);
}
for (OrderItem orderItem : cart.getOrderItems()) {
if (orderItem instanceof BundleOrderItem) {
BundleOrderItem bundleItem = (BundleOrderItem) orderItem;
Sku sku = bundleItem.getSku();
translateOrderItem(orderItem, sku);
}
}
}
protected void translateOrderItem(OrderItem orderItem, Sku sku) {
if (sku != null) {
orderItem.setName(sku.getName());
}
if (sku.getProductOptionValues() != null) {
for (ProductOptionValue optionValue : sku.getProductOptionValues()) {
String key = optionValue.getProductOption().getAttributeName();
OrderItemAttribute attr = orderItem.getOrderItemAttributes().get(key);
if (attr != null) {
attr.setValue(optionValue.getAttributeValue());
} else {
OrderItemAttribute attribute = new OrderItemAttributeImpl();
attribute.setName(key);
attribute.setValue(optionValue.getAttributeValue());
attribute.setOrderItem(orderItem);
orderItem.getOrderItemAttributes().put(key, attribute);
}
}
}
}
}
| 1no label | core_broadleaf-framework-web_src_main_java_org_broadleafcommerce_core_web_service_i18nUpdateCartServiceExtensionHandler.java |
722 |
public class TransportDeleteAction extends TransportShardReplicationOperationAction<DeleteRequest, DeleteRequest, DeleteResponse> {
private final AutoCreateIndex autoCreateIndex;
private final TransportCreateIndexAction createIndexAction;
private final TransportIndexDeleteAction indexDeleteAction;
@Inject
public TransportDeleteAction(Settings settings, TransportService transportService, ClusterService clusterService,
IndicesService indicesService, ThreadPool threadPool, ShardStateAction shardStateAction,
TransportCreateIndexAction createIndexAction, TransportIndexDeleteAction indexDeleteAction) {
super(settings, transportService, clusterService, indicesService, threadPool, shardStateAction);
this.createIndexAction = createIndexAction;
this.indexDeleteAction = indexDeleteAction;
this.autoCreateIndex = new AutoCreateIndex(settings);
}
@Override
protected String executor() {
return ThreadPool.Names.INDEX;
}
@Override
protected void doExecute(final DeleteRequest request, final ActionListener<DeleteResponse> listener) {
if (autoCreateIndex.shouldAutoCreate(request.index(), clusterService.state())) {
request.beforeLocalFork();
createIndexAction.execute(new CreateIndexRequest(request.index()).cause("auto(delete api)").masterNodeTimeout(request.timeout()), new ActionListener<CreateIndexResponse>() {
@Override
public void onResponse(CreateIndexResponse result) {
innerExecute(request, listener);
}
@Override
public void onFailure(Throwable e) {
if (ExceptionsHelper.unwrapCause(e) instanceof IndexAlreadyExistsException) {
// we have the index, do it
innerExecute(request, listener);
} else {
listener.onFailure(e);
}
}
});
} else {
innerExecute(request, listener);
}
}
@Override
protected boolean resolveRequest(final ClusterState state, final DeleteRequest request, final ActionListener<DeleteResponse> listener) {
request.routing(state.metaData().resolveIndexRouting(request.routing(), request.index()));
request.index(state.metaData().concreteIndex(request.index()));
if (state.metaData().hasIndex(request.index())) {
// check if routing is required, if so, do a broadcast delete
MappingMetaData mappingMd = state.metaData().index(request.index()).mappingOrDefault(request.type());
if (mappingMd != null && mappingMd.routing().required()) {
if (request.routing() == null) {
indexDeleteAction.execute(new IndexDeleteRequest(request), new ActionListener<IndexDeleteResponse>() {
@Override
public void onResponse(IndexDeleteResponse indexDeleteResponse) {
// go over the response, see if we have found one, and the version if found
long version = Versions.MATCH_ANY;
boolean found = false;
for (ShardDeleteResponse deleteResponse : indexDeleteResponse.getResponses()) {
if (deleteResponse.isFound()) {
version = deleteResponse.getVersion();
found = true;
break;
}
}
listener.onResponse(new DeleteResponse(request.index(), request.type(), request.id(), version, found));
}
@Override
public void onFailure(Throwable e) {
listener.onFailure(e);
}
});
return false;
}
}
}
return true;
}
private void innerExecute(final DeleteRequest request, final ActionListener<DeleteResponse> listener) {
super.doExecute(request, listener);
}
@Override
protected boolean checkWriteConsistency() {
return true;
}
@Override
protected DeleteRequest newRequestInstance() {
return new DeleteRequest();
}
@Override
protected DeleteRequest newReplicaRequestInstance() {
return new DeleteRequest();
}
@Override
protected DeleteResponse newResponseInstance() {
return new DeleteResponse();
}
@Override
protected String transportAction() {
return DeleteAction.NAME;
}
@Override
protected ClusterBlockException checkGlobalBlock(ClusterState state, DeleteRequest request) {
return state.blocks().globalBlockedException(ClusterBlockLevel.WRITE);
}
@Override
protected ClusterBlockException checkRequestBlock(ClusterState state, DeleteRequest request) {
return state.blocks().indexBlockedException(ClusterBlockLevel.WRITE, request.index());
}
@Override
protected PrimaryResponse<DeleteResponse, DeleteRequest> shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) {
DeleteRequest request = shardRequest.request;
IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.request.index()).shardSafe(shardRequest.shardId);
Engine.Delete delete = indexShard.prepareDelete(request.type(), request.id(), request.version())
.versionType(request.versionType())
.origin(Engine.Operation.Origin.PRIMARY);
indexShard.delete(delete);
// update the request with the version so it will go to the replicas
request.version(delete.version());
if (request.refresh()) {
try {
indexShard.refresh(new Engine.Refresh("refresh_flag_delete").force(false));
} catch (Exception e) {
// ignore
}
}
DeleteResponse response = new DeleteResponse(request.index(), request.type(), request.id(), delete.version(), delete.found());
return new PrimaryResponse<DeleteResponse, DeleteRequest>(shardRequest.request, response, null);
}
@Override
protected void shardOperationOnReplica(ReplicaOperationRequest shardRequest) {
DeleteRequest request = shardRequest.request;
IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.request.index()).shardSafe(shardRequest.shardId);
Engine.Delete delete = indexShard.prepareDelete(request.type(), request.id(), request.version())
.origin(Engine.Operation.Origin.REPLICA);
indexShard.delete(delete);
if (request.refresh()) {
try {
indexShard.refresh(new Engine.Refresh("refresh_flag_delete").force(false));
} catch (Exception e) {
// ignore
}
}
}
@Override
protected ShardIterator shards(ClusterState clusterState, DeleteRequest request) {
return clusterService.operationRouting()
.deleteShards(clusterService.state(), request.index(), request.type(), request.id(), request.routing());
}
}
| 0true
|
src_main_java_org_elasticsearch_action_delete_TransportDeleteAction.java
|
157 |
class MockConnection implements Connection {
volatile boolean live = true;
final int port;
MockConnection(int port) {
this.port = port;
}
BlockingQueue<SocketWritable> q = new LinkedBlockingQueue<SocketWritable>();
public boolean write(SocketWritable packet) {
return q.offer(packet);
}
@Override
public Address getEndPoint() {
return null;
}
@Override
public boolean live() {
return live;
}
@Override
public long lastReadTime() {
return 0;
}
@Override
public long lastWriteTime() {
return 0;
}
@Override
public void close() {
live = false;
}
@Override
public boolean isClient() {
return true;
}
@Override
public ConnectionType getType() {
return ConnectionType.BINARY_CLIENT;
}
@Override
public InetAddress getInetAddress() {
return null;
}
@Override
public InetSocketAddress getRemoteSocketAddress() {
return null;
}
@Override
public int getPort() {
return port;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof MockConnection)) return false;
MockConnection that = (MockConnection) o;
if (port != that.port) return false;
return true;
}
@Override
public int hashCode() {
return port;
}
}
| 0true
|
hazelcast_src_test_java_com_hazelcast_client_MockSimpleClient.java
|
1,787 |
public class FieldPathBuilder {
protected DynamicDaoHelper dynamicDaoHelper = new DynamicDaoHelperImpl();
protected CriteriaQuery criteria;
protected List<Predicate> restrictions;
public FieldPath getFieldPath(From root, String fullPropertyName) {
String[] pieces = fullPropertyName.split("\\.");
List<String> associationPath = new ArrayList<String>();
List<String> basicProperties = new ArrayList<String>();
int j = 0;
for (String piece : pieces) {
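            // only the first piece can open an association path; the labeled break below skips adding it to basicProperties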
checkPiece: {
if (j == 0) {
Path path = root.get(piece);
if (path instanceof PluralAttributePath) {
associationPath.add(piece);
break checkPiece;
}
}
basicProperties.add(piece);
}
j++;
}
FieldPath fieldPath = new FieldPath()
.withAssociationPath(associationPath)
.withTargetPropertyPieces(basicProperties);
return fieldPath;
}
public Path getPath(From root, String fullPropertyName, CriteriaBuilder builder) {
return getPath(root, getFieldPath(root, fullPropertyName), builder);
}
@SuppressWarnings({"rawtypes", "unchecked", "serial"})
public Path getPath(From root, FieldPath fieldPath, final CriteriaBuilder builder) {
FieldPath myFieldPath = fieldPath;
if (!StringUtils.isEmpty(fieldPath.getTargetProperty())) {
myFieldPath = getFieldPath(root, fieldPath.getTargetProperty());
}
From myRoot = root;
for (String pathElement : myFieldPath.getAssociationPath()) {
myRoot = myRoot.join(pathElement);
}
Path path = myRoot;
for (int i = 0; i < myFieldPath.getTargetPropertyPieces().size(); i++) {
String piece = myFieldPath.getTargetPropertyPieces().get(i);
if (path.getJavaType().isAnnotationPresent(Embeddable.class)) {
String original = ((SingularAttributePath) path).getAttribute().getDeclaringType().getJavaType().getName() + "." + ((SingularAttributePath) path).getAttribute().getName() + "." + piece;
String copy = path.getJavaType().getName() + "." + piece;
copyCollectionPersister(original, copy, ((CriteriaBuilderImpl) builder).getEntityManagerFactory().getSessionFactory());
}
try {
path = path.get(piece);
} catch (IllegalArgumentException e) {
                // We weren't able to resolve the requested piece, likely because it's in a polymorphic version
                // of the path we're currently on. Let's see if there's any polymorphic version of our class to
// use instead.
EntityManagerFactoryImpl em = ((CriteriaBuilderImpl) builder).getEntityManagerFactory();
Metamodel mm = em.getMetamodel();
boolean found = false;
Class<?>[] polyClasses = dynamicDaoHelper.getAllPolymorphicEntitiesFromCeiling(
path.getJavaType(), em.getSessionFactory(), true, true);
for (Class<?> clazz : polyClasses) {
ManagedType mt = mm.managedType(clazz);
try {
Attribute attr = mt.getAttribute(piece);
if (attr != null) {
Root additionalRoot = criteria.from(clazz);
restrictions.add(builder.equal(path, additionalRoot));
path = additionalRoot.get(piece);
found = true;
break;
}
} catch (IllegalArgumentException e2) {
// Do nothing - we'll try the next class and see if it has the attribute
}
}
if (!found) {
throw new IllegalArgumentException("Could not resolve requested attribute against path, including" +
" known polymorphic versions of the root", e);
}
}
if (path.getParentPath() != null && path.getParentPath().getJavaType().isAnnotationPresent(Embeddable.class) && path instanceof PluralAttributePath) {
//TODO this code should work, but there still appear to be bugs in Hibernate's JPA criteria handling for lists
//inside Embeddables
Class<?> myClass = ((PluralAttributePath) path).getAttribute().getClass().getInterfaces()[0];
//we don't know which version of "join" to call, so we'll let reflection figure it out
try {
From embeddedJoin = myRoot.join(((SingularAttributePath) path.getParentPath()).getAttribute());
Method join = embeddedJoin.getClass().getMethod("join", myClass);
path = (Path) join.invoke(embeddedJoin, ((PluralAttributePath) path).getAttribute());
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
return path;
}
/**
* This is a workaround for HHH-6562 (https://hibernate.atlassian.net/browse/HHH-6562)
*/
@SuppressWarnings("unchecked")
private void copyCollectionPersister(String originalKey, String copyKey,
SessionFactoryImpl sessionFactory) {
try {
Field collectionPersistersField = SessionFactoryImpl.class
.getDeclaredField("collectionPersisters");
collectionPersistersField.setAccessible(true);
Map collectionPersisters = (Map) collectionPersistersField.get(sessionFactory);
if (collectionPersisters.containsKey(originalKey)) {
Object collectionPersister = collectionPersisters.get(originalKey);
collectionPersisters.put(copyKey, collectionPersister);
}
}
catch (Exception e) {
throw new RuntimeException(e);
}
}
public CriteriaQuery getCriteria() {
return criteria;
}
public void setCriteria(CriteriaQuery criteria) {
this.criteria = criteria;
}
public List<Predicate> getRestrictions() {
return restrictions;
}
public void setRestrictions(List<Predicate> restrictions) {
this.restrictions = restrictions;
}
}
| 1no label
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_service_persistence_module_criteria_FieldPathBuilder.java
|
174 |
public class BroadleafProcessURLFilterTest extends TestCase {
public void testShouldProcessURL() throws Exception {
BroadleafProcessURLFilter cf = new BroadleafProcessURLFilter();
// Should fail
assertFalse("Image resource should not be processed by content filter.", cf.shouldProcessURL(null, "/path/subpath/test.jpg"));
assertFalse("URLs containing org.broadleafcommerce.admin should not be processed.", cf.shouldProcessURL(null, "/path/org.broadleafcommerce.admin/admintest"));
assertTrue("/about_us should be processed by the content filter", cf.shouldProcessURL(null, "/about_us"));
assertTrue("*.htm resources should be processed by the content filter", cf.shouldProcessURL(null, "/test.htm"));
}
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_test_java_org_broadleafcommerce_cms_web_BroadleafProcessURLFilterTest.java
|
472 |
public interface ClientInvocationService {
<T> ICompletableFuture<T> invokeOnRandomTarget(ClientRequest request) throws Exception;
<T> ICompletableFuture<T> invokeOnTarget(ClientRequest request, Address target) throws Exception;
<T> ICompletableFuture<T> invokeOnKeyOwner(ClientRequest request, Object key) throws Exception;
<T> ICompletableFuture<T> invokeOnRandomTarget(ClientRequest request, EventHandler handler) throws Exception;
<T> ICompletableFuture<T> invokeOnTarget(ClientRequest request, Address target, EventHandler handler) throws Exception;
<T> ICompletableFuture<T> invokeOnKeyOwner(ClientRequest request, Object key, EventHandler handler) throws Exception;
}
| 0true
|
hazelcast-client_src_main_java_com_hazelcast_client_spi_ClientInvocationService.java
|
223 |
private class PropertyPlaceholderConfigurerResolver implements PropertyPlaceholderHelper.PlaceholderResolver {
private final Properties props;
private PropertyPlaceholderConfigurerResolver(Properties props) {
this.props = props;
}
public String resolvePlaceholder(String placeholderName) {
return RuntimeEnvironmentPropertiesConfigurer.this.resolvePlaceholder(placeholderName, props, 1);
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_config_RuntimeEnvironmentPropertiesConfigurer.java
|
652 |
public class ProductDataProvider {
/**
* A basic product is actually a Product and a Sku
*/
@DataProvider(name="basicProduct")
public static Object[][] provideBasicProduct() {
Product ci = new ProductImpl();
Sku defaultSku = new SkuImpl();
defaultSku.setName("setOfAggieDominoes");
defaultSku.setDescription("a fine set of bones for 42");
ci.setDefaultSku(defaultSku);
return new Object[][]{{ci}};
}
@DataProvider(name="setupProducts")
public static Object[][] createProducts() {
Product p1 = getProduct(null);
Product p2 = getProduct(null);
Product p3 = getProduct(null);
Product p4 = getProduct(null);
Product p5 = getProduct(null);
Product p6 = getProduct(null);
Product p7 = getProduct(null);
Object[][] objs = new Object[7][1];
objs[0] = new Object[]{p1};
objs[1] = new Object[]{p2};
objs[2] = new Object[]{p3};
objs[3] = new Object[]{p4};
objs[4] = new Object[]{p5};
objs[5] = new Object[]{p6};
objs[6] = new Object[]{p7};
return objs;
}
private static Product getProduct(Long id) {
Calendar activeStartCal = Calendar.getInstance();
activeStartCal.add(Calendar.DAY_OF_YEAR, -2);
Product product = new ProductImpl();
Sku defaultSku = new SkuImpl();
defaultSku.setRetailPrice(new Money(BigDecimal.valueOf(15.0)));
defaultSku.setSalePrice(new Money(BigDecimal.valueOf(10.0)));
defaultSku.setActiveStartDate(activeStartCal.getTime());
product.setDefaultSku(defaultSku);
if (id == null) {
defaultSku.setName("productNameTest");
return product;
}
product.setId(id);
defaultSku.setName(id.toString());
defaultSku.setId(id);
return product;
}
}
| 0true
|
integration_src_test_java_org_broadleafcommerce_core_catalog_ProductDataProvider.java
|
561 |
public class RoundRobinLB extends AbstractLoadBalancer {
private final AtomicInteger indexRef;
public RoundRobinLB() {
this((int) System.nanoTime());
}
public RoundRobinLB(int seed) {
indexRef = new AtomicInteger(seed);
}
@Override
public Member next() {
Member[] members = getMembers();
if (members == null || members.length == 0) {
return null;
}
int length = members.length;
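        // getAndAdd may wrap to a negative value, so normalize the modulo result into [0, length)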
int index = (indexRef.getAndAdd(1) % length + length) % length;
return members[index];
}
}
| 0true
|
hazelcast-client_src_main_java_com_hazelcast_client_util_RoundRobinLB.java
|
55 |
public class HttpGetCommandParser implements CommandParser {
public TextCommand parser(SocketTextReader socketTextReader, String cmd, int space) {
StringTokenizer st = new StringTokenizer(cmd);
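        // the first token is the HTTP verb; the next token, if present, is the request URI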
st.nextToken();
String uri = null;
if (st.hasMoreTokens()) {
uri = st.nextToken();
} else {
return new ErrorCommand(ERROR_CLIENT);
}
return new HttpGetCommand(uri);
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_ascii_rest_HttpGetCommandParser.java
|
3,880 |
public class IndicesQueryParser implements QueryParser {
public static final String NAME = "indices";
@Nullable
private final ClusterService clusterService;
@Inject
public IndicesQueryParser(@Nullable ClusterService clusterService) {
this.clusterService = clusterService;
}
@Override
public String[] names() {
return new String[]{NAME};
}
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
XContentParser parser = parseContext.parser();
Query query = null;
Query noMatchQuery = Queries.newMatchAllQuery();
boolean queryFound = false;
boolean indicesFound = false;
boolean currentIndexMatchesIndices = false;
String queryName = null;
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if ("query".equals(currentFieldName)) {
                    //TODO We can only decide whether to parse the query if the indices element appears before it in the request
queryFound = true;
if (indicesFound && !currentIndexMatchesIndices) {
parseContext.parser().skipChildren(); // skip the query object without parsing it
} else {
query = parseContext.parseInnerQuery();
}
} else if ("no_match_query".equals(currentFieldName)) {
if (indicesFound && currentIndexMatchesIndices) {
parseContext.parser().skipChildren(); // skip the query object without parsing it
} else {
noMatchQuery = parseContext.parseInnerQuery();
}
} else {
throw new QueryParsingException(parseContext.index(), "[indices] query does not support [" + currentFieldName + "]");
}
} else if (token == XContentParser.Token.START_ARRAY) {
if ("indices".equals(currentFieldName)) {
if (indicesFound) {
throw new QueryParsingException(parseContext.index(), "[indices] indices or index already specified");
}
indicesFound = true;
Collection<String> indices = new ArrayList<String>();
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
String value = parser.textOrNull();
if (value == null) {
throw new QueryParsingException(parseContext.index(), "[indices] no value specified for 'indices' entry");
}
indices.add(value);
}
currentIndexMatchesIndices = matchesIndices(parseContext.index().name(), indices.toArray(new String[indices.size()]));
} else {
throw new QueryParsingException(parseContext.index(), "[indices] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if ("index".equals(currentFieldName)) {
if (indicesFound) {
throw new QueryParsingException(parseContext.index(), "[indices] indices or index already specified");
}
indicesFound = true;
currentIndexMatchesIndices = matchesIndices(parseContext.index().name(), parser.text());
} else if ("no_match_query".equals(currentFieldName)) {
String type = parser.text();
if ("all".equals(type)) {
noMatchQuery = Queries.newMatchAllQuery();
} else if ("none".equals(type)) {
noMatchQuery = Queries.newMatchNoDocsQuery();
}
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else {
throw new QueryParsingException(parseContext.index(), "[indices] query does not support [" + currentFieldName + "]");
}
}
}
if (!queryFound) {
throw new QueryParsingException(parseContext.index(), "[indices] requires 'query' element");
}
if (!indicesFound) {
throw new QueryParsingException(parseContext.index(), "[indices] requires 'indices' or 'index' element");
}
Query chosenQuery;
if (currentIndexMatchesIndices) {
chosenQuery = query;
} else {
chosenQuery = noMatchQuery;
}
if (queryName != null) {
parseContext.addNamedQuery(queryName, chosenQuery);
}
return chosenQuery;
}
protected boolean matchesIndices(String currentIndex, String... indices) {
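        // resolve aliases and wildcards to concrete index names, then check whether any of them matches the current index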
final String[] concreteIndices = clusterService.state().metaData().concreteIndicesIgnoreMissing(indices);
for (String index : concreteIndices) {
if (Regex.simpleMatch(index, currentIndex)) {
return true;
}
}
return false;
}
}
| 1no label
|
src_main_java_org_elasticsearch_index_query_IndicesQueryParser.java
|
1,493 |
public class AllocatePostApiFlagTests extends ElasticsearchAllocationTestCase {
private final ESLogger logger = Loggers.getLogger(AllocatePostApiFlagTests.class);
@Test
public void simpleFlagTests() {
AllocationService allocation = createAllocationService(settingsBuilder().put("cluster.routing.allocation.concurrent_recoveries", 10).build());
logger.info("creating an index with 1 shard, no replica");
MetaData metaData = MetaData.builder()
.put(IndexMetaData.builder("test").numberOfShards(1).numberOfReplicas(0))
.build();
RoutingTable routingTable = RoutingTable.builder()
.addAsNew(metaData.index("test"))
.build();
ClusterState clusterState = ClusterState.builder().metaData(metaData).routingTable(routingTable).build();
assertThat(clusterState.routingTable().index("test").shard(0).primaryAllocatedPostApi(), equalTo(false));
logger.info("adding two nodes and performing rerouting");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2"))).build();
RoutingAllocation.Result rerouteResult = allocation.reroute(clusterState);
clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build();
assertThat(clusterState.routingTable().index("test").shard(0).primaryAllocatedPostApi(), equalTo(false));
logger.info("start primary shard");
rerouteResult = allocation.applyStartedShards(clusterState, clusterState.routingNodes().shardsWithState(INITIALIZING));
clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build();
assertThat(clusterState.routingTable().index("test").shard(0).primaryAllocatedPostApi(), equalTo(true));
}
}
| 0true
|
src_test_java_org_elasticsearch_cluster_routing_allocation_AllocatePostApiFlagTests.java
|
70 |
class AssignToIfIsProposal extends LocalProposal {
protected DocumentChange createChange(IDocument document, Node expanse,
Integer stopIndex) {
DocumentChange change =
new DocumentChange("Assign to If Is", document);
change.setEdit(new MultiTextEdit());
change.addEdit(new InsertEdit(offset, "if (is Nothing " + initialName + " = "));
String terminal = expanse.getEndToken().getText();
if (!terminal.equals(";")) {
change.addEdit(new InsertEdit(stopIndex+1, ") {}"));
exitPos = stopIndex+13;
}
else {
change.addEdit(new ReplaceEdit(stopIndex, 1, ") {}"));
exitPos = stopIndex+12;
}
return change;
}
public AssignToIfIsProposal(Tree.CompilationUnit cu,
Node node, int currentOffset) {
super(cu, node, currentOffset);
}
protected void addLinkedPositions(IDocument document, Unit unit)
throws BadLocationException {
ProposalPosition typePosition =
new ProposalPosition(document, offset+7, 7, 1,
getCaseTypeProposals(offset+7, unit, type));
ProposalPosition namePosition =
new ProposalPosition(document, offset+15, initialName.length(), 0,
getNameProposals(offset+15, 1, nameProposals));
LinkedMode.addLinkedPosition(linkedModeModel, typePosition);
LinkedMode.addLinkedPosition(linkedModeModel, namePosition);
}
@Override
String[] computeNameProposals(Node expression) {
return super.computeNameProposals(expression);
}
@Override
public String getDisplayString() {
return "Assign expression to 'if (is)' condition";
}
@Override
boolean isEnabled(ProducedType resultType) {
return true;
}
static void addAssignToIfIsProposal(Tree.CompilationUnit cu,
Collection<ICompletionProposal> proposals,
Node node, int currentOffset) {
AssignToIfIsProposal prop =
new AssignToIfIsProposal(cu, node, currentOffset);
if (prop.isEnabled()) {
proposals.add(prop);
}
}
}
| 0true
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_AssignToIfIsProposal.java
|
1,137 |
public class OSQLMethodAsInteger extends OAbstractSQLMethod {
public static final String NAME = "asinteger";
public OSQLMethodAsInteger() {
super(NAME);
}
@Override
public Object execute(OIdentifiable iCurrentRecord, OCommandContext iContext, Object ioResult, Object[] iMethodParams) {
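    // numbers are narrowed to int directly; any other non-null value is parsed from its trimmed string form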
if (ioResult instanceof Number) {
ioResult = ((Number) ioResult).intValue();
} else {
ioResult = ioResult != null ? new Integer(ioResult.toString().trim()) : null;
}
return ioResult;
}
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_sql_method_misc_OSQLMethodAsInteger.java
|
753 |
public class TxnSetSizeRequest extends TxnCollectionRequest {
public TxnSetSizeRequest() {
}
public TxnSetSizeRequest(String name) {
super(name);
}
@Override
public Object innerCall() throws Exception {
return getEndpoint().getTransactionContext(txnId).getSet(name).size();
}
@Override
public String getServiceName() {
return SetService.SERVICE_NAME;
}
@Override
public int getClassId() {
return CollectionPortableHook.TXN_SET_SIZE;
}
@Override
public Permission getRequiredPermission() {
return new SetPermission(name, ActionConstants.ACTION_READ);
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_collection_client_TxnSetSizeRequest.java
|
3,211 |
constructors[REPL_CLEAR_MESSAGE] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
public IdentifiedDataSerializable createNew(Integer arg) {
return new VectorClock();
}
};
| 1no label
|
hazelcast_src_main_java_com_hazelcast_replicatedmap_operation_ReplicatedMapDataSerializerHook.java
|
1,729 |
operation.setResponseHandler(new ResponseHandler() {
@Override
public void sendResponse(Object obj) {
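                // the last pending load response flips the record store to the loaded state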
if (checkIfMapLoaded.decrementAndGet() == 0) {
loaded.set(true);
}
}
public boolean isLocal() {
return true;
}
});
| 1no label
|
hazelcast_src_main_java_com_hazelcast_map_DefaultRecordStore.java
|
3,967 |
public class TermsFilterParser implements FilterParser {
public static final String NAME = "terms";
private IndicesTermsFilterCache termsFilterCache;
@Inject
public TermsFilterParser() {
}
@Override
public String[] names() {
return new String[]{NAME, "in"};
}
@Inject(optional = true)
public void setIndicesTermsFilterCache(IndicesTermsFilterCache termsFilterCache) {
this.termsFilterCache = termsFilterCache;
}
@Override
public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
XContentParser parser = parseContext.parser();
MapperService.SmartNameFieldMappers smartNameFieldMappers;
Boolean cache = null;
String filterName = null;
String currentFieldName = null;
String lookupIndex = parseContext.index().name();
String lookupType = null;
String lookupId = null;
String lookupPath = null;
String lookupRouting = null;
boolean lookupCache = true;
CacheKeyFilter.Key cacheKey = null;
XContentParser.Token token;
String execution = "plain";
List<Object> terms = Lists.newArrayList();
String fieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_ARRAY) {
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
Object value = parser.objectBytes();
if (value == null) {
throw new QueryParsingException(parseContext.index(), "No value specified for terms filter");
}
terms.add(value);
}
} else if (token == XContentParser.Token.START_OBJECT) {
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if ("index".equals(currentFieldName)) {
lookupIndex = parser.text();
} else if ("type".equals(currentFieldName)) {
lookupType = parser.text();
} else if ("id".equals(currentFieldName)) {
lookupId = parser.text();
} else if ("path".equals(currentFieldName)) {
lookupPath = parser.text();
} else if ("routing".equals(currentFieldName)) {
lookupRouting = parser.textOrNull();
} else if ("cache".equals(currentFieldName)) {
lookupCache = parser.booleanValue();
} else {
throw new QueryParsingException(parseContext.index(), "[terms] filter does not support [" + currentFieldName + "] within lookup element");
}
}
}
if (lookupType == null) {
throw new QueryParsingException(parseContext.index(), "[terms] filter lookup element requires specifying the type");
}
if (lookupId == null) {
throw new QueryParsingException(parseContext.index(), "[terms] filter lookup element requires specifying the id");
}
if (lookupPath == null) {
throw new QueryParsingException(parseContext.index(), "[terms] filter lookup element requires specifying the path");
}
} else if (token.isValue()) {
if ("execution".equals(currentFieldName)) {
execution = parser.text();
} else if ("_name".equals(currentFieldName)) {
filterName = parser.text();
} else if ("_cache".equals(currentFieldName)) {
cache = parser.booleanValue();
} else if ("_cache_key".equals(currentFieldName) || "_cacheKey".equals(currentFieldName)) {
cacheKey = new CacheKeyFilter.Key(parser.text());
} else {
throw new QueryParsingException(parseContext.index(), "[terms] filter does not support [" + currentFieldName + "]");
}
}
}
if (fieldName == null) {
throw new QueryParsingException(parseContext.index(), "terms filter requires a field name, followed by array of terms");
}
FieldMapper fieldMapper = null;
smartNameFieldMappers = parseContext.smartFieldMappers(fieldName);
String[] previousTypes = null;
if (smartNameFieldMappers != null) {
if (smartNameFieldMappers.hasMapper()) {
fieldMapper = smartNameFieldMappers.mapper();
fieldName = fieldMapper.names().indexName();
}
            // if the field name carries an explicit type and we have a doc mapper, mark it
if (smartNameFieldMappers.explicitTypeInNameWithDocMapper()) {
previousTypes = QueryParseContext.setTypesWithPrevious(new String[]{smartNameFieldMappers.docMapper().type()});
}
}
if (lookupId != null) {
            // if there are no mappings, then nothing has been indexed yet against this shard, so we can return
// no match (but not cached!), since the Terms Lookup relies on the fact that there are mappings...
if (fieldMapper == null) {
return Queries.MATCH_NO_FILTER;
}
// external lookup, use it
TermsLookup termsLookup = new TermsLookup(fieldMapper, lookupIndex, lookupType, lookupId, lookupRouting, lookupPath, parseContext);
Filter filter = termsFilterCache.termsFilter(termsLookup, lookupCache, cacheKey);
if (filter == null) {
return null;
}
// cache the whole filter by default, or if explicitly told to
if (cache == null || cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
return filter;
}
if (terms.isEmpty()) {
return Queries.MATCH_NO_FILTER;
}
try {
Filter filter;
if ("plain".equals(execution)) {
if (fieldMapper != null) {
filter = fieldMapper.termsFilter(terms, parseContext);
} else {
BytesRef[] filterValues = new BytesRef[terms.size()];
for (int i = 0; i < filterValues.length; i++) {
filterValues[i] = BytesRefs.toBytesRef(terms.get(i));
}
filter = new TermsFilter(fieldName, filterValues);
}
// cache the whole filter by default, or if explicitly told to
if (cache == null || cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
} else if ("fielddata".equals(execution)) {
                // if there are no mappings, then nothing has been indexed yet against this shard, so we can return
// no match (but not cached!), since the FieldDataTermsFilter relies on a mapping...
if (fieldMapper == null) {
return Queries.MATCH_NO_FILTER;
}
filter = fieldMapper.termsFilter(parseContext.fieldData(), terms, parseContext);
if (cache != null && cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
} else if ("bool".equals(execution)) {
XBooleanFilter boolFiler = new XBooleanFilter();
if (fieldMapper != null) {
for (Object term : terms) {
boolFiler.add(parseContext.cacheFilter(fieldMapper.termFilter(term, parseContext), null), BooleanClause.Occur.SHOULD);
}
} else {
for (Object term : terms) {
boolFiler.add(parseContext.cacheFilter(new TermFilter(new Term(fieldName, BytesRefs.toBytesRef(term))), null), BooleanClause.Occur.SHOULD);
}
}
filter = boolFiler;
// only cache if explicitly told to, since we cache inner filters
if (cache != null && cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
} else if ("bool_nocache".equals(execution)) {
XBooleanFilter boolFiler = new XBooleanFilter();
if (fieldMapper != null) {
for (Object term : terms) {
boolFiler.add(fieldMapper.termFilter(term, parseContext), BooleanClause.Occur.SHOULD);
}
} else {
for (Object term : terms) {
boolFiler.add(new TermFilter(new Term(fieldName, BytesRefs.toBytesRef(term))), BooleanClause.Occur.SHOULD);
}
}
filter = boolFiler;
// cache the whole filter by default, or if explicitly told to
if (cache == null || cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
} else if ("and".equals(execution)) {
List<Filter> filters = Lists.newArrayList();
if (fieldMapper != null) {
for (Object term : terms) {
filters.add(parseContext.cacheFilter(fieldMapper.termFilter(term, parseContext), null));
}
} else {
for (Object term : terms) {
filters.add(parseContext.cacheFilter(new TermFilter(new Term(fieldName, BytesRefs.toBytesRef(term))), null));
}
}
filter = new AndFilter(filters);
// only cache if explicitly told to, since we cache inner filters
if (cache != null && cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
} else if ("and_nocache".equals(execution)) {
List<Filter> filters = Lists.newArrayList();
if (fieldMapper != null) {
for (Object term : terms) {
filters.add(fieldMapper.termFilter(term, parseContext));
}
} else {
for (Object term : terms) {
filters.add(new TermFilter(new Term(fieldName, BytesRefs.toBytesRef(term))));
}
}
filter = new AndFilter(filters);
// cache the whole filter by default, or if explicitly told to
if (cache == null || cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
} else if ("or".equals(execution)) {
List<Filter> filters = Lists.newArrayList();
if (fieldMapper != null) {
for (Object term : terms) {
filters.add(parseContext.cacheFilter(fieldMapper.termFilter(term, parseContext), null));
}
} else {
for (Object term : terms) {
filters.add(parseContext.cacheFilter(new TermFilter(new Term(fieldName, BytesRefs.toBytesRef(term))), null));
}
}
filter = new OrFilter(filters);
// only cache if explicitly told to, since we cache inner filters
if (cache != null && cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
} else if ("or_nocache".equals(execution)) {
List<Filter> filters = Lists.newArrayList();
if (fieldMapper != null) {
for (Object term : terms) {
filters.add(fieldMapper.termFilter(term, parseContext));
}
} else {
for (Object term : terms) {
filters.add(new TermFilter(new Term(fieldName, BytesRefs.toBytesRef(term))));
}
}
filter = new OrFilter(filters);
// cache the whole filter by default, or if explicitly told to
if (cache == null || cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
}
} else {
throw new QueryParsingException(parseContext.index(), "terms filter execution value [" + execution + "] not supported");
}
filter = wrapSmartNameFilter(filter, smartNameFieldMappers, parseContext);
if (filterName != null) {
parseContext.addNamedFilter(filterName, filter);
}
return filter;
} finally {
if (smartNameFieldMappers != null && smartNameFieldMappers.explicitTypeInNameWithDocMapper()) {
QueryParseContext.setTypes(previousTypes);
}
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_index_query_TermsFilterParser.java
|
315 |
new Thread() {
public void run() {
map.tryPut(key, newValue, 8, TimeUnit.SECONDS);
tryPutReturned.countDown();
}
}.start();
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_map_ClientMapLockTest.java
|
253 |
static final class RateLimitedIndexOutput extends BufferedIndexOutput {
private final IndexOutput delegate;
private final BufferedIndexOutput bufferedDelegate;
private final RateLimiter rateLimiter;
private final StoreRateLimiting.Listener rateListener;
RateLimitedIndexOutput(final RateLimiter rateLimiter, final StoreRateLimiting.Listener rateListener, final IndexOutput delegate) {
super(delegate instanceof BufferedIndexOutput ? ((BufferedIndexOutput) delegate).getBufferSize() : BufferedIndexOutput.DEFAULT_BUFFER_SIZE);
if (delegate instanceof BufferedIndexOutput) {
bufferedDelegate = (BufferedIndexOutput) delegate;
this.delegate = delegate;
} else {
this.delegate = delegate;
bufferedDelegate = null;
}
this.rateLimiter = rateLimiter;
this.rateListener = rateListener;
}
@Override
protected void flushBuffer(byte[] b, int offset, int len) throws IOException {
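            // wait for the rate limiter to grant bandwidth for this buffer and report the pause duration to the listener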
rateListener.onPause(rateLimiter.pause(len));
if (bufferedDelegate != null) {
bufferedDelegate.flushBuffer(b, offset, len);
} else {
delegate.writeBytes(b, offset, len);
}
}
@Override
public long length() throws IOException {
return delegate.length();
}
@Override
public void seek(long pos) throws IOException {
flush();
delegate.seek(pos);
}
@Override
public void flush() throws IOException {
try {
super.flush();
} finally {
delegate.flush();
}
}
@Override
public void setLength(long length) throws IOException {
delegate.setLength(length);
}
@Override
public void close() throws IOException {
try {
super.close();
} finally {
delegate.close();
}
}
}
| 1no label
|
src_main_java_org_apache_lucene_store_RateLimitedFSDirectory.java
|
2,363 |
private class SingleExecutionProcessor
implements Runnable {
@Override
public void run() {
try {
RequestPartitionResult result = mapReduceService
.processRequest(supervisor.getJobOwner(), new RequestMemberIdAssignment(name, jobId), name);
                // JobSupervisor doesn't exist anymore on the job owner, job done?
if (result.getResultState() == NO_SUPERVISOR) {
return;
} else if (result.getResultState() == NO_MORE_PARTITIONS) {
return;
}
int partitionId = result.getPartitionId();
KeyValueSource<KeyIn, ValueIn> delegate = keyValueSource;
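                // when stats reporting is enabled, wrap the source in a facade that also forwards processed-record statistics to the supervisor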
if (supervisor.getConfiguration().isCommunicateStats()) {
delegate = new KeyValueSourceFacade<KeyIn, ValueIn>(keyValueSource, supervisor);
}
delegate.reset();
if (delegate.open(nodeEngine)) {
DefaultContext<KeyOut, ValueOut> context = supervisor.getOrCreateContext(MapCombineTask.this);
processMapping(partitionId, context, delegate);
delegate.close();
finalizeMapping(partitionId, context);
} else {
// Partition assignment might not be ready yet, postpone the processing and retry later
postponePartitionProcessing(partitionId);
}
} catch (Throwable t) {
handleProcessorThrowable(t);
}
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_mapreduce_impl_task_MapCombineTask.java
|
130 |
@Entity
@Table(name = "BLC_QUAL_CRIT_SC_XREF")
@Inheritance(strategy=InheritanceType.JOINED)
public class CriteriaStructuredContentXref {
/** The Constant serialVersionUID. */
private static final long serialVersionUID = 1L;
    /** The embedded composite key. */
@EmbeddedId
CriteriaStructuredContentXrefPK criteriaStructuredContentXrefPK = new CriteriaStructuredContentXrefPK();
public CriteriaStructuredContentXrefPK getCriteriaStructuredContentXrefPK() {
return criteriaStructuredContentXrefPK;
}
public void setCriteriaStructuredContentXrefPK(final CriteriaStructuredContentXrefPK criteriaStructuredContentXrefPK) {
this.criteriaStructuredContentXrefPK = criteriaStructuredContentXrefPK;
}
public static class CriteriaStructuredContentXrefPK implements Serializable {
/** The Constant serialVersionUID. */
private static final long serialVersionUID = 1L;
@ManyToOne(targetEntity = StructuredContentImpl.class, optional=false)
@JoinColumn(name = "SC_ID")
protected StructuredContent structuredContent = new StructuredContentImpl();
@ManyToOne(targetEntity = StructuredContentItemCriteriaImpl.class, optional=false)
@JoinColumn(name = "SC_ITEM_CRITERIA_ID")
protected StructuredContentItemCriteria structuredContentItemCriteria = new StructuredContentItemCriteriaImpl();
public StructuredContent getStructuredContent() {
return structuredContent;
}
public void setStructuredContent(StructuredContent structuredContent) {
this.structuredContent = structuredContent;
}
public StructuredContentItemCriteria getStructuredContentItemCriteria() {
return structuredContentItemCriteria;
}
public void setStructuredContentItemCriteria(StructuredContentItemCriteria structuredContentItemCriteria) {
this.structuredContentItemCriteria = structuredContentItemCriteria;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((structuredContent == null) ? 0 : structuredContent.hashCode());
result = prime * result + ((structuredContentItemCriteria == null) ? 0 : structuredContentItemCriteria.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
CriteriaStructuredContentXrefPK other = (CriteriaStructuredContentXrefPK) obj;
if (structuredContent == null) {
if (other.structuredContent != null)
return false;
} else if (!structuredContent.equals(other.structuredContent))
return false;
if (structuredContentItemCriteria == null) {
if (other.structuredContentItemCriteria != null)
return false;
} else if (!structuredContentItemCriteria.equals(other.structuredContentItemCriteria))
return false;
return true;
}
}
}
| 1no label
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_structure_domain_CriteriaStructuredContentXref.java
|
994 |
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_FULFILLMENT_GROUP")
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@AdminPresentationMergeOverrides(
{
@AdminPresentationMergeOverride(name = "", mergeEntries =
@AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.READONLY,
booleanOverrideValue = true)),
@AdminPresentationMergeOverride(name = "currency", mergeEntries =
@AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.PROMINENT,
booleanOverrideValue = false)),
@AdminPresentationMergeOverride(name = "personalMessage", mergeEntries = {
@AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.TAB,
overrideValue = FulfillmentGroupImpl.Presentation.Tab.Name.Advanced),
@AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.TABORDER,
intOverrideValue = FulfillmentGroupImpl.Presentation.Tab.Order.Advanced)
}),
@AdminPresentationMergeOverride(name = "address", mergeEntries = {
@AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.TAB,
overrideValue = FulfillmentGroupImpl.Presentation.Tab.Name.Address),
@AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.TABORDER,
intOverrideValue = FulfillmentGroupImpl.Presentation.Tab.Order.Address)
}),
@AdminPresentationMergeOverride(name = "address.isDefault", mergeEntries = {
@AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.EXCLUDED,
booleanOverrideValue = true)
}),
@AdminPresentationMergeOverride(name = "address.isActive", mergeEntries = {
@AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.EXCLUDED,
booleanOverrideValue = true)
}),
@AdminPresentationMergeOverride(name = "address.isBusiness", mergeEntries = {
@AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.EXCLUDED,
booleanOverrideValue = true)
}),
@AdminPresentationMergeOverride(name = "phone", mergeEntries = {
@AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.EXCLUDED,
booleanOverrideValue = true)
}),
@AdminPresentationMergeOverride(name = "phone.phoneNumber", mergeEntries = {
@AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.EXCLUDED,
booleanOverrideValue = false),
@AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.ORDER,
intOverrideValue = FulfillmentGroupImpl.Presentation.FieldOrder.PHONE),
@AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.GROUP,
overrideValue = "General"),
@AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.REQUIREDOVERRIDE,
overrideValue = "NOT_REQUIRED")
})
}
)
@AdminPresentationClass(populateToOneFields = PopulateToOneFieldsEnum.TRUE, friendlyName = "FulfillmentGroupImpl_baseFulfillmentGroup")
public class FulfillmentGroupImpl implements FulfillmentGroup, CurrencyCodeIdentifiable {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(generator = "FulfillmentGroupId")
@GenericGenerator(
name="FulfillmentGroupId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="FulfillmentGroupImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.core.order.domain.FulfillmentGroupImpl")
}
)
@Column(name = "FULFILLMENT_GROUP_ID")
protected Long id;
@Column(name = "REFERENCE_NUMBER")
@Index(name="FG_REFERENCE_INDEX", columnNames={"REFERENCE_NUMBER"})
@AdminPresentation(friendlyName = "FulfillmentGroupImpl_FG_Reference_Number", order=Presentation.FieldOrder.REFNUMBER,
groupOrder = Presentation.Group.Order.General)
protected String referenceNumber;
@Column(name = "METHOD")
@Index(name="FG_METHOD_INDEX", columnNames={"METHOD"})
@AdminPresentation(excluded = true)
@Deprecated
protected String method;
@Column(name = "SERVICE")
@Index(name="FG_SERVICE_INDEX", columnNames={"SERVICE"})
@AdminPresentation(excluded = true)
@Deprecated
protected String service;
@Column(name = "RETAIL_PRICE", precision=19, scale=5)
@AdminPresentation(friendlyName = "FulfillmentGroupImpl_Retail_Shipping_Price", order=Presentation.FieldOrder.RETAIL,
group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
tab = Presentation.Tab.Name.Pricing, tabOrder = Presentation.Tab.Order.Pricing,
fieldType=SupportedFieldType.MONEY)
protected BigDecimal retailFulfillmentPrice;
@Column(name = "SALE_PRICE", precision=19, scale=5)
@AdminPresentation(friendlyName = "FulfillmentGroupImpl_Sale_Shipping_Price", order=Presentation.FieldOrder.SALE,
group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
tab = Presentation.Tab.Name.Pricing, tabOrder = Presentation.Tab.Order.Pricing,
fieldType=SupportedFieldType.MONEY)
protected BigDecimal saleFulfillmentPrice;
@Column(name = "PRICE", precision=19, scale=5)
@AdminPresentation(friendlyName = "FulfillmentGroupImpl_Shipping_Price", order=Presentation.FieldOrder.PRICE,
group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
tab = Presentation.Tab.Name.Pricing, tabOrder = Presentation.Tab.Order.Pricing,
fieldType=SupportedFieldType.MONEY)
protected BigDecimal fulfillmentPrice;
@Column(name = "TYPE")
@AdminPresentation(friendlyName = "FulfillmentGroupImpl_FG_Type", order=Presentation.FieldOrder.TYPE,
fieldType=SupportedFieldType.BROADLEAF_ENUMERATION,
broadleafEnumeration="org.broadleafcommerce.core.order.service.type.FulfillmentType",
prominent = true, gridOrder = 3000)
protected String type;
@Column(name = "TOTAL_TAX", precision=19, scale=5)
@AdminPresentation(friendlyName = "FulfillmentGroupImpl_FG_Total_Tax", order=Presentation.FieldOrder.TOTALTAX,
group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
tab = Presentation.Tab.Name.Pricing, tabOrder = Presentation.Tab.Order.Pricing,
fieldType=SupportedFieldType.MONEY)
protected BigDecimal totalTax;
@Column(name = "TOTAL_ITEM_TAX", precision=19, scale=5)
@AdminPresentation(friendlyName = "FulfillmentGroupImpl_FG_Total_Item_Tax", order=Presentation.FieldOrder.ITEMTAX,
group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
tab = Presentation.Tab.Name.Pricing, tabOrder = Presentation.Tab.Order.Pricing,
fieldType=SupportedFieldType.MONEY)
protected BigDecimal totalItemTax;
@Column(name = "TOTAL_FEE_TAX", precision=19, scale=5)
@AdminPresentation(friendlyName = "FulfillmentGroupImpl_FG_Total_Fee_Tax", order=Presentation.FieldOrder.FEETAX,
group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
tab = Presentation.Tab.Name.Pricing, tabOrder = Presentation.Tab.Order.Pricing,
fieldType=SupportedFieldType.MONEY)
protected BigDecimal totalFeeTax;
@Column(name = "TOTAL_FG_TAX", precision=19, scale=5)
@AdminPresentation(friendlyName = "FulfillmentGroupImpl_FG_Total_FG_Tax", order=Presentation.FieldOrder.FGTAX,
group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
tab = Presentation.Tab.Name.Pricing, tabOrder = Presentation.Tab.Order.Pricing,
fieldType=SupportedFieldType.MONEY)
protected BigDecimal totalFulfillmentGroupTax;
@Column(name = "DELIVERY_INSTRUCTION")
@AdminPresentation(friendlyName = "FulfillmentGroupImpl_FG_Delivery_Instruction", order=Presentation.FieldOrder.DELIVERINSTRUCTION)
protected String deliveryInstruction;
@Column(name = "IS_PRIMARY")
@Index(name="FG_PRIMARY_INDEX", columnNames={"IS_PRIMARY"})
@AdminPresentation(friendlyName = "FulfillmentGroupImpl_Primary_FG", order=Presentation.FieldOrder.PRIMARY)
protected boolean primary = false;
@Column(name = "MERCHANDISE_TOTAL", precision=19, scale=5)
@AdminPresentation(friendlyName = "FulfillmentGroupImpl_FG_Merchandise_Total", order=Presentation.FieldOrder.MERCHANDISETOTAL,
group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
tab = Presentation.Tab.Name.Pricing, tabOrder = Presentation.Tab.Order.Pricing,
fieldType=SupportedFieldType.MONEY)
protected BigDecimal merchandiseTotal;
@Column(name = "TOTAL", precision=19, scale=5)
@AdminPresentation(friendlyName = "FulfillmentGroupImpl_FG_Total", order=Presentation.FieldOrder.TOTAL,
group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
tab = Presentation.Tab.Name.Pricing, tabOrder = Presentation.Tab.Order.Pricing,
fieldType= SupportedFieldType.MONEY, prominent = true, gridOrder = 2000)
protected BigDecimal total;
@Column(name = "STATUS")
@Index(name="FG_STATUS_INDEX", columnNames={"STATUS"})
@AdminPresentation(friendlyName = "FulfillmentGroupImpl_FG_Status", order=Presentation.FieldOrder.STATUS,
fieldType=SupportedFieldType.BROADLEAF_ENUMERATION,
broadleafEnumeration="org.broadleafcommerce.core.order.service.type.FulfillmentGroupStatusType",
prominent = true, gridOrder = 4000)
protected String status;
@Column(name = "SHIPPING_PRICE_TAXABLE")
@AdminPresentation(friendlyName = "FulfillmentGroupImpl_Shipping_Price_Taxable", order=Presentation.FieldOrder.TAXABLE,
group = Presentation.Group.Name.Pricing, groupOrder = Presentation.Group.Order.Pricing,
tab = Presentation.Tab.Name.Pricing, tabOrder = Presentation.Tab.Order.Pricing)
protected Boolean isShippingPriceTaxable = Boolean.FALSE;
@ManyToOne(targetEntity = FulfillmentOptionImpl.class, cascade = {CascadeType.PERSIST, CascadeType.MERGE})
@JoinColumn(name = "FULFILLMENT_OPTION_ID")
protected FulfillmentOption fulfillmentOption;
@ManyToOne(targetEntity = OrderImpl.class, optional=false)
@JoinColumn(name = "ORDER_ID")
@Index(name="FG_ORDER_INDEX", columnNames={"ORDER_ID"})
@AdminPresentation(excluded = true)
protected Order order;
@Column(name = "FULFILLMENT_GROUP_SEQUNCE")
protected Integer sequence;
@ManyToOne(targetEntity = AddressImpl.class, cascade = {CascadeType.PERSIST, CascadeType.MERGE})
@JoinColumn(name = "ADDRESS_ID")
@Index(name="FG_ADDRESS_INDEX", columnNames={"ADDRESS_ID"})
protected Address address;
@ManyToOne(targetEntity = PhoneImpl.class, cascade = {CascadeType.PERSIST, CascadeType.MERGE})
@JoinColumn(name = "PHONE_ID")
@Index(name="FG_PHONE_INDEX", columnNames={"PHONE_ID"})
protected Phone phone;
@ManyToOne(targetEntity = PersonalMessageImpl.class, cascade = { CascadeType.ALL })
@JoinColumn(name = "PERSONAL_MESSAGE_ID")
@Index(name="FG_MESSAGE_INDEX", columnNames={"PERSONAL_MESSAGE_ID"})
protected PersonalMessage personalMessage;
@OneToMany(mappedBy = "fulfillmentGroup", targetEntity = FulfillmentGroupItemImpl.class, cascade = CascadeType.ALL,
orphanRemoval = true)
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@AdminPresentationCollection(friendlyName="FulfillmentGroupImpl_Items",
tab = Presentation.Tab.Name.Items, tabOrder = Presentation.Tab.Order.Items)
protected List<FulfillmentGroupItem> fulfillmentGroupItems = new ArrayList<FulfillmentGroupItem>();
@OneToMany(mappedBy = "fulfillmentGroup", targetEntity = FulfillmentGroupFeeImpl.class, cascade = { CascadeType.ALL },
orphanRemoval = true)
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region = "blOrderElements")
@AdminPresentationCollection(friendlyName="FulfillmentGroupImpl_Fees",
tab = Presentation.Tab.Name.Pricing, tabOrder = Presentation.Tab.Order.Pricing)
protected List<FulfillmentGroupFee> fulfillmentGroupFees = new ArrayList<FulfillmentGroupFee>();
@OneToMany(mappedBy = "fulfillmentGroup", targetEntity = CandidateFulfillmentGroupOfferImpl.class, cascade = { CascadeType.ALL },
orphanRemoval = true)
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
protected List<CandidateFulfillmentGroupOffer> candidateOffers = new ArrayList<CandidateFulfillmentGroupOffer>();
@OneToMany(mappedBy = "fulfillmentGroup", targetEntity = FulfillmentGroupAdjustmentImpl.class, cascade = { CascadeType.ALL },
orphanRemoval = true)
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@AdminPresentationCollection(friendlyName="FulfillmentGroupImpl_Adjustments",
tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced)
protected List<FulfillmentGroupAdjustment> fulfillmentGroupAdjustments = new ArrayList<FulfillmentGroupAdjustment>();
@OneToMany(fetch = FetchType.LAZY, targetEntity = TaxDetailImpl.class, cascade = { CascadeType.ALL }, orphanRemoval = true)
@JoinTable(name = "BLC_FG_FG_TAX_XREF", joinColumns = @JoinColumn(name = "FULFILLMENT_GROUP_ID"),
inverseJoinColumns = @JoinColumn(name = "TAX_DETAIL_ID"))
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
protected List<TaxDetail> taxes = new ArrayList<TaxDetail>();
@Override
public Long getId() {
return id;
}
@Override
public void setId(Long id) {
this.id = id;
}
@Override
public Order getOrder() {
return order;
}
@Override
public void setOrder(Order order) {
this.order = order;
}
@Override
public FulfillmentOption getFulfillmentOption() {
return fulfillmentOption;
}
@Override
public void setFulfillmentOption(FulfillmentOption fulfillmentOption) {
this.fulfillmentOption = fulfillmentOption;
}
@Override
public String getReferenceNumber() {
return referenceNumber;
}
@Override
public void setReferenceNumber(String referenceNumber) {
this.referenceNumber = referenceNumber;
}
@Override
public List<FulfillmentGroupItem> getFulfillmentGroupItems() {
return fulfillmentGroupItems;
}
@Override
public List<DiscreteOrderItem> getDiscreteOrderItems() {
List<DiscreteOrderItem> discreteOrderItems = new ArrayList<DiscreteOrderItem>();
for (FulfillmentGroupItem fgItem : fulfillmentGroupItems) {
OrderItem orderItem = fgItem.getOrderItem();
if (orderItem instanceof BundleOrderItem) {
BundleOrderItemImpl bundleOrderItem = (BundleOrderItemImpl)orderItem;
for (DiscreteOrderItem discreteOrderItem : bundleOrderItem.getDiscreteOrderItems()) {
discreteOrderItems.add(discreteOrderItem);
}
} else if (orderItem instanceof DiscreteOrderItem) {
DiscreteOrderItem discreteOrderItem = (DiscreteOrderItem)orderItem;
discreteOrderItems.add(discreteOrderItem);
}
}
return discreteOrderItems;
}
@Override
public void setFulfillmentGroupItems(List<FulfillmentGroupItem> fulfillmentGroupItems) {
this.fulfillmentGroupItems = fulfillmentGroupItems;
}
@Override
public void addFulfillmentGroupItem(FulfillmentGroupItem fulfillmentGroupItem) {
if (this.fulfillmentGroupItems == null) {
this.fulfillmentGroupItems = new Vector<FulfillmentGroupItem>();
}
this.fulfillmentGroupItems.add(fulfillmentGroupItem);
}
@Override
public Address getAddress() {
return address;
}
@Override
public void setAddress(Address address) {
this.address = address;
}
@Override
public Phone getPhone() {
return phone;
}
@Override
public void setPhone(Phone phone) {
this.phone = phone;
}
@Override
@Deprecated
public String getMethod() {
return method;
}
@Override
@Deprecated
public void setMethod(String fulfillmentMethod) {
this.method = fulfillmentMethod;
}
@Override
public Money getRetailFulfillmentPrice() {
return retailFulfillmentPrice == null ? null :
BroadleafCurrencyUtils.getMoney(retailFulfillmentPrice, getOrder().getCurrency());
}
@Override
public void setRetailFulfillmentPrice(Money retailFulfillmentPrice) {
this.retailFulfillmentPrice = Money.toAmount(retailFulfillmentPrice);
}
@Override
public Money getRetailShippingPrice() {
return getRetailFulfillmentPrice();
}
@Override
public void setRetailShippingPrice(Money retailShippingPrice) {
setRetailFulfillmentPrice(retailShippingPrice);
}
@Override
public FulfillmentType getType() {
return FulfillmentType.getInstance(type);
}
@Override
public void setType(FulfillmentType type) {
this.type = type == null ? null : type.getType();
}
@Override
public void addCandidateFulfillmentGroupOffer(CandidateFulfillmentGroupOffer candidateOffer) {
candidateOffers.add(candidateOffer);
}
@Override
public List<CandidateFulfillmentGroupOffer> getCandidateFulfillmentGroupOffers() {
return candidateOffers;
}
@Override
public void setCandidateFulfillmentGroupOffer(List<CandidateFulfillmentGroupOffer> candidateOffers) {
this.candidateOffers = candidateOffers;
}
@Override
public void removeAllCandidateOffers() {
if (candidateOffers != null) {
for (CandidateFulfillmentGroupOffer offer : candidateOffers) {
offer.setFulfillmentGroup(null);
}
candidateOffers.clear();
}
}
@Override
public List<FulfillmentGroupAdjustment> getFulfillmentGroupAdjustments() {
return this.fulfillmentGroupAdjustments;
}
@Override
public Money getFulfillmentGroupAdjustmentsValue() {
Money adjustmentsValue = BroadleafCurrencyUtils.getMoney(BigDecimal.ZERO, getOrder().getCurrency());
for (FulfillmentGroupAdjustment adjustment : fulfillmentGroupAdjustments) {
adjustmentsValue = adjustmentsValue.add(adjustment.getValue());
}
return adjustmentsValue;
}
@Override
public void removeAllAdjustments() {
if (fulfillmentGroupAdjustments != null) {
for (FulfillmentGroupAdjustment adjustment : fulfillmentGroupAdjustments) {
adjustment.setFulfillmentGroup(null);
}
fulfillmentGroupAdjustments.clear();
}
}
@Override
public void setFulfillmentGroupAdjustments(List<FulfillmentGroupAdjustment> fulfillmentGroupAdjustments) {
this.fulfillmentGroupAdjustments = fulfillmentGroupAdjustments;
}
@Override
public Money getSaleFulfillmentPrice() {
return saleFulfillmentPrice == null ? null : BroadleafCurrencyUtils.getMoney(saleFulfillmentPrice,
getOrder().getCurrency());
}
@Override
public void setSaleFulfillmentPrice(Money saleFulfillmentPrice) {
this.saleFulfillmentPrice = Money.toAmount(saleFulfillmentPrice);
}
@Override
public Money getSaleShippingPrice() {
return getSaleFulfillmentPrice();
}
@Override
public void setSaleShippingPrice(Money saleShippingPrice) {
setSaleFulfillmentPrice(saleShippingPrice);
}
@Override
public Money getFulfillmentPrice() {
return fulfillmentPrice == null ? null : BroadleafCurrencyUtils.getMoney(fulfillmentPrice,
getOrder().getCurrency());
}
@Override
public void setFulfillmentPrice(Money fulfillmentPrice) {
this.fulfillmentPrice = Money.toAmount(fulfillmentPrice);
}
@Override
public Money getShippingPrice() {
return getFulfillmentPrice();
}
@Override
public void setShippingPrice(Money shippingPrice) {
setFulfillmentPrice(shippingPrice);
}
@Override
public List<TaxDetail> getTaxes() {
return taxes;
}
@Override
public void setTaxes(List<TaxDetail> taxes) {
this.taxes = taxes;
}
@Override
public Money getTotalTax() {
return totalTax == null ? null : BroadleafCurrencyUtils.getMoney(totalTax, getOrder().getCurrency());
}
@Override
public void setTotalTax(Money totalTax) {
this.totalTax = Money.toAmount(totalTax);
}
@Override
public Money getTotalItemTax() {
return totalItemTax == null ? null : BroadleafCurrencyUtils.getMoney(totalItemTax, getOrder().getCurrency());
}
@Override
public void setTotalItemTax(Money totalItemTax) {
this.totalItemTax = Money.toAmount(totalItemTax);
}
@Override
public Money getTotalFeeTax() {
return totalFeeTax == null ? null : BroadleafCurrencyUtils.getMoney(totalFeeTax, getOrder().getCurrency());
}
@Override
public void setTotalFeeTax(Money totalFeeTax) {
this.totalFeeTax = Money.toAmount(totalFeeTax);
}
@Override
public Money getTotalFulfillmentGroupTax() {
return totalFulfillmentGroupTax == null ? null : BroadleafCurrencyUtils.getMoney(totalFulfillmentGroupTax,
getOrder().getCurrency());
}
@Override
public void setTotalFulfillmentGroupTax(Money totalFulfillmentGroupTax) {
this.totalFulfillmentGroupTax = Money.toAmount(totalFulfillmentGroupTax);
}
@Override
public String getDeliveryInstruction() {
return deliveryInstruction;
}
@Override
public void setDeliveryInstruction(String deliveryInstruction) {
this.deliveryInstruction = deliveryInstruction;
}
@Override
public PersonalMessage getPersonalMessage() {
return personalMessage;
}
@Override
public void setPersonalMessage(PersonalMessage personalMessage) {
this.personalMessage = personalMessage;
}
@Override
public boolean isPrimary() {
return primary;
}
@Override
public void setPrimary(boolean primary) {
this.primary = primary;
}
@Override
public Money getMerchandiseTotal() {
return merchandiseTotal == null ? null : BroadleafCurrencyUtils.getMoney(merchandiseTotal,
getOrder().getCurrency());
}
@Override
public void setMerchandiseTotal(Money merchandiseTotal) {
this.merchandiseTotal = Money.toAmount(merchandiseTotal);
}
@Override
public Money getTotal() {
return total == null ? null : BroadleafCurrencyUtils.getMoney(total, getOrder().getCurrency());
}
@Override
public void setTotal(Money orderTotal) {
this.total = Money.toAmount(orderTotal);
}
@Override
public FulfillmentGroupStatusType getStatus() {
return FulfillmentGroupStatusType.getInstance(status);
}
@Override
public void setStatus(FulfillmentGroupStatusType status) {
this.status = status.getType();
}
@Override
public List<FulfillmentGroupFee> getFulfillmentGroupFees() {
return fulfillmentGroupFees;
}
@Override
public void setFulfillmentGroupFees(List<FulfillmentGroupFee> fulfillmentGroupFees) {
this.fulfillmentGroupFees = fulfillmentGroupFees;
}
@Override
public void addFulfillmentGroupFee(FulfillmentGroupFee fulfillmentGroupFee) {
if (fulfillmentGroupFees == null) {
fulfillmentGroupFees = new ArrayList<FulfillmentGroupFee>();
}
fulfillmentGroupFees.add(fulfillmentGroupFee);
}
@Override
public void removeAllFulfillmentGroupFees() {
if (fulfillmentGroupFees != null) {
fulfillmentGroupFees.clear();
}
}
@Override
public Boolean isShippingPriceTaxable() {
return isShippingPriceTaxable;
}
@Override
public void setIsShippingPriceTaxable(Boolean isShippingPriceTaxable) {
this.isShippingPriceTaxable = isShippingPriceTaxable;
}
@Override
public void setSequence(Integer sequence) {
this.sequence = sequence;
}
@Override
public Integer getSequence() {
return this.sequence;
}
@Override
@Deprecated
public String getService() {
return service;
}
@Override
@Deprecated
public void setService(String service) {
this.service = service;
}
@Override
public String getCurrencyCode() {
if (getOrder().getCurrency() != null) {
return getOrder().getCurrency().getCurrencyCode();
}
return null;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((address == null) ? 0 : address.hashCode());
result = prime * result + ((fulfillmentGroupItems == null) ? 0 : fulfillmentGroupItems.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
FulfillmentGroupImpl other = (FulfillmentGroupImpl) obj;
if (id != null && other.id != null) {
return id.equals(other.id);
}
if (address == null) {
if (other.address != null) {
return false;
}
} else if (!address.equals(other.address)) {
return false;
}
if (fulfillmentGroupItems == null) {
if (other.fulfillmentGroupItems != null) {
return false;
}
} else if (!fulfillmentGroupItems.equals(other.fulfillmentGroupItems)) {
return false;
}
return true;
}
public static class Presentation {
public static class Tab {
public static class Name {
public static final String Items = "FulfillmentGroupImpl_Items_Tab";
public static final String Pricing = "FulfillmentGroupImpl_Pricing_Tab";
public static final String Address = "FulfillmentGroupImpl_Address_Tab";
public static final String Advanced = "FulfillmentGroupImpl_Advanced_Tab";
}
public static class Order {
public static final int Items = 2000;
public static final int Pricing = 3000;
public static final int Address = 4000;
public static final int Advanced = 5000;
}
}
public static class Group {
public static class Name {
public static final String Pricing = "FulfillmentGroupImpl_Pricing";
}
public static class Order {
public static final int General = 1000;
public static final int Pricing = 2000;
}
}
public static class FieldOrder {
public static final int REFNUMBER = 3000;
public static final int STATUS = 4000;
public static final int TYPE = 5000;
public static final int DELIVERINSTRUCTION = 6000;
public static final int PRIMARY = 7000;
public static final int PHONE = 8000;
public static final int RETAIL = 1000;
public static final int SALE = 2000;
public static final int PRICE = 3000;
public static final int ITEMTAX = 4000;
public static final int FEETAX = 5000;
public static final int FGTAX = 6000;
public static final int TOTALTAX = 7000;
public static final int MERCHANDISETOTAL = 8000;
public static final int TOTAL = 9000;
public static final int TAXABLE = 10000;
}
}
}
| 1no label
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_domain_FulfillmentGroupImpl.java
|
438 |
@RunWith(HazelcastParallelClassRunner.class)
@Category(QuickTest.class)
public class ProxyEqualityTest {
private static final String atomicName = "foo";
private static final String groupAName = "GroupA";
private static final String groupBName = "GroupB";
static HazelcastInstance client1GroupA;
static HazelcastInstance client2GroupA;
static HazelcastInstance server1GroupA;
static HazelcastInstance client1GroupB;
static HazelcastInstance server1GroupB;
@BeforeClass
public static void setup() throws Exception {
Config config = new Config();
config.getGroupConfig().setName(groupAName);
server1GroupA = Hazelcast.newHazelcastInstance(config);
ClientConfig clientConfig = new ClientConfig();
clientConfig.setGroupConfig( new GroupConfig(config.getGroupConfig().getName()) );
client1GroupA = HazelcastClient.newHazelcastClient(clientConfig);
client2GroupA = HazelcastClient.newHazelcastClient(clientConfig);
//setup Group B
config = new Config();
config.getGroupConfig().setName(groupBName);
server1GroupB = Hazelcast.newHazelcastInstance(config);
clientConfig = new ClientConfig();
clientConfig.setGroupConfig( new GroupConfig(config.getGroupConfig().getName()) );
client1GroupB = HazelcastClient.newHazelcastClient(clientConfig);
}
@AfterClass
public static void cleanup() throws Exception {
HazelcastClient.shutdownAll();
Hazelcast.shutdownAll();
}
@Test
public void testTwoClientProxiesFromTheSameInstanceAreEquals() {
ClientProxy ref1 = (ClientProxy) client1GroupA.getAtomicLong(atomicName);
ClientProxy ref2 = (ClientProxy) client1GroupA.getAtomicLong(atomicName);
assertEquals(ref1, ref2);
}
@Test
public void testProxiesAreCached() {
ClientProxy ref1 = (ClientProxy) client1GroupA.getAtomicLong(atomicName);
ClientProxy ref2 = (ClientProxy) client1GroupA.getAtomicLong(atomicName);
assertSame(ref1, ref2);
}
@Test
public void testTwoClientProxiesFromDifferentInstancesAreNotEquals() {
ClientProxy ref1 = (ClientProxy) client1GroupA.getAtomicLong(atomicName);
ClientProxy ref2 = (ClientProxy) client1GroupB.getAtomicLong(atomicName);
assertNotEquals(ref1, ref2);
}
@Test
public void testTwoClientProxiesFromTwoDifferentClientsConnectedToTheSameInstanceAreNotEquals() {
ClientProxy ref1 = (ClientProxy) client1GroupA.getAtomicLong(atomicName);
ClientProxy ref2 = (ClientProxy) client2GroupA.getAtomicLong(atomicName);
assertNotEquals(ref1, ref2);
}
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_proxy_ProxyEqualityTest.java
|
1,020 |
@Entity
@EntityListeners(value = { AuditableListener.class, OrderPersistedEntityListener.class })
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_ORDER")
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@AdminPresentationMergeOverrides(
{
@AdminPresentationMergeOverride(name = "", mergeEntries =
@AdminPresentationMergeEntry(propertyType = PropertyType.AdminPresentation.READONLY,
booleanOverrideValue = true))
}
)
@AdminPresentationClass(populateToOneFields = PopulateToOneFieldsEnum.TRUE, friendlyName = "OrderImpl_baseOrder")
public class OrderImpl implements Order, AdminMainEntity, CurrencyCodeIdentifiable {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(generator = "OrderId")
@GenericGenerator(
name="OrderId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="OrderImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.core.order.domain.OrderImpl")
}
)
@Column(name = "ORDER_ID")
protected Long id;
@Embedded
protected Auditable auditable = new Auditable();
@Column(name = "NAME")
@Index(name="ORDER_NAME_INDEX", columnNames={"NAME"})
@AdminPresentation(friendlyName = "OrderImpl_Order_Name", group = Presentation.Group.Name.General,
order=Presentation.FieldOrder.NAME, prominent=true, groupOrder = Presentation.Group.Order.General,
gridOrder = 2000)
protected String name;
@ManyToOne(targetEntity = CustomerImpl.class, optional=false)
@JoinColumn(name = "CUSTOMER_ID", nullable = false)
@Index(name="ORDER_CUSTOMER_INDEX", columnNames={"CUSTOMER_ID"})
@AdminPresentation(friendlyName = "OrderImpl_Customer", group = Presentation.Group.Name.General,
order=Presentation.FieldOrder.CUSTOMER, groupOrder = Presentation.Group.Order.General)
@AdminPresentationToOneLookup()
protected Customer customer;
@Column(name = "ORDER_STATUS")
@Index(name="ORDER_STATUS_INDEX", columnNames={"ORDER_STATUS"})
@AdminPresentation(friendlyName = "OrderImpl_Order_Status", group = Presentation.Group.Name.General,
order=Presentation.FieldOrder.STATUS, prominent=true, fieldType=SupportedFieldType.BROADLEAF_ENUMERATION,
broadleafEnumeration="org.broadleafcommerce.core.order.service.type.OrderStatus",
groupOrder = Presentation.Group.Order.General, gridOrder = 3000)
protected String status;
@Column(name = "TOTAL_TAX", precision=19, scale=5)
@AdminPresentation(friendlyName = "OrderImpl_Order_Total_Tax", group = Presentation.Group.Name.General,
order=Presentation.FieldOrder.TOTALTAX, fieldType=SupportedFieldType.MONEY,
groupOrder = Presentation.Group.Order.General)
protected BigDecimal totalTax;
@Column(name = "TOTAL_SHIPPING", precision=19, scale=5)
@AdminPresentation(friendlyName = "OrderImpl_Order_Total_Shipping", group = Presentation.Group.Name.General,
order=Presentation.FieldOrder.TOTALFGCHARGES, fieldType=SupportedFieldType.MONEY,
groupOrder = Presentation.Group.Order.General)
protected BigDecimal totalFulfillmentCharges;
@Column(name = "ORDER_SUBTOTAL", precision=19, scale=5)
@AdminPresentation(friendlyName = "OrderImpl_Order_Subtotal", group = Presentation.Group.Name.General,
order=Presentation.FieldOrder.SUBTOTAL, fieldType=SupportedFieldType.MONEY,prominent=true,
groupOrder = Presentation.Group.Order.General,
gridOrder = 4000)
protected BigDecimal subTotal;
@Column(name = "ORDER_TOTAL", precision=19, scale=5)
@AdminPresentation(friendlyName = "OrderImpl_Order_Total", group = Presentation.Group.Name.General,
order=Presentation.FieldOrder.TOTAL, fieldType= SupportedFieldType.MONEY,
groupOrder = Presentation.Group.Order.General)
protected BigDecimal total;
@Column(name = "SUBMIT_DATE")
@AdminPresentation(friendlyName = "OrderImpl_Order_Submit_Date", group = Presentation.Group.Name.General,
order=Presentation.FieldOrder.SUBMITDATE, groupOrder = Presentation.Group.Order.General, prominent = true,
gridOrder = 5000)
protected Date submitDate;
@Column(name = "ORDER_NUMBER")
@Index(name="ORDER_NUMBER_INDEX", columnNames={"ORDER_NUMBER"})
@AdminPresentation(friendlyName = "OrderImpl_Order_Number", group = Presentation.Group.Name.General,
order=Presentation.FieldOrder.ORDERNUMBER, prominent=true, groupOrder = Presentation.Group.Order.General,
gridOrder = 1000)
private String orderNumber;
@Column(name = "EMAIL_ADDRESS")
@Index(name="ORDER_EMAIL_INDEX", columnNames={"EMAIL_ADDRESS"})
@AdminPresentation(friendlyName = "OrderImpl_Order_Email_Address", group = Presentation.Group.Name.General,
order=Presentation.FieldOrder.EMAILADDRESS, groupOrder = Presentation.Group.Order.General)
protected String emailAddress;
@OneToMany(mappedBy = "order", targetEntity = OrderItemImpl.class, cascade = {CascadeType.ALL})
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@AdminPresentationCollection(friendlyName="OrderImpl_Order_Items",
tab = Presentation.Tab.Name.OrderItems, tabOrder = Presentation.Tab.Order.OrderItems)
protected List<OrderItem> orderItems = new ArrayList<OrderItem>();
@OneToMany(mappedBy = "order", targetEntity = FulfillmentGroupImpl.class, cascade = {CascadeType.ALL})
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@AdminPresentationCollection(friendlyName="OrderImpl_Fulfillment_Groups",
tab = Presentation.Tab.Name.FulfillmentGroups, tabOrder = Presentation.Tab.Order.FulfillmentGroups)
protected List<FulfillmentGroup> fulfillmentGroups = new ArrayList<FulfillmentGroup>();
@OneToMany(mappedBy = "order", targetEntity = OrderAdjustmentImpl.class, cascade = { CascadeType.ALL },
orphanRemoval = true)
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@AdminPresentationCollection(friendlyName="OrderImpl_Adjustments",
tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced,
order = Presentation.FieldOrder.ADJUSTMENTS)
protected List<OrderAdjustment> orderAdjustments = new ArrayList<OrderAdjustment>();
@ManyToMany(fetch = FetchType.LAZY, targetEntity = OfferCodeImpl.class)
@JoinTable(name = "BLC_ORDER_OFFER_CODE_XREF", joinColumns = @JoinColumn(name = "ORDER_ID",
referencedColumnName = "ORDER_ID"), inverseJoinColumns = @JoinColumn(name = "OFFER_CODE_ID",
referencedColumnName = "OFFER_CODE_ID"))
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@AdminPresentationCollection(friendlyName="OrderImpl_Offer_Codes",
tab = Presentation.Tab.Name.Advanced, tabOrder = Presentation.Tab.Order.Advanced,
manyToField = "orders", order = Presentation.FieldOrder.OFFERCODES)
protected List<OfferCode> addedOfferCodes = new ArrayList<OfferCode>();
@OneToMany(mappedBy = "order", targetEntity = CandidateOrderOfferImpl.class, cascade = { CascadeType.ALL },
orphanRemoval = true)
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
protected List<CandidateOrderOffer> candidateOrderOffers = new ArrayList<CandidateOrderOffer>();
@OneToMany(mappedBy = "order", targetEntity = PaymentInfoImpl.class, cascade = { CascadeType.ALL },
orphanRemoval = true)
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@AdminPresentationCollection(friendlyName="OrderImpl_Payment_Infos",
tab = Presentation.Tab.Name.Payment, tabOrder = Presentation.Tab.Order.Payment)
protected List<PaymentInfo> paymentInfos = new ArrayList<PaymentInfo>();
@ManyToMany(targetEntity=OfferInfoImpl.class)
@JoinTable(name = "BLC_ADDITIONAL_OFFER_INFO", joinColumns = @JoinColumn(name = "BLC_ORDER_ORDER_ID",
referencedColumnName = "ORDER_ID"), inverseJoinColumns = @JoinColumn(name = "OFFER_INFO_ID",
referencedColumnName = "OFFER_INFO_ID"))
@MapKeyJoinColumn(name = "OFFER_ID")
@MapKeyClass(OfferImpl.class)
@Cascade(value={org.hibernate.annotations.CascadeType.ALL, org.hibernate.annotations.CascadeType.DELETE_ORPHAN})
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@BatchSize(size = 50)
protected Map<Offer, OfferInfo> additionalOfferInformation = new HashMap<Offer, OfferInfo>();
@OneToMany(mappedBy = "order", targetEntity = OrderAttributeImpl.class, cascade = { CascadeType.ALL },
orphanRemoval = true)
@Cache(usage=CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region="blOrderElements")
@MapKey(name="name")
@AdminPresentationMap(friendlyName = "OrderImpl_Attributes",
forceFreeFormKeys = true, keyPropertyFriendlyName = "OrderImpl_Attributes_Key_Name"
)
protected Map<String,OrderAttribute> orderAttributes = new HashMap<String,OrderAttribute>();
@ManyToOne(targetEntity = BroadleafCurrencyImpl.class)
@JoinColumn(name = "CURRENCY_CODE")
@AdminPresentation(excluded = true)
protected BroadleafCurrency currency;
@ManyToOne(targetEntity = LocaleImpl.class)
@JoinColumn(name = "LOCALE_CODE")
@AdminPresentation(excluded = true)
protected Locale locale;
@Override
public Long getId() {
return id;
}
@Override
public void setId(Long id) {
this.id = id;
}
@Override
public Auditable getAuditable() {
return auditable;
}
@Override
public void setAuditable(Auditable auditable) {
this.auditable = auditable;
}
@Override
public Money getSubTotal() {
return subTotal == null ? null : BroadleafCurrencyUtils.getMoney(subTotal, getCurrency());
}
@Override
public void setSubTotal(Money subTotal) {
this.subTotal = Money.toAmount(subTotal);
}
@Override
public Money calculateSubTotal() {
Money calculatedSubTotal = BroadleafCurrencyUtils.getMoney(getCurrency());
for (OrderItem orderItem : orderItems) {
calculatedSubTotal = calculatedSubTotal.add(orderItem.getTotalPrice());
}
return calculatedSubTotal;
}
@Override
public void assignOrderItemsFinalPrice() {
for (OrderItem orderItem : orderItems) {
orderItem.assignFinalPrice();
}
}
@Override
public Money getTotal() {
return total == null ? null : BroadleafCurrencyUtils.getMoney(total, getCurrency());
}
@Override
public void setTotal(Money orderTotal) {
this.total = Money.toAmount(orderTotal);
}
@Override
public Money getRemainingTotal() {
Money myTotal = getTotal();
if (myTotal == null) {
return null;
}
Money totalPayments = BroadleafCurrencyUtils.getMoney(BigDecimal.ZERO, getCurrency());
for (PaymentInfo pi : getPaymentInfos()) {
if (pi.getAmount() != null) {
totalPayments = totalPayments.add(pi.getAmount());
}
}
return myTotal.subtract(totalPayments);
}
@Override
public Money getCapturedTotal() {
Money totalCaptured = BroadleafCurrencyUtils.getMoney(BigDecimal.ZERO, getCurrency());
for (PaymentInfo pi : getPaymentInfos()) {
totalCaptured = totalCaptured.add(pi.getPaymentCapturedAmount());
}
return totalCaptured;
}
@Override
public Date getSubmitDate() {
return submitDate;
}
@Override
public void setSubmitDate(Date submitDate) {
this.submitDate = submitDate;
}
@Override
public Customer getCustomer() {
return customer;
}
@Override
public void setCustomer(Customer customer) {
this.customer = customer;
}
@Override
public OrderStatus getStatus() {
return OrderStatus.getInstance(status);
}
@Override
public void setStatus(OrderStatus status) {
this.status = status.getType();
}
@Override
public List<OrderItem> getOrderItems() {
return orderItems;
}
@Override
public void setOrderItems(List<OrderItem> orderItems) {
this.orderItems = orderItems;
}
@Override
public void addOrderItem(OrderItem orderItem) {
orderItems.add(orderItem);
}
@Override
public List<FulfillmentGroup> getFulfillmentGroups() {
return fulfillmentGroups;
}
@Override
public void setFulfillmentGroups(List<FulfillmentGroup> fulfillmentGroups) {
this.fulfillmentGroups = fulfillmentGroups;
}
@Override
public void setCandidateOrderOffers(List<CandidateOrderOffer> candidateOrderOffers) {
this.candidateOrderOffers = candidateOrderOffers;
}
@Override
public List<CandidateOrderOffer> getCandidateOrderOffers() {
return candidateOrderOffers;
}
@Override
public String getName() {
return name;
}
@Override
public void setName(String name) {
this.name = name;
}
@Override
public Money getTotalTax() {
return totalTax == null ? null : BroadleafCurrencyUtils.getMoney(totalTax, getCurrency());
}
@Override
public void setTotalTax(Money totalTax) {
this.totalTax = Money.toAmount(totalTax);
}
@Override
public Money getTotalShipping() {
return getTotalFulfillmentCharges();
}
@Override
public void setTotalShipping(Money totalShipping) {
setTotalFulfillmentCharges(totalShipping);
}
@Override
public Money getTotalFulfillmentCharges() {
return totalFulfillmentCharges == null ? null : BroadleafCurrencyUtils.getMoney(totalFulfillmentCharges,
getCurrency());
}
@Override
public void setTotalFulfillmentCharges(Money totalFulfillmentCharges) {
this.totalFulfillmentCharges = Money.toAmount(totalFulfillmentCharges);
}
@Override
public List<PaymentInfo> getPaymentInfos() {
return paymentInfos;
}
@Override
public void setPaymentInfos(List<PaymentInfo> paymentInfos) {
this.paymentInfos = paymentInfos;
}
@Override
public boolean hasCategoryItem(String categoryName) {
for (OrderItem orderItem : orderItems) {
if(orderItem.isInCategory(categoryName)) {
return true;
}
}
return false;
}
@Override
public List<OrderAdjustment> getOrderAdjustments() {
return this.orderAdjustments;
}
protected void setOrderAdjustments(List<OrderAdjustment> orderAdjustments) {
this.orderAdjustments = orderAdjustments;
}
@Override
public List<DiscreteOrderItem> getDiscreteOrderItems() {
List<DiscreteOrderItem> discreteOrderItems = new ArrayList<DiscreteOrderItem>();
for (OrderItem orderItem : orderItems) {
if (orderItem instanceof BundleOrderItem) {
BundleOrderItemImpl bundleOrderItem = (BundleOrderItemImpl)orderItem;
for (DiscreteOrderItem discreteOrderItem : bundleOrderItem.getDiscreteOrderItems()) {
discreteOrderItems.add(discreteOrderItem);
}
} else if (orderItem instanceof DiscreteOrderItem) {
DiscreteOrderItem discreteOrderItem = (DiscreteOrderItem) orderItem;
discreteOrderItems.add(discreteOrderItem);
}
}
return discreteOrderItems;
}
@Override
public boolean containsSku(Sku sku) {
for (OrderItem orderItem : getOrderItems()) {
if (orderItem instanceof DiscreteOrderItem) {
DiscreteOrderItem discreteOrderItem = (DiscreteOrderItem) orderItem;
if (discreteOrderItem.getSku() != null && discreteOrderItem.getSku().equals(sku)) {
return true;
}
} else if (orderItem instanceof BundleOrderItem) {
BundleOrderItem bundleOrderItem = (BundleOrderItem) orderItem;
if (bundleOrderItem.getSku() != null && bundleOrderItem.getSku().equals(sku)) {
return true;
}
}
}
return false;
}
@Override
public List<OfferCode> getAddedOfferCodes() {
return addedOfferCodes;
}
@Override
public String getOrderNumber() {
return orderNumber;
}
@Override
public void setOrderNumber(String orderNumber) {
this.orderNumber = orderNumber;
}
@Override
public String getFulfillmentStatus() {
return null;
}
@Override
public String getEmailAddress() {
return emailAddress;
}
@Override
public void setEmailAddress(String emailAddress) {
this.emailAddress = emailAddress;
}
@Override
public Map<Offer, OfferInfo> getAdditionalOfferInformation() {
return additionalOfferInformation;
}
@Override
public void setAdditionalOfferInformation(Map<Offer, OfferInfo> additionalOfferInformation) {
this.additionalOfferInformation = additionalOfferInformation;
}
@Override
public Money getItemAdjustmentsValue() {
Money itemAdjustmentsValue = BroadleafCurrencyUtils.getMoney(BigDecimal.ZERO, getCurrency());
for (OrderItem orderItem : orderItems) {
itemAdjustmentsValue = itemAdjustmentsValue.add(orderItem.getTotalAdjustmentValue());
}
return itemAdjustmentsValue;
}
@Override
public Money getFulfillmentGroupAdjustmentsValue() {
Money adjustmentValue = BroadleafCurrencyUtils.getMoney(BigDecimal.ZERO, getCurrency());
for (FulfillmentGroup fulfillmentGroup : fulfillmentGroups) {
adjustmentValue = adjustmentValue.add(fulfillmentGroup.getFulfillmentGroupAdjustmentsValue());
}
return adjustmentValue;
}
@Override
public Money getOrderAdjustmentsValue() {
Money orderAdjustmentsValue = BroadleafCurrencyUtils.getMoney(BigDecimal.ZERO, getCurrency());
for (OrderAdjustment orderAdjustment : orderAdjustments) {
orderAdjustmentsValue = orderAdjustmentsValue.add(orderAdjustment.getValue());
}
return orderAdjustmentsValue;
}
@Override
public Money getTotalAdjustmentsValue() {
Money totalAdjustmentsValue = getItemAdjustmentsValue();
totalAdjustmentsValue = totalAdjustmentsValue.add(getOrderAdjustmentsValue());
totalAdjustmentsValue = totalAdjustmentsValue.add(getFulfillmentGroupAdjustmentsValue());
return totalAdjustmentsValue;
}
@Override
public boolean updatePrices() {
boolean updated = false;
for (OrderItem orderItem : orderItems) {
if (orderItem.updateSaleAndRetailPrices()) {
updated = true;
}
}
return updated;
}
@Override
public boolean finalizeItemPrices() {
boolean updated = false;
for (OrderItem orderItem : orderItems) {
orderItem.finalizePrice();
}
return updated;
}
@Override
public Map<String, OrderAttribute> getOrderAttributes() {
return orderAttributes;
}
@Override
public void setOrderAttributes(Map<String, OrderAttribute> orderAttributes) {
this.orderAttributes = orderAttributes;
}
@Override
@Deprecated
public void addAddedOfferCode(OfferCode offerCode) {
addOfferCode(offerCode);
}
@Override
public void addOfferCode(OfferCode offerCode) {
getAddedOfferCodes().add(offerCode);
}
@Override
public BroadleafCurrency getCurrency() {
return currency;
}
@Override
public void setCurrency(BroadleafCurrency currency) {
this.currency = currency;
}
@Override
public Locale getLocale() {
return locale;
}
@Override
public void setLocale(Locale locale) {
this.locale = locale;
}
@Override
public int getItemCount() {
int count = 0;
for (DiscreteOrderItem doi : getDiscreteOrderItems()) {
count += doi.getQuantity();
}
return count;
}
@Override
public boolean getHasOrderAdjustments() {
Money orderAdjustmentsValue = getOrderAdjustmentsValue();
if (orderAdjustmentsValue != null) {
return (orderAdjustmentsValue.compareTo(BigDecimal.ZERO) != 0);
}
return false;
}
@Override
public String getMainEntityName() {
String customerName = null;
String orderNumber = getOrderNumber();
if (!StringUtils.isEmpty(getCustomer().getFirstName()) && !StringUtils.isEmpty(getCustomer().getLastName())) {
customerName = getCustomer().getFirstName() + " " + getCustomer().getLastName();
}
if (!StringUtils.isEmpty(orderNumber) && !StringUtils.isEmpty(customerName)) {
return orderNumber + " - " + customerName;
}
if (!StringUtils.isEmpty(orderNumber)) {
return orderNumber;
}
if (!StringUtils.isEmpty(customerName)) {
return customerName;
}
return "";
}
@Override
public String getCurrencyCode() {
if (getCurrency() != null) {
return getCurrency().getCurrencyCode();
}
return null;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
OrderImpl other = (OrderImpl) obj;
if (id != null && other.id != null) {
return id.equals(other.id);
}
if (customer == null) {
if (other.customer != null) {
return false;
}
} else if (!customer.equals(other.customer)) {
return false;
}
Date myDateCreated = auditable != null ? auditable.getDateCreated() : null;
Date otherDateCreated = other.auditable != null ? other.auditable.getDateCreated() : null;
if (myDateCreated == null) {
if (otherDateCreated != null) {
return false;
}
} else if (!myDateCreated.equals(otherDateCreated)) {
return false;
}
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int result = super.hashCode();
result = prime * result + ((customer == null) ? 0 : customer.hashCode());
Date myDateCreated = auditable != null ? auditable.getDateCreated() : null;
result = prime * result + ((myDateCreated == null) ? 0 : myDateCreated.hashCode());
return result;
}
public static class Presentation {
public static class Tab {
public static class Name {
public static final String OrderItems = "OrderImpl_Order_Items_Tab";
public static final String FulfillmentGroups = "OrderImpl_Fulfillment_Groups_Tab";
public static final String Payment = "OrderImpl_Payment_Tab";
public static final String Advanced = "OrderImpl_Advanced_Tab";
}
public static class Order {
public static final int OrderItems = 2000;
public static final int FulfillmentGroups = 3000;
public static final int Payment = 4000;
public static final int Advanced = 5000;
}
}
public static class Group {
public static class Name {
public static final String General = "OrderImpl_Order";
}
public static class Order {
public static final int General = 1000;
}
}
public static class FieldOrder {
public static final int NAME = 1000;
public static final int CUSTOMER = 2000;
public static final int TOTAL = 3000;
public static final int STATUS = 4000;
public static final int SUBTOTAL = 5000;
public static final int ORDERNUMBER = 6000;
public static final int TOTALTAX = 7000;
public static final int TOTALFGCHARGES = 8000;
public static final int SUBMITDATE = 9000;
public static final int EMAILADDRESS = 10000;
public static final int ADJUSTMENTS = 1000;
public static final int OFFERCODES = 2000;
}
}
}
| 1no label
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_order_domain_OrderImpl.java
|
196 |
public interface MemoryLockerLinux extends Library {
// http://www-numi.fnal.gov/offline_software/srt_public_context/WebDocs/Errors/unix_system_errors.html
// #define EPERM 1 /* Operation not permitted */ The calling process does not have the appropriate privilege to perform the
// requested operation.
static final int EPERM = 1;
// #define EAGAIN 11 /* Try again */ Some or all of the memory identified by the operation could not be locked when the call was
// made.
static final int EAGAIN = 11;
// #define ENOMEM 12 /* Out of memory */ Locking all of the pages currently mapped into the address space of the process would
// exceed an implementation-dependent limit on the amount of memory that the process may lock.
static final int ENOMEM = 12;
// #define EINVAL 22 /* Invalid argument */ The flags argument is zero, or includes unimplemented flags.
static final int EINVAL = 22;
// #define ENOSYS 38 /* Function not implemented */ The implementation does not support this memory locking interface.
static final int ENOSYS = 38;
// Linux/include/asm-generic/mman.h
//
// 16 #define MCL_CURRENT 1 /* lock all current mappings */
// 17 #define MCL_FUTURE 2 /* lock all future mappings */
static final int LOCK_CURRENT_MEMORY = 1;
static final int LOCK_ALL_MEMORY_DURING_APPLICATION_LIFE = 2;
MemoryLockerLinux INSTANCE = (MemoryLockerLinux) Native.loadLibrary("c", MemoryLockerLinux.class);
/**
* This method locks all memory under a *nix operating system using the kernel function {@code mlockall}. Details of this
* function can be found at http://www.kernel.org/doc/man-pages/online/pages/man2/mlock.2.html
*
* @param flags
* determines whether memory is locked only at startup or for the whole life of the application.
*
* @return Upon successful completion, the mlockall() function returns a value of zero. Otherwise, no additional memory is locked,
* and the function returns a value of -1 and sets errno to indicate the error. The effect of failure of mlockall() on
* previously existing locks in the address space is unspecified. If it is supported by the implementation, the
* munlockall() function always returns a value of zero. Otherwise, the function returns a value of -1 and sets errno to
* indicate the error.
*/
int mlockall(int flags);
}
| 0true
|
nativeos_src_main_java_com_orientechnologies_nio_MemoryLockerLinux.java
|
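A minimal usage sketch for the JNA binding in the row above, not part of the dataset: the class name MemoryLockExample and the error handling are illustrative assumptions. It locks current and future mappings and reports failure; on Linux this only succeeds when the process has the CAP_IPC_LOCK capability or runs as root, otherwise mlockall returns -1.
import com.sun.jna.Native;

public class MemoryLockExample {
    public static void main(String[] args) {
        // Lock everything mapped now and everything mapped in the future.
        // Assumes the MemoryLockerLinux interface above is on the classpath.
        int result = MemoryLockerLinux.INSTANCE.mlockall(
                MemoryLockerLinux.LOCK_CURRENT_MEMORY
                        | MemoryLockerLinux.LOCK_ALL_MEMORY_DURING_APPLICATION_LIFE);
        if (result != 0) {
            // mlockall returns -1 on failure; the errno value (EPERM, ENOMEM, ...)
            // can be read through JNA's last-error tracking.
            System.err.println("mlockall failed, errno=" + Native.getLastError());
        }
    }
}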
1,217 |
public class DefaultPageCacheRecyclerModule extends AbstractModule {
private final Settings settings;
public DefaultPageCacheRecyclerModule(Settings settings) {
this.settings = settings;
}
@Override
protected void configure() {
bind(PageCacheRecycler.class).asEagerSingleton();
}
}
| 0true
|
src_main_java_org_elasticsearch_cache_recycler_DefaultPageCacheRecyclerModule.java
|
435 |
map.addChangeListener(new OMultiValueChangeListener<Object, String>() {
public void onAfterRecordChanged(final OMultiValueChangeEvent<Object, String> event) {
changed.value = true;
}
});
| 0true
|
core_src_test_java_com_orientechnologies_orient_core_db_record_TrackedMapTest.java
|
487 |
public class ODatabaseExport extends ODatabaseImpExpAbstract {
protected OJSONWriter writer;
protected long recordExported;
public static final int VERSION = 6;
public ODatabaseExport(final ODatabaseRecord iDatabase, final String iFileName, final OCommandOutputListener iListener)
throws IOException {
super(iDatabase, iFileName, iListener);
if (fileName == null)
throw new IllegalArgumentException("file name missing");
if (!fileName.endsWith(".gz")) {
fileName += ".gz";
}
final File f = new File(fileName);
f.mkdirs();
if (f.exists())
f.delete();
writer = new OJSONWriter(new OutputStreamWriter(new GZIPOutputStream(new FileOutputStream(fileName), 16384))); // 16KB
writer.beginObject();
iDatabase.getLevel1Cache().setEnable(false);
iDatabase.getLevel2Cache().setEnable(false);
}
public ODatabaseExport(final ODatabaseRecord iDatabase, final OutputStream iOutputStream, final OCommandOutputListener iListener)
throws IOException {
super(iDatabase, "streaming", iListener);
writer = new OJSONWriter(new OutputStreamWriter(iOutputStream));
writer.beginObject();
iDatabase.getLevel1Cache().setEnable(false);
iDatabase.getLevel2Cache().setEnable(false);
}
@Override
public ODatabaseExport setOptions(final String s) {
super.setOptions(s);
return this;
}
public ODatabaseExport exportDatabase() {
try {
listener.onMessage("\nStarted export of database '" + database.getName() + "' to " + fileName + "...");
database.getLevel1Cache().setEnable(false);
database.getLevel2Cache().setEnable(false);
long time = System.currentTimeMillis();
if (includeInfo)
exportInfo();
if (includeClusterDefinitions)
exportClusters();
if (includeSchema)
exportSchema();
if (includeRecords)
exportRecords();
if (includeIndexDefinitions)
exportIndexDefinitions();
if (includeManualIndexes)
exportManualIndexes();
listener.onMessage("\n\nDatabase export completed in " + (System.currentTimeMillis() - time) + "ms");
writer.flush();
} catch (Exception e) {
e.printStackTrace();
throw new ODatabaseExportException("Error on exporting database '" + database.getName() + "' to: " + fileName, e);
} finally {
close();
}
return this;
}
public long exportRecords() throws IOException {
long totalFoundRecords = 0;
long totalExportedRecords = 0;
int level = 1;
listener.onMessage("\nExporting records...");
writer.beginCollection(level, true, "records");
int exportedClusters = 0;
int maxClusterId = getMaxClusterId();
for (int i = 0; exportedClusters <= maxClusterId; ++i) {
String clusterName = database.getClusterNameById(i);
exportedClusters++;
long clusterExportedRecordsTot = 0;
if (clusterName != null) {
// CHECK IF THE CLUSTER IS INCLUDED
if (includeClusters != null) {
if (!includeClusters.contains(clusterName.toUpperCase()))
continue;
} else if (excludeClusters != null) {
if (excludeClusters.contains(clusterName.toUpperCase()))
continue;
}
if (excludeClusters != null && excludeClusters.contains(clusterName.toUpperCase()))
continue;
clusterExportedRecordsTot = database.countClusterElements(clusterName);
} else if (includeClusters != null && !includeClusters.isEmpty())
continue;
listener.onMessage("\n- Cluster " + (clusterName != null ? "'" + clusterName + "'" : "NULL") + " (id=" + i + ")...");
long clusterExportedRecordsCurrent = 0;
if (clusterName != null) {
ORecordInternal<?> rec = null;
try {
for (ORecordIteratorCluster<ORecordInternal<?>> it = database.browseCluster(clusterName); it.hasNext();) {
rec = it.next();
if (rec instanceof ODocument) {
// CHECK IF THE CLASS OF THE DOCUMENT IS INCLUDED
ODocument doc = (ODocument) rec;
final String className = doc.getClassName() != null ? doc.getClassName().toUpperCase() : null;
if (includeClasses != null) {
if (!includeClasses.contains(className))
continue;
} else if (excludeClasses != null) {
if (excludeClasses.contains(className))
continue;
}
} else if (includeClasses != null && !includeClasses.isEmpty())
continue;
if (exportRecord(clusterExportedRecordsTot, clusterExportedRecordsCurrent, rec))
clusterExportedRecordsCurrent++;
}
} catch (IOException e) {
OLogManager.instance().error(this, "\nError on exporting record %s because of I/O problems", e, rec.getIdentity());
// RE-THROW THE EXCEPTION UP
throw e;
} catch (OIOException e) {
OLogManager.instance().error(this, "\nError on exporting record %s because of I/O problems", e, rec.getIdentity());
// RE-THROW THE EXCEPTION UP
throw e;
} catch (Throwable t) {
if (rec != null) {
final byte[] buffer = rec.toStream();
OLogManager
.instance()
.error(
this,
"\nError on exporting record %s. It seems corrupted; size: %d bytes, raw content (as string):\n==========\n%s\n==========",
t, rec.getIdentity(), buffer.length, new String(buffer));
}
}
}
listener.onMessage("OK (records=" + clusterExportedRecordsCurrent + "/" + clusterExportedRecordsTot + ")");
totalExportedRecords += clusterExportedRecordsCurrent;
totalFoundRecords += clusterExportedRecordsTot;
}
writer.endCollection(level, true);
listener.onMessage("\n\nDone. Exported " + totalExportedRecords + " of total " + totalFoundRecords + " records\n");
return totalFoundRecords;
}
public void close() {
database.declareIntent(null);
if (writer == null)
return;
try {
writer.endObject();
writer.close();
writer = null;
} catch (IOException e) {
}
}
private void exportClusters() throws IOException {
listener.onMessage("\nExporting clusters...");
writer.beginCollection(1, true, "clusters");
int exportedClusters = 0;
int maxClusterId = getMaxClusterId();
for (int clusterId = 0; clusterId <= maxClusterId; ++clusterId) {
final String clusterName = database.getClusterNameById(clusterId);
// exclude removed clusters
if (clusterName == null)
continue;
// CHECK IF THE CLUSTER IS INCLUDED
if (includeClusters != null) {
if (!includeClusters.contains(clusterName.toUpperCase()))
continue;
} else if (excludeClusters != null) {
if (excludeClusters.contains(clusterName.toUpperCase()))
continue;
}
writer.beginObject(2, true, null);
writer.writeAttribute(0, false, "name", clusterName);
writer.writeAttribute(0, false, "id", clusterId);
writer.writeAttribute(0, false, "type", database.getClusterType(clusterName));
exportedClusters++;
writer.endObject(2, false);
}
listener.onMessage("OK (" + exportedClusters + " clusters)");
writer.endCollection(1, true);
}
protected int getMaxClusterId() {
int totalCluster = -1;
for (String clusterName : database.getClusterNames()) {
if (database.getClusterIdByName(clusterName) > totalCluster)
totalCluster = database.getClusterIdByName(clusterName);
}
return totalCluster;
}
private void exportInfo() throws IOException {
listener.onMessage("\nExporting database info...");
writer.beginObject(1, true, "info");
writer.writeAttribute(2, true, "name", database.getName().replace('\\', '/'));
writer.writeAttribute(2, true, "default-cluster-id", database.getDefaultClusterId());
writer.writeAttribute(2, true, "exporter-version", VERSION);
writer.writeAttribute(2, true, "engine-version", OConstants.ORIENT_VERSION);
final String engineBuild = OConstants.getBuildNumber();
if (engineBuild != null)
writer.writeAttribute(2, true, "engine-build", engineBuild);
writer.writeAttribute(2, true, "storage-config-version", OStorageConfiguration.CURRENT_VERSION);
writer.writeAttribute(2, true, "schema-version", OSchemaShared.CURRENT_VERSION_NUMBER);
writer.writeAttribute(2, true, "mvrbtree-version", OMVRBTreeMapProvider.CURRENT_PROTOCOL_VERSION);
writer.writeAttribute(2, true, "schemaRecordId", database.getStorage().getConfiguration().schemaRecordId);
writer.writeAttribute(2, true, "indexMgrRecordId", database.getStorage().getConfiguration().indexMgrRecordId);
writer.endObject(1, true);
listener.onMessage("OK");
}
private void exportIndexDefinitions() throws IOException {
listener.onMessage("\nExporting index info...");
writer.beginCollection(1, true, "indexes");
final OIndexManagerProxy indexManager = database.getMetadata().getIndexManager();
indexManager.reload();
final Collection<? extends OIndex<?>> indexes = indexManager.getIndexes();
for (OIndex<?> index : indexes) {
if (index.getName().equals(ODatabaseImport.EXPORT_IMPORT_MAP_NAME))
continue;
listener.onMessage("\n- Index " + index.getName() + "...");
writer.beginObject(2, true, null);
writer.writeAttribute(3, true, "name", index.getName());
writer.writeAttribute(3, true, "type", index.getType());
if (!index.getClusters().isEmpty())
writer.writeAttribute(3, true, "clustersToIndex", index.getClusters());
if (index.getDefinition() != null) {
writer.beginObject(4, true, "definition");
writer.writeAttribute(5, true, "defClass", index.getDefinition().getClass().getName());
writer.writeAttribute(5, true, "stream", index.getDefinition().toStream());
writer.endObject(4, true);
}
writer.endObject(2, true);
listener.onMessage("OK");
}
writer.endCollection(1, true);
listener.onMessage("\nOK (" + indexes.size() + " indexes)");
}
@SuppressWarnings({ "rawtypes", "unchecked" })
private void exportManualIndexes() throws IOException {
listener.onMessage("\nExporting manual indexes content...");
final OIndexManagerProxy indexManager = database.getMetadata().getIndexManager();
indexManager.reload();
final Collection<? extends OIndex<?>> indexes = indexManager.getIndexes();
ODocument exportEntry = new ODocument();
int manualIndexes = 0;
writer.beginCollection(1, true, "manualIndexes");
for (OIndex<?> index : indexes) {
if (index.getName().equals(ODatabaseImport.EXPORT_IMPORT_MAP_NAME))
continue;
if (!index.isAutomatic()) {
listener.onMessage("\n- Exporting index " + index.getName() + " ...");
writer.beginObject(2, true, null);
writer.writeAttribute(3, true, "name", index.getName());
List<ODocument> indexContent = database.query(new OSQLSynchQuery<ODocument>("select from index:" + index.getName()));
writer.beginCollection(3, true, "content");
int i = 0;
for (ODocument indexEntry : indexContent) {
if (i > 0)
writer.append(",");
final OIndexDefinition indexDefinition = index.getDefinition();
exportEntry.reset();
exportEntry.setLazyLoad(false);
if (indexDefinition instanceof ORuntimeKeyIndexDefinition
&& ((ORuntimeKeyIndexDefinition) indexDefinition).getSerializer() != null) {
final OBinarySerializer binarySerializer = ((ORuntimeKeyIndexDefinition) indexDefinition).getSerializer();
final int dataSize = binarySerializer.getObjectSize(indexEntry.field("key"));
final byte[] binaryContent = new byte[dataSize];
binarySerializer.serialize(indexEntry.field("key"), binaryContent, 0);
exportEntry.field("binary", true);
exportEntry.field("key", binaryContent);
} else {
exportEntry.field("binary", false);
exportEntry.field("key", indexEntry.field("key"));
}
exportEntry.field("rid", indexEntry.field("rid"));
i++;
writer.append(exportEntry.toJSON());
final long percent = indexContent.size() / 10;
if (percent > 0 && (i % percent) == 0)
listener.onMessage(".");
}
writer.endCollection(3, true);
writer.endObject(2, true);
listener.onMessage("OK (entries=" + index.getSize() + ")");
manualIndexes++;
}
}
writer.endCollection(1, true);
listener.onMessage("\nOK (" + manualIndexes + " manual indexes)");
}
private void exportSchema() throws IOException {
listener.onMessage("\nExporting schema...");
writer.beginObject(1, true, "schema");
OSchemaProxy s = (OSchemaProxy) database.getMetadata().getSchema();
writer.writeAttribute(2, true, "version", s.getVersion());
if (!s.getClasses().isEmpty()) {
writer.beginCollection(2, true, "classes");
final List<OClass> classes = new ArrayList<OClass>(s.getClasses());
Collections.sort(classes);
for (OClass cls : classes) {
writer.beginObject(3, true, null);
writer.writeAttribute(0, false, "name", cls.getName());
writer.writeAttribute(0, false, "default-cluster-id", cls.getDefaultClusterId());
writer.writeAttribute(0, false, "cluster-ids", cls.getClusterIds());
if (((OClassImpl) cls).getOverSizeInternal() > 1)
writer.writeAttribute(0, false, "oversize", ((OClassImpl) cls).getOverSizeInternal());
if (cls.isStrictMode())
writer.writeAttribute(0, false, "strictMode", cls.isStrictMode());
if (cls.getSuperClass() != null)
writer.writeAttribute(0, false, "super-class", cls.getSuperClass().getName());
if (cls.getShortName() != null)
writer.writeAttribute(0, false, "short-name", cls.getShortName());
if (cls.isAbstract())
writer.writeAttribute(0, false, "abstract", cls.isAbstract());
if (!cls.properties().isEmpty()) {
writer.beginCollection(4, true, "properties");
final List<OProperty> properties = new ArrayList<OProperty>(cls.declaredProperties());
Collections.sort(properties);
for (OProperty p : properties) {
writer.beginObject(5, true, null);
writer.writeAttribute(0, false, "name", p.getName());
writer.writeAttribute(0, false, "type", p.getType().toString());
if (p.isMandatory())
writer.writeAttribute(0, false, "mandatory", p.isMandatory());
if (p.isReadonly())
writer.writeAttribute(0, false, "readonly", p.isReadonly());
if (p.isNotNull())
writer.writeAttribute(0, false, "not-null", p.isNotNull());
if (p.getLinkedClass() != null)
writer.writeAttribute(0, false, "linked-class", p.getLinkedClass().getName());
if (p.getLinkedType() != null)
writer.writeAttribute(0, false, "linked-type", p.getLinkedType().toString());
if (p.getMin() != null)
writer.writeAttribute(0, false, "min", p.getMin());
if (p.getMax() != null)
writer.writeAttribute(0, false, "max", p.getMax());
if (((OPropertyImpl) p).getCustomInternal() != null)
writer.writeAttribute(0, false, "customFields", ((OPropertyImpl) p).getCustomInternal());
writer.endObject(0, false);
}
writer.endCollection(4, true);
}
writer.endObject(3, true);
}
writer.endCollection(2, true);
}
writer.endObject(1, true);
listener.onMessage("OK (" + s.getClasses().size() + " classes)");
}
private boolean exportRecord(long recordTot, long recordNum, ORecordInternal<?> rec) throws IOException {
if (rec != null)
try {
if (rec.getIdentity().isValid())
rec.reload();
if (useLineFeedForRecords)
writer.append("\n");
if (recordExported > 0)
writer.append(",");
writer.append(rec.toJSON("rid,type,version,class,attribSameRow,keepTypes,alwaysFetchEmbedded,dateAsLong"));
recordExported++;
recordNum++;
if (recordTot > 10 && (recordNum + 1) % (recordTot / 10) == 0)
listener.onMessage(".");
return true;
} catch (Throwable t) {
if (rec != null) {
final byte[] buffer = rec.toStream();
OLogManager
.instance()
.error(
this,
"\nError on exporting record %s. It seems corrupted; size: %d bytes, raw content (as string):\n==========\n%s\n==========",
t, rec.getIdentity(), buffer.length, new String(buffer));
}
}
return false;
}
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_db_tool_ODatabaseExport.java
|
220 |
public interface RuntimeEnvironmentKeyResolver {
/**
* Determine and return the runtime environment; if an implementation is
* unable to determine the runtime environment, null can be returned to
* indicate this.
*/
String resolveRuntimeEnvironmentKey();
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_config_RuntimeEnvironmentKeyResolver.java
|
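A hedged implementation sketch for the interface in the row above; the class and property names are illustrative, not taken from the dataset. It resolves the key from a JVM system property and returns null when the property is absent, which signals to callers that the runtime environment could not be determined.
public class SystemPropertyRuntimeEnvironmentKeyResolver implements RuntimeEnvironmentKeyResolver {

    protected String environmentKeyProperty = "runtime.environment";

    @Override
    public String resolveRuntimeEnvironmentKey() {
        // Returns null when the property is not set, letting callers fall back to a default.
        return System.getProperty(environmentKeyProperty);
    }
}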
266 |
public class OCommandExecutorNotFoundException extends OCommandExecutionException {
private static final long serialVersionUID = -7430575036316163711L;
public OCommandExecutorNotFoundException(String message, Throwable cause) {
super(message, cause);
}
public OCommandExecutorNotFoundException(String message) {
super(message);
}
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_command_OCommandExecutorNotFoundException.java
|
608 |
public class MemberInfoUpdateOperation extends AbstractClusterOperation implements JoinOperation {
private Collection<MemberInfo> memberInfos;
private long masterTime = Clock.currentTimeMillis();
private boolean sendResponse;
public MemberInfoUpdateOperation() {
memberInfos = new ArrayList<MemberInfo>();
}
public MemberInfoUpdateOperation(Collection<MemberInfo> memberInfos, long masterTime, boolean sendResponse) {
this.masterTime = masterTime;
this.memberInfos = memberInfos;
this.sendResponse = sendResponse;
}
@Override
public void run() throws Exception {
processMemberUpdate();
}
protected final void processMemberUpdate() {
if (isValid()) {
final ClusterServiceImpl clusterService = getService();
clusterService.setMasterTime(masterTime);
clusterService.updateMembers(memberInfos);
}
}
protected final boolean isValid() {
final ClusterServiceImpl clusterService = getService();
final Connection conn = getConnection();
final Address masterAddress = conn != null ? conn.getEndPoint() : null;
boolean isLocal = conn == null;
return isLocal
|| (masterAddress != null && masterAddress.equals(clusterService.getMasterAddress()));
}
@Override
public final boolean returnsResponse() {
return sendResponse;
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
masterTime = in.readLong();
int size = in.readInt();
memberInfos = new ArrayList<MemberInfo>(size);
while (size-- > 0) {
MemberInfo memberInfo = new MemberInfo();
memberInfo.readData(in);
memberInfos.add(memberInfo);
}
sendResponse = in.readBoolean();
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
out.writeLong(masterTime);
out.writeInt(memberInfos.size());
for (MemberInfo memberInfo : memberInfos) {
memberInfo.writeData(out);
}
out.writeBoolean(sendResponse);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("MembersUpdateCall {\n");
for (MemberInfo address : memberInfos) {
sb.append(address).append('\n');
}
sb.append('}');
return sb.toString();
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_cluster_MemberInfoUpdateOperation.java
|
1,146 |
public class OSQLMethodIndexOf extends OAbstractSQLMethod {
public static final String NAME = "indexof";
public OSQLMethodIndexOf() {
super(NAME, 1, 2);
}
@Override
public Object execute(OIdentifiable iCurrentRecord, OCommandContext iContext, Object ioResult, Object[] iMethodParams) {
final String param0 = iMethodParams[0].toString();
if (param0.length() > 2) {
String toFind = param0.substring(1, param0.length() - 1);
int startIndex = iMethodParams.length > 1 ? Integer.parseInt(iMethodParams[1].toString()) : 0;
ioResult = ioResult != null ? ioResult.toString().indexOf(toFind, startIndex) : null;
}
return ioResult;
}
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_sql_method_misc_OSQLMethodIndexOf.java
|
284 |
public abstract class ActionRequestBuilder<Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> {
protected final Request request;
protected final InternalGenericClient client;
protected ActionRequestBuilder(InternalGenericClient client, Request request) {
this.client = client;
this.request = request;
}
public Request request() {
return this.request;
}
@SuppressWarnings("unchecked")
public final RequestBuilder setListenerThreaded(boolean listenerThreaded) {
request.listenerThreaded(listenerThreaded);
return (RequestBuilder) this;
}
@SuppressWarnings("unchecked")
public final RequestBuilder putHeader(String key, Object value) {
request.putHeader(key, value);
return (RequestBuilder) this;
}
public ListenableActionFuture<Response> execute() {
PlainListenableActionFuture<Response> future = new PlainListenableActionFuture<Response>(request.listenerThreaded(), client.threadPool());
execute(future);
return future;
}
/**
* Short version of execute().actionGet().
*/
public Response get() throws ElasticsearchException {
return execute().actionGet();
}
/**
* Short version of execute().actionGet().
*/
public Response get(TimeValue timeout) throws ElasticsearchException {
return execute().actionGet(timeout);
}
/**
* Short version of execute().actionGet().
*/
public Response get(String timeout) throws ElasticsearchException {
return execute().actionGet(timeout);
}
public void execute(ActionListener<Response> listener) {
doExecute(listener);
}
protected abstract void doExecute(ActionListener<Response> listener);
}
| 0true
|
src_main_java_org_elasticsearch_action_ActionRequestBuilder.java
|
825 |
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_OFFER_RULE")
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE, region="blStandardElements")
public class OfferRuleImpl implements OfferRule {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(generator= "OfferRuleId")
@GenericGenerator(
name="OfferRuleId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="OfferRuleImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.core.offer.domain.OfferRuleImpl")
}
)
@Column(name = "OFFER_RULE_ID")
protected Long id;
@Lob
@Type(type = "org.hibernate.type.StringClobType")
@Column(name = "MATCH_RULE", length = Integer.MAX_VALUE - 1)
protected String matchRule;
/* (non-Javadoc)
* @see org.broadleafcommerce.core.offer.domain.OfferRule#getId()
*/
@Override
public Long getId() {
return id;
}
/* (non-Javadoc)
* @see org.broadleafcommerce.core.offer.domain.OfferRule#setId(java.lang.Long)
*/
@Override
public void setId(Long id) {
this.id = id;
}
/* (non-Javadoc)
* @see org.broadleafcommerce.core.offer.domain.OfferRule#getMatchRule()
*/
@Override
public String getMatchRule() {
return matchRule;
}
/* (non-Javadoc)
* @see org.broadleafcommerce.core.offer.domain.OfferRule#setMatchRule(java.lang.String)
*/
@Override
public void setMatchRule(String matchRule) {
this.matchRule = matchRule;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((id == null) ? 0 : id.hashCode());
result = prime * result + ((matchRule == null) ? 0 : matchRule.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OfferRuleImpl other = (OfferRuleImpl) obj;
if (id != null && other.id != null) {
return id.equals(other.id);
}
if (matchRule == null) {
if (other.matchRule != null)
return false;
} else if (!matchRule.equals(other.matchRule))
return false;
return true;
}
}
| 1no label
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_offer_domain_OfferRuleImpl.java
|
693 |
public static class Builder {
private final Client client;
private final Listener listener;
private String name;
private int concurrentRequests = 1;
private int bulkActions = 1000;
private ByteSizeValue bulkSize = new ByteSizeValue(5, ByteSizeUnit.MB);
private TimeValue flushInterval = null;
/**
* Creates a builder for a bulk processor with the client to use and a listener that will be notified
* when bulk requests complete.
*/
public Builder(Client client, Listener listener) {
this.client = client;
this.listener = listener;
}
/**
* Sets an optional name to identify this bulk processor.
*/
public Builder setName(String name) {
this.name = name;
return this;
}
/**
* Sets the number of concurrent requests allowed to be executed. A value of 0 means that only a single
* request will be allowed to be executed. A value of 1 means 1 concurrent request is allowed to be executed
* while accumulating new bulk requests. Defaults to <tt>1</tt>.
*/
public Builder setConcurrentRequests(int concurrentRequests) {
this.concurrentRequests = concurrentRequests;
return this;
}
/**
* Sets when to flush a new bulk request based on the number of actions currently added. Defaults to
* <tt>1000</tt>. Can be set to <tt>-1</tt> to disable it.
*/
public Builder setBulkActions(int bulkActions) {
this.bulkActions = bulkActions;
return this;
}
/**
* Sets when to flush a new bulk request based on the size of actions currently added. Defaults to
* <tt>5mb</tt>. Can be set to <tt>-1</tt> to disable it.
*/
public Builder setBulkSize(ByteSizeValue bulkSize) {
this.bulkSize = bulkSize;
return this;
}
/**
* Sets a flush interval flushing *any* bulk actions pending if the interval passes. Defaults to not set.
* <p/>
* Note, both {@link #setBulkActions(int)} and {@link #setBulkSize(org.elasticsearch.common.unit.ByteSizeValue)}
* can be set to <tt>-1</tt> with the flush interval set allowing for complete async processing of bulk actions.
*/
public Builder setFlushInterval(TimeValue flushInterval) {
this.flushInterval = flushInterval;
return this;
}
/**
* Builds a new bulk processor.
*/
public BulkProcessor build() {
return new BulkProcessor(client, listener, name, concurrentRequests, bulkActions, bulkSize, flushInterval);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_bulk_BulkProcessor.java
|
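A hedged usage sketch for the builder in the row above, not taken from this dataset: it assumes an existing Client instance, and the BulkProcessor.Listener callbacks (beforeBulk/afterBulk) and the processor name "my-indexer" come from the surrounding Elasticsearch API and are not shown in this row.
BulkProcessor bulkProcessor = new BulkProcessor.Builder(client, new BulkProcessor.Listener() {
        @Override
        public void beforeBulk(long executionId, BulkRequest request) {
            // e.g. log request.numberOfActions() before the flush
        }
        @Override
        public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
            // inspect response.hasFailures() here
        }
        @Override
        public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
            // the bulk request failed as a whole
        }
    })
    .setName("my-indexer")                              // optional identifier
    .setConcurrentRequests(1)                           // one in-flight bulk while accumulating
    .setBulkActions(1000)                               // flush every 1000 actions
    .setBulkSize(new ByteSizeValue(5, ByteSizeUnit.MB)) // or once 5mb has accumulated
    .setFlushInterval(TimeValue.timeValueSeconds(5))    // or every 5 seconds
    .build();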
1,139 |
public class OSQLMethodAsLong extends OAbstractSQLMethod {
public static final String NAME = "aslong";
public OSQLMethodAsLong() {
super(NAME);
}
@Override
public Object execute(OIdentifiable iCurrentRecord, OCommandContext iContext, Object ioResult, Object[] iMethodParams) {
if (ioResult instanceof Number) {
ioResult = ((Number) ioResult).longValue();
} else if (ioResult instanceof Date) {
ioResult = ((Date) ioResult).getTime();
} else {
ioResult = ioResult != null ? new Long(ioResult.toString().trim()) : null;
}
return ioResult;
}
}
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_sql_method_misc_OSQLMethodAsLong.java
|
839 |
public class TransportClearScrollAction extends TransportAction<ClearScrollRequest, ClearScrollResponse> {
private final ClusterService clusterService;
private final SearchServiceTransportAction searchServiceTransportAction;
@Inject
public TransportClearScrollAction(Settings settings, TransportService transportService, ThreadPool threadPool, ClusterService clusterService, SearchServiceTransportAction searchServiceTransportAction) {
super(settings, threadPool);
this.clusterService = clusterService;
this.searchServiceTransportAction = searchServiceTransportAction;
transportService.registerHandler(ClearScrollAction.NAME, new TransportHandler());
}
@Override
protected void doExecute(ClearScrollRequest request, final ActionListener<ClearScrollResponse> listener) {
new Async(request, listener, clusterService.state()).run();
}
private class Async {
final DiscoveryNodes nodes;
final CountDown expectedOps;
final ClearScrollRequest request;
final List<Tuple<String, Long>[]> contexts = new ArrayList<Tuple<String, Long>[]>();
final AtomicReference<Throwable> expHolder;
final ActionListener<ClearScrollResponse> listener;
private Async(ClearScrollRequest request, ActionListener<ClearScrollResponse> listener, ClusterState clusterState) {
int expectedOps = 0;
this.nodes = clusterState.nodes();
if (request.getScrollIds().size() == 1 && "_all".equals(request.getScrollIds().get(0))) {
expectedOps = nodes.size();
} else {
for (String parsedScrollId : request.getScrollIds()) {
Tuple<String, Long>[] context = parseScrollId(parsedScrollId).getContext();
expectedOps += context.length;
this.contexts.add(context);
}
}
this.request = request;
this.listener = listener;
this.expHolder = new AtomicReference<Throwable>();
this.expectedOps = new CountDown(expectedOps);
}
public void run() {
if (expectedOps.isCountedDown()) {
listener.onResponse(new ClearScrollResponse(true));
return;
}
if (contexts.isEmpty()) {
for (final DiscoveryNode node : nodes) {
searchServiceTransportAction.sendClearAllScrollContexts(node, request, new ActionListener<Boolean>() {
@Override
public void onResponse(Boolean success) {
onFreedContext();
}
@Override
public void onFailure(Throwable e) {
onFailedFreedContext(e, node);
}
});
}
} else {
for (Tuple<String, Long>[] context : contexts) {
for (Tuple<String, Long> target : context) {
final DiscoveryNode node = nodes.get(target.v1());
if (node == null) {
onFreedContext();
continue;
}
searchServiceTransportAction.sendFreeContext(node, target.v2(), request, new ActionListener<Boolean>() {
@Override
public void onResponse(Boolean success) {
onFreedContext();
}
@Override
public void onFailure(Throwable e) {
onFailedFreedContext(e, node);
}
});
}
}
}
}
void onFreedContext() {
if (expectedOps.countDown()) {
boolean succeeded = expHolder.get() == null;
listener.onResponse(new ClearScrollResponse(succeeded));
}
}
void onFailedFreedContext(Throwable e, DiscoveryNode node) {
logger.warn("Clear SC failed on node[{}]", e, node);
if (expectedOps.countDown()) {
listener.onResponse(new ClearScrollResponse(false));
} else {
expHolder.set(e);
}
}
}
class TransportHandler extends BaseTransportRequestHandler<ClearScrollRequest> {
@Override
public ClearScrollRequest newInstance() {
return new ClearScrollRequest();
}
@Override
public void messageReceived(final ClearScrollRequest request, final TransportChannel channel) throws Exception {
// no need to use threaded listener, since we just send a response
request.listenerThreaded(false);
execute(request, new ActionListener<ClearScrollResponse>() {
@Override
public void onResponse(ClearScrollResponse response) {
try {
channel.sendResponse(response);
} catch (Throwable e) {
onFailure(e);
}
}
@Override
public void onFailure(Throwable e) {
try {
channel.sendResponse(e);
} catch (Exception e1) {
logger.warn("Failed to send error response for action [clear_sc] and request [" + request + "]", e1);
}
}
});
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_action_search_TransportClearScrollAction.java
|
1,612 |
public class ManagementCenterService {
private final static AtomicBoolean DISPLAYED_HOSTED_MANAGEMENT_CENTER_INFO = new AtomicBoolean(false);
public static final int HTTP_SUCCESS = 200;
private final HazelcastInstanceImpl instance;
private final TaskPollThread taskPollThread;
private final StateSendThread stateSendThread;
private final ILogger logger;
private final ConsoleCommandHandler commandHandler;
private final ManagementCenterConfig managementCenterConfig;
private final SerializationService serializationService;
private final ManagementCenterIdentifier identifier;
private final AtomicBoolean isRunning = new AtomicBoolean(false);
private final String clusterId;
private final String securityToken;
private volatile String managementCenterUrl;
private volatile boolean urlChanged = false;
private volatile boolean versionMismatch = false;
public ManagementCenterService(HazelcastInstanceImpl instance) {
this.instance = instance;
logger = instance.node.getLogger(ManagementCenterService.class);
managementCenterConfig = getManagementCenterConfig();
securityToken = managementCenterConfig.getSecurityToken();
managementCenterUrl = getManagementCenterUrl();
clusterId = getClusterId();
commandHandler = new ConsoleCommandHandler(instance);
taskPollThread = new TaskPollThread();
stateSendThread = new StateSendThread();
serializationService = instance.node.getSerializationService();
identifier = newManagementCenterIdentifier();
registerListeners();
logHostedManagementCenterMessages();
}
private void logHostedManagementCenterMessages() {
if (isHostedManagementCenterEnabled()) {
if (isSecurityTokenAvailable()) {
logHostedManagementCenterLoginUrl();
} else {
logHostedManagementCenterRegisterUrl();
}
}
}
private boolean isSecurityTokenAvailable() {
return !isNullOrEmpty(managementCenterConfig.getSecurityToken());
}
private String getManagementCenterUrl() {
if (isHostedManagementCenterEnabled()) {
return getHostedManagementCenterUrl();
} else {
return managementCenterConfig.getUrl();
}
}
private boolean isHostedManagementCenterEnabled() {
if (!getGroupProperties().HOSTED_MANAGEMENT_ENABLED.getBoolean()) {
return false;
}
return isNullOrEmpty(managementCenterConfig.getUrl());
}
private GroupProperties getGroupProperties() {
return instance.node.getGroupProperties();
}
private String getHostedManagementCenterUrl() {
return getGroupProperties().HOSTED_MANAGEMENT_URL.getString();
}
private void registerListeners() {
if(!managementCenterConfig.isEnabled()){
return;
}
instance.getLifecycleService().addLifecycleListener(new LifecycleListenerImpl());
instance.getCluster().addMembershipListener(new MemberListenerImpl());
}
private void logHostedManagementCenterLoginUrl() {
if (managementCenterConfig.isEnabled()) {
logger.info("======================================================");
logger.info("You can access your Hazelcast instance at:");
logger.info(getHostedManagementCenterUrl() + "/start.do?clusterid=" + clusterId);
logger.info("======================================================");
} else {
logger.info("======================================================");
logger.info("To see your application on the Hosted Management Center, " +
"you need to enable the ManagementCenterConfig.");
logger.info("======================================================");
}
}
private void logHostedManagementCenterRegisterUrl() {
//we only want to display the page for hosted management registration once. We don't want to pollute
//the logfile.
if (!DISPLAYED_HOSTED_MANAGEMENT_CENTER_INFO.compareAndSet(false, true)) {
return;
}
logger.info("======================================================");
logger.info("Manage your Hazelcast cluster with the Management Center SaaS Application");
logger.info("To register, copy/paste the following url in your browser and follow the instructions.");
logger.info(getHostedManagementCenterUrl() + "/register.jsp");
logger.info("======================================================");
}
private String getClusterId() {
String clusterId = managementCenterConfig.getClusterId();
if(!isNullOrEmpty(clusterId)){
return clusterId;
}
if (!isHostedManagementCenterEnabled()) {
return null;
}
return newClusterId();
}
private String newClusterId() {
IAtomicReference<String> clusterIdReference = instance.getAtomicReference("___clusterIdGenerator");
String id = clusterIdReference.get();
if (id == null) {
id = UUID.randomUUID().toString().replace("-", "");
if (!clusterIdReference.compareAndSet(null, id)) {
id = clusterIdReference.get();
}
}
return id;
}
private ManagementCenterConfig getManagementCenterConfig() {
ManagementCenterConfig config = instance.node.config.getManagementCenterConfig();
if (config == null) {
throw new IllegalStateException("ManagementCenterConfig can't be null!");
}
return config;
}
private ManagementCenterIdentifier newManagementCenterIdentifier() {
Address address = instance.node.address;
String groupName = instance.getConfig().getGroupConfig().getName();
String version = instance.node.getBuildInfo().getVersion();
return new ManagementCenterIdentifier(version, groupName, address.getHost() + ":" + address.getPort());
}
private static String cleanupUrl(String url) {
if (url == null) {
return null;
}
return url.endsWith("/") ? url : url + '/';
}
public void start() {
if (managementCenterUrl == null) {
logger.warning("Can't start Hazelcast Management Center Service: web-server URL is null!");
return;
}
if (!isRunning.compareAndSet(false, true)) {
//it is already started
return;
}
taskPollThread.start();
stateSendThread.start();
logger.info("Hazelcast will connect to Hazelcast Management Center on address: \n" + managementCenterUrl);
}
public void shutdown() {
if (!isRunning.compareAndSet(true, false)) {
//it is already shutdown.
return;
}
logger.info("Shutting down Hazelcast Management Center Service");
try {
interruptThread(stateSendThread);
interruptThread(taskPollThread);
} catch (Throwable ignored) {
}
}
public byte[] clusterWideUpdateManagementCenterUrl(String groupName, String groupPass, String newUrl) {
try {
GroupConfig groupConfig = instance.getConfig().getGroupConfig();
if (!(groupConfig.getName().equals(groupName) && groupConfig.getPassword().equals(groupPass))) {
return HttpCommand.RES_403;
}
final Collection<MemberImpl> memberList = instance.node.clusterService.getMemberList();
for (MemberImpl member : memberList) {
send(member.getAddress(), new UpdateManagementCenterUrlOperation(newUrl));
}
return HttpCommand.RES_204;
} catch (Throwable throwable) {
logger.warning("New Management Center url cannot be assigned.", throwable);
return HttpCommand.RES_500;
}
}
public void updateManagementCenterUrl(String newUrl) {
if (newUrl == null) {
return;
}
if (newUrl.equals(managementCenterUrl)) {
return;
}
managementCenterUrl = newUrl;
if (!isRunning()) {
start();
}
urlChanged = true;
logger.info("Management Center URL has changed. " +
"Hazelcast will connect to Management Center on address: \n" + managementCenterUrl);
}
private void interruptThread(Thread t) {
if (t != null) {
t.interrupt();
}
}
public void signalVersionMismatch() {
versionMismatch = true;
}
public Object callOnAddress(Address address, Operation operation) {
//todo: why are we always executing on the mapservice??
OperationService operationService = instance.node.nodeEngine.getOperationService();
Future future = operationService.invokeOnTarget(MapService.SERVICE_NAME, operation, address);
try {
return future.get();
} catch (Throwable t) {
StringWriter s = new StringWriter();
t.printStackTrace(new PrintWriter(s));
return s.toString();
}
}
public Object callOnMember(Member member, Operation operation) {
Address address = ((MemberImpl) member).getAddress();
return callOnAddress(address, operation);
}
public void send(Address address, Operation operation) {
//todo: clean up needed.
OperationService operationService = instance.node.nodeEngine.getOperationService();
operationService.createInvocationBuilder(MapService.SERVICE_NAME, operation, address).invoke();
}
public HazelcastInstanceImpl getHazelcastInstance() {
return instance;
}
public ConsoleCommandHandler getCommandHandler() {
return commandHandler;
}
private boolean isRunning() {
return isRunning.get();
}
private void post(HttpURLConnection connection) throws IOException {
        //We need to call 'getResponseCode'. If we don't, the data placed in the output stream will not be sent
        //to the Management Center. For more information see:
//http://stackoverflow.com/questions/4844535/why-do-you-have-to-call-urlconnectiongetinputstream-to-be-able-to-write-out-to
int responseCode = connection.getResponseCode();
if (responseCode != HTTP_SUCCESS) {
logger.warning("Failed to send response, responseCode:" + responseCode + " url:" + connection.getURL());
}
}
private void sleepOnVersionMismatch() throws InterruptedException {
if (versionMismatch) {
Thread.sleep(1000 * 60);
versionMismatch = false;
}
}
private class StateSendThread extends Thread {
private final TimedMemberStateFactory timedMemberStateFactory;
private final int updateIntervalMs;
private StateSendThread() {
super(instance.getThreadGroup(), instance.node.getThreadNamePrefix("MC.State.Sender"));
timedMemberStateFactory = new TimedMemberStateFactory(instance);
updateIntervalMs = calcUpdateInterval();
}
private int calcUpdateInterval() {
int updateInterval = managementCenterConfig.getUpdateInterval();
return updateInterval > 0 ? updateInterval * 1000 : 5000;
}
@Override
public void run() {
try {
while (isRunning()) {
sleepOnVersionMismatch();
sendState();
sleep();
}
} catch (Throwable throwable) {
inspectOutputMemoryError(throwable);
logger.warning("Hazelcast Management Center Service will be shutdown due to exception.", throwable);
shutdown();
}
}
private void sleep() throws InterruptedException {
Thread.sleep(updateIntervalMs);
}
private void sendState() throws InterruptedException, MalformedURLException {
URL url = newCollectorUrl();
try {
//todo: does the connection not need to be closed?
HttpURLConnection connection = openConnection(url);
OutputStream outputStream = connection.getOutputStream();
try {
identifier.write(outputStream);
ObjectDataOutputStream out = serializationService.createObjectDataOutputStream(outputStream);
TimedMemberState timedMemberState = timedMemberStateFactory.createTimedMemberState();
timedMemberState.writeData(out);
outputStream.flush();
post(connection);
} finally {
closeResource(outputStream);
}
} catch (ConnectException e) {
if (logger.isFinestEnabled()) {
logger.finest(e);
} else {
logger.info("Failed to connect to:" + url);
}
} catch (Exception e) {
logger.warning(e);
}
}
private HttpURLConnection openConnection(URL url) throws IOException {
if (logger.isFinestEnabled()) {
logger.finest("Opening collector connection:" + url);
}
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setDoOutput(true);
connection.setRequestMethod("POST");
connection.setConnectTimeout(5000);
connection.setReadTimeout(5000);
return connection;
}
private URL newCollectorUrl() throws MalformedURLException {
String url = cleanupUrl(managementCenterUrl) + "collector.do";
if (clusterId != null) {
url += "?clusterid=" + clusterId;
}
if (securityToken != null) {
if (clusterId == null) {
url += "?securitytoken=" + securityToken;
} else {
url += "&securitytoken=" + securityToken;
}
}
return new URL(url);
}
}
private class TaskPollThread extends Thread {
private final Map<Integer, Class<? extends ConsoleRequest>> consoleRequests =
new HashMap<Integer, Class<? extends ConsoleRequest>>();
private final Random rand = new Random();
TaskPollThread() {
super(instance.node.threadGroup, instance.node.getThreadNamePrefix("MC.Task.Poller"));
register(new RuntimeStateRequest());
register(new ThreadDumpRequest());
register(new ExecuteScriptRequest());
register(new EvictLocalMapRequest());
register(new ConsoleCommandRequest());
register(new MapConfigRequest());
register(new MemberConfigRequest());
register(new ClusterPropsRequest());
register(new GetLogsRequest());
register(new RunGcRequest());
register(new GetMemberSystemPropertiesRequest());
register(new GetMapEntryRequest());
register(new VersionMismatchLogRequest());
register(new ShutdownMemberRequest());
register(new GetSystemWarningsRequest());
}
public void register(ConsoleRequest consoleRequest) {
consoleRequests.put(consoleRequest.getType(), consoleRequest.getClass());
}
public void processTaskAndPostResponse(int taskId, ConsoleRequest task) {
try {
//todo: don't we need to close this connection?
HttpURLConnection connection = openPostResponseConnection();
OutputStream outputStream = connection.getOutputStream();
try {
identifier.write(outputStream);
ObjectDataOutputStream out = serializationService.createObjectDataOutputStream(outputStream);
out.writeInt(taskId);
out.writeInt(task.getType());
task.writeResponse(ManagementCenterService.this, out);
out.flush();
post(connection);
} finally {
closeResource(outputStream);
}
} catch (Exception e) {
logger.warning("Failed process task:" + task, e);
}
}
private HttpURLConnection openPostResponseConnection() throws IOException {
URL url = newPostResponseUrl();
if (logger.isFinestEnabled()) {
logger.finest("Opening sendResponse connection:" + url);
}
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setDoOutput(true);
connection.setRequestMethod("POST");
connection.setConnectTimeout(2000);
connection.setReadTimeout(2000);
return connection;
}
private URL newPostResponseUrl() throws MalformedURLException {
return new URL(cleanupUrl(managementCenterUrl) + "putResponse.do");
}
@Override
public void run() {
try {
while (isRunning()) {
sleepOnVersionMismatch();
processTask();
sleep();
}
} catch (Throwable throwable) {
inspectOutputMemoryError(throwable);
logger.warning("Problem on Hazelcast Management Center Service while polling for a task.", throwable);
}
}
private void sleep() throws InterruptedException {
//todo: magic numbers are no good.
//todo: why the random part
//todo: we want configurable frequency for task polling
Thread.sleep(700 + rand.nextInt(300));
}
private void processTask() {
ObjectDataInputStream inputStream = null;
try {
//todo: don't we need to close the connection?
inputStream = openTaskInputStream();
int taskId = inputStream.readInt();
if (taskId <= 0) {
return;
}
ConsoleRequest task = newTask(inputStream);
processTaskAndPostResponse(taskId, task);
} catch (Exception e) {
        //todo: even if there is an internal error with the task, it is swallowed here and never surfaced. That should be improved.
logger.finest(e);
} finally {
IOUtil.closeResource(inputStream);
}
}
private ObjectDataInputStream openTaskInputStream() throws IOException {
URLConnection connection = openGetTaskConnection();
InputStream inputStream = connection.getInputStream();
return serializationService.createObjectDataInputStream(inputStream);
}
private ConsoleRequest newTask(ObjectDataInputStream inputStream)
throws InstantiationException, IllegalAccessException, IOException {
int requestType = inputStream.readInt();
Class<? extends ConsoleRequest> requestClass = consoleRequests.get(requestType);
if (requestClass == null) {
throw new RuntimeException("Failed to find a request for requestType:" + requestType);
}
ConsoleRequest task = requestClass.newInstance();
task.readData(inputStream);
return task;
}
private URLConnection openGetTaskConnection() throws IOException {
URL url = newGetTaskUrl();
if (logger.isFinestEnabled()) {
logger.finest("Opening getTask connection:" + url);
}
URLConnection connection = url.openConnection();
//todo: why do we set this property if the connection is not going to be re-used?
connection.setRequestProperty("Connection", "keep-alive");
return connection;
}
private URL newGetTaskUrl() throws MalformedURLException {
GroupConfig groupConfig = instance.getConfig().getGroupConfig();
Address localAddress = ((MemberImpl) instance.node.getClusterService().getLocalMember()).getAddress();
String urlString = cleanupUrl(managementCenterUrl) + "getTask.do?member=" + localAddress.getHost()
+ ":" + localAddress.getPort() + "&cluster=" + groupConfig.getName();
if (clusterId != null) {
urlString += "&clusterid=" + clusterId;
}
if (securityToken != null) {
urlString += "&securitytoken=" + securityToken;
}
return new URL(urlString);
}
}
private class LifecycleListenerImpl implements LifecycleListener {
@Override
public void stateChanged(final LifecycleEvent event) {
if (event.getState() == LifecycleState.STARTED) {
try {
start();
} catch (Exception e) {
logger.severe("ManagementCenterService could not be started!", e);
}
}
}
}
public class MemberListenerImpl implements MembershipListener {
@Override
public void memberAdded(MembershipEvent membershipEvent) {
try {
Member member = membershipEvent.getMember();
if (member != null && instance.node.isMaster() && urlChanged) {
Operation operation = new UpdateManagementCenterUrlOperation(managementCenterUrl);
callOnMember(member, operation);
}
} catch (Exception e) {
logger.warning("Web server url cannot be send to the newly joined member", e);
}
}
@Override
public void memberRemoved(MembershipEvent membershipEvent) {
}
@Override
public void memberAttributeChanged(MemberAttributeEvent memberAttributeEvent) {
}
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_management_ManagementCenterService.java
|
571 |
public class OpenIndexRequestBuilder extends AcknowledgedRequestBuilder<OpenIndexRequest, OpenIndexResponse, OpenIndexRequestBuilder> {
public OpenIndexRequestBuilder(IndicesAdminClient indicesClient) {
super((InternalIndicesAdminClient) indicesClient, new OpenIndexRequest());
}
public OpenIndexRequestBuilder(IndicesAdminClient indicesClient, String... indices) {
super((InternalIndicesAdminClient) indicesClient, new OpenIndexRequest(indices));
}
/**
* Sets the indices to be opened
* @param indices the indices to be opened
* @return the request itself
*/
public OpenIndexRequestBuilder setIndices(String... indices) {
request.indices(indices);
return this;
}
/**
* Specifies what type of requested indices to ignore and how to deal with wildcard indices expressions.
* For example indices that don't exist.
*
* @param indicesOptions the desired behaviour regarding indices to ignore and wildcard indices expressions
* @return the request itself
*/
public OpenIndexRequestBuilder setIndicesOptions(IndicesOptions indicesOptions) {
request.indicesOptions(indicesOptions);
return this;
}
@Override
protected void doExecute(ActionListener<OpenIndexResponse> listener) {
((IndicesAdminClient) client).open(request, listener);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_open_OpenIndexRequestBuilder.java
|
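A rough usage sketch for the builder above: it assumes an IndicesAdminClient obtained elsewhere (for example via client.admin().indices()); the variable name and the index name are placeholders, not part of the snippet.
// Minimal sketch: `indicesAdminClient` and the index name are placeholders.
OpenIndexResponse response = new OpenIndexRequestBuilder(indicesAdminClient)
        .setIndices("logs-2014-01")
        .execute()
        .actionGet();
boolean acknowledged = response.isAcknowledged(); // from the acknowledged-response base class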
1,654 |
md.accept(new MetadataVisitor() {
@Override
public void visit(BasicFieldMetadata fmd) {
request.setType(Type.STANDARD);
request.setCeilingEntityClassname(fmd.getForeignKeyClass());
}
@Override
public void visit(BasicCollectionMetadata fmd) {
ForeignKey foreignKey = (ForeignKey) fmd.getPersistencePerspective()
.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.FOREIGNKEY);
request.setType(Type.STANDARD);
request.setCeilingEntityClassname(fmd.getCollectionCeilingEntity());
request.setOperationTypesOverride(fmd.getPersistencePerspective().getOperationTypes());
request.setForeignKey(foreignKey);
}
@Override
public void visit(AdornedTargetCollectionMetadata fmd) {
AdornedTargetList adornedList = (AdornedTargetList) fmd.getPersistencePerspective()
.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.ADORNEDTARGETLIST);
request.setType(Type.ADORNED);
request.setCeilingEntityClassname(fmd.getCollectionCeilingEntity());
request.setOperationTypesOverride(fmd.getPersistencePerspective().getOperationTypes());
request.setAdornedList(adornedList);
}
@Override
public void visit(MapMetadata fmd) {
MapStructure mapStructure = (MapStructure) fmd.getPersistencePerspective()
.getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.MAPSTRUCTURE);
ForeignKey foreignKey = (ForeignKey) fmd.getPersistencePerspective().
getPersistencePerspectiveItems().get(PersistencePerspectiveItemType.FOREIGNKEY);
request.setType(Type.MAP);
request.setCeilingEntityClassname(foreignKey.getForeignKeyClass());
request.setOperationTypesOverride(fmd.getPersistencePerspective().getOperationTypes());
request.setMapStructure(mapStructure);
request.setForeignKey(foreignKey);
}
});
| 1no label
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_domain_PersistencePackageRequest.java
|
285 |
public class OScriptGraphDatabaseWrapper extends OScriptDocumentDatabaseWrapper {
public OScriptGraphDatabaseWrapper(final OGraphDatabase iDatabase) {
super(iDatabase);
}
public OScriptGraphDatabaseWrapper(final ODatabaseDocumentTx iDatabase) {
super(new OGraphDatabase((ODatabaseRecordTx) iDatabase.getUnderlying()));
}
public OScriptGraphDatabaseWrapper(final ODatabaseRecordTx iDatabase) {
super(iDatabase);
}
public OScriptGraphDatabaseWrapper(final String iURL) {
super(iURL);
}
public long countVertexes() {
return ((OGraphDatabase) database).countVertexes();
}
public long countEdges() {
return ((OGraphDatabase) database).countEdges();
}
public Iterable<ODocument> browseVertices() {
return ((OGraphDatabase) database).browseVertices();
}
public Iterable<ODocument> browseVertices(boolean iPolymorphic) {
return ((OGraphDatabase) database).browseVertices(iPolymorphic);
}
public Iterable<ODocument> browseEdges() {
return ((OGraphDatabase) database).browseEdges();
}
public Iterable<ODocument> browseEdges(boolean iPolymorphic) {
return ((OGraphDatabase) database).browseEdges(iPolymorphic);
}
public Iterable<ODocument> browseElements(String iClass, boolean iPolymorphic) {
return ((OGraphDatabase) database).browseElements(iClass, iPolymorphic);
}
public ODocument createVertex() {
return ((OGraphDatabase) database).createVertex();
}
public ODocument createVertex(String iClassName) {
return ((OGraphDatabase) database).createVertex(iClassName);
}
public ODocument createEdge(ORID iSourceVertexRid, ORID iDestVertexRid) {
return ((OGraphDatabase) database).createEdge(iSourceVertexRid, iDestVertexRid);
}
public ODocument createEdge(ORID iSourceVertexRid, ORID iDestVertexRid, String iClassName) {
return ((OGraphDatabase) database).createEdge(iSourceVertexRid, iDestVertexRid, iClassName);
}
public void removeVertex(ODocument iVertex) {
((OGraphDatabase) database).removeVertex(iVertex);
}
public void removeEdge(ODocument iEdge) {
((OGraphDatabase) database).removeEdge(iEdge);
}
public ODocument createEdge(ODocument iSourceVertex, ODocument iDestVertex) {
return ((OGraphDatabase) database).createEdge(iSourceVertex, iDestVertex);
}
public ODocument createEdge(ODocument iOutVertex, ODocument iInVertex, String iClassName) {
return ((OGraphDatabase) database).createEdge(iOutVertex, iInVertex, iClassName);
}
public Set<OIdentifiable> getEdgesBetweenVertexes(ODocument iVertex1, ODocument iVertex2) {
return ((OGraphDatabase) database).getEdgesBetweenVertexes(iVertex1, iVertex2);
}
public Set<OIdentifiable> getEdgesBetweenVertexes(ODocument iVertex1, ODocument iVertex2, String[] iLabels) {
return ((OGraphDatabase) database).getEdgesBetweenVertexes(iVertex1, iVertex2, iLabels);
}
public Set<OIdentifiable> getEdgesBetweenVertexes(ODocument iVertex1, ODocument iVertex2, String[] iLabels, String[] iClassNames) {
return ((OGraphDatabase) database).getEdgesBetweenVertexes(iVertex1, iVertex2, iLabels, iClassNames);
}
public Set<OIdentifiable> getOutEdges(OIdentifiable iVertex) {
return ((OGraphDatabase) database).getOutEdges(iVertex);
}
public Set<OIdentifiable> getOutEdges(OIdentifiable iVertex, String iLabel) {
return ((OGraphDatabase) database).getOutEdges(iVertex, iLabel);
}
public Set<OIdentifiable> getOutEdgesHavingProperties(OIdentifiable iVertex, Map<String, Object> iProperties) {
return ((OGraphDatabase) database).getOutEdgesHavingProperties(iVertex, iProperties);
}
public Set<OIdentifiable> getOutEdgesHavingProperties(OIdentifiable iVertex, Iterable<String> iProperties) {
return ((OGraphDatabase) database).getOutEdgesHavingProperties(iVertex, iProperties);
}
public Set<OIdentifiable> getInEdges(OIdentifiable iVertex) {
return ((OGraphDatabase) database).getInEdges(iVertex);
}
public Set<OIdentifiable> getInEdges(OIdentifiable iVertex, String iLabel) {
return ((OGraphDatabase) database).getInEdges(iVertex, iLabel);
}
public Set<OIdentifiable> getInEdgesHavingProperties(OIdentifiable iVertex, Iterable<String> iProperties) {
return ((OGraphDatabase) database).getInEdgesHavingProperties(iVertex, iProperties);
}
public Set<OIdentifiable> getInEdgesHavingProperties(ODocument iVertex, Map<String, Object> iProperties) {
return ((OGraphDatabase) database).getInEdgesHavingProperties(iVertex, iProperties);
}
public ODocument getInVertex(OIdentifiable iEdge) {
return ((OGraphDatabase) database).getInVertex(iEdge);
}
public ODocument getOutVertex(OIdentifiable iEdge) {
return ((OGraphDatabase) database).getOutVertex(iEdge);
}
public ODocument getRoot(String iName) {
return ((OGraphDatabase) database).getRoot(iName);
}
public ODocument getRoot(String iName, String iFetchPlan) {
return ((OGraphDatabase) database).getRoot(iName, iFetchPlan);
}
public OGraphDatabase setRoot(String iName, ODocument iNode) {
return ((OGraphDatabase) database).setRoot(iName, iNode);
}
public OClass createVertexType(String iClassName) {
return ((OGraphDatabase) database).createVertexType(iClassName);
}
public OClass createVertexType(String iClassName, String iSuperClassName) {
return ((OGraphDatabase) database).createVertexType(iClassName, iSuperClassName);
}
public OClass createVertexType(String iClassName, OClass iSuperClass) {
return ((OGraphDatabase) database).createVertexType(iClassName, iSuperClass);
}
public OClass getVertexType(String iClassName) {
return ((OGraphDatabase) database).getVertexType(iClassName);
}
public OClass createEdgeType(String iClassName) {
return ((OGraphDatabase) database).createEdgeType(iClassName);
}
public OClass createEdgeType(String iClassName, String iSuperClassName) {
return ((OGraphDatabase) database).createEdgeType(iClassName, iSuperClassName);
}
public OClass createEdgeType(String iClassName, OClass iSuperClass) {
return ((OGraphDatabase) database).createEdgeType(iClassName, iSuperClass);
}
public OClass getEdgeType(String iClassName) {
return ((OGraphDatabase) database).getEdgeType(iClassName);
}
public boolean isSafeMode() {
return ((OGraphDatabase) database).isSafeMode();
}
public void setSafeMode(boolean safeMode) {
((OGraphDatabase) database).setSafeMode(safeMode);
}
public OClass getVertexBaseClass() {
return ((OGraphDatabase) database).getVertexBaseClass();
}
public OClass getEdgeBaseClass() {
return ((OGraphDatabase) database).getEdgeBaseClass();
}
public Set<OIdentifiable> filterEdgesByProperties(OMVRBTreeRIDSet iEdges, Iterable<String> iPropertyNames) {
return ((OGraphDatabase) database).filterEdgesByProperties(iEdges, iPropertyNames);
}
public Set<OIdentifiable> filterEdgesByProperties(OMVRBTreeRIDSet iEdges, Map<String, Object> iProperties) {
return ((OGraphDatabase) database).filterEdgesByProperties(iEdges, iProperties);
}
public boolean isUseCustomTypes() {
return ((OGraphDatabase) database).isUseCustomTypes();
}
public void setUseCustomTypes(boolean useCustomTypes) {
((OGraphDatabase) database).setUseCustomTypes(useCustomTypes);
}
public boolean isVertex(ODocument iRecord) {
return ((OGraphDatabase) database).isVertex(iRecord);
}
public boolean isEdge(ODocument iRecord) {
return ((OGraphDatabase) database).isEdge(iRecord);
}
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_command_script_OScriptGraphDatabaseWrapper.java
|
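A short sketch of driving the wrapper above, using only the methods it exposes; the database URL is a placeholder, and opening/authenticating the underlying OGraphDatabase is assumed to be handled elsewhere.
// Minimal sketch using only the wrapper methods above; URL and class names are placeholders.
OScriptGraphDatabaseWrapper graph = new OScriptGraphDatabaseWrapper("local:/tmp/graphdb");
ODocument person = graph.createVertex("Person");
ODocument city = graph.createVertex("City");
ODocument livesIn = graph.createEdge(person, city, "LivesIn");
long vertexCount = graph.countVertexes();
long edgeCount = graph.countEdges();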
991 |
threadPool.executor(executor).execute(new Runnable() {
@Override
public void run() {
try {
performOnPrimary(shard.id(), shard, clusterState);
} catch (Throwable t) {
listener.onFailure(t);
}
}
});
| 0true
|
src_main_java_org_elasticsearch_action_support_replication_TransportShardReplicationOperationAction.java
|
240 |
assertTrueEventually(new AssertTask() {
public void run() throws Exception {
assertEquals(1, map.size());
}
});
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_executor_ClientExecutorServiceSubmitTest.java
|
71 |
@SuppressWarnings("serial")
static final class MapReduceEntriesToIntTask<K,V>
extends BulkTask<K,V,Integer> {
final ObjectToInt<Map.Entry<K,V>> transformer;
final IntByIntToInt reducer;
final int basis;
int result;
MapReduceEntriesToIntTask<K,V> rights, nextRight;
MapReduceEntriesToIntTask
(BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
MapReduceEntriesToIntTask<K,V> nextRight,
ObjectToInt<Map.Entry<K,V>> transformer,
int basis,
IntByIntToInt reducer) {
super(p, b, i, f, t); this.nextRight = nextRight;
this.transformer = transformer;
this.basis = basis; this.reducer = reducer;
}
public final Integer getRawResult() { return result; }
public final void compute() {
final ObjectToInt<Map.Entry<K,V>> transformer;
final IntByIntToInt reducer;
if ((transformer = this.transformer) != null &&
(reducer = this.reducer) != null) {
int r = this.basis;
for (int i = baseIndex, f, h; batch > 0 &&
(h = ((f = baseLimit) + i) >>> 1) > i;) {
addToPendingCount(1);
(rights = new MapReduceEntriesToIntTask<K,V>
(this, batch >>>= 1, baseLimit = h, f, tab,
rights, transformer, r, reducer)).fork();
}
for (Node<K,V> p; (p = advance()) != null; )
r = reducer.apply(r, transformer.apply(p));
result = r;
CountedCompleter<?> c;
for (c = firstComplete(); c != null; c = c.nextComplete()) {
@SuppressWarnings("unchecked") MapReduceEntriesToIntTask<K,V>
t = (MapReduceEntriesToIntTask<K,V>)c,
s = t.rights;
while (s != null) {
t.result = reducer.apply(t.result, s.result);
s = t.rights = s.nextRight;
}
}
}
}
}
| 0true
|
src_main_java_jsr166e_ConcurrentHashMapV8.java
|
1,212 |
public class PaymentInfoType implements Serializable, BroadleafEnumerationType {
private static final long serialVersionUID = 1L;
private static final Map<String, PaymentInfoType> TYPES = new LinkedHashMap<String, PaymentInfoType>();
public static final PaymentInfoType GIFT_CARD = new PaymentInfoType("GIFT_CARD", "Gift Card");
public static final PaymentInfoType CREDIT_CARD = new PaymentInfoType("CREDIT_CARD", "Credit Card");
public static final PaymentInfoType BANK_ACCOUNT = new PaymentInfoType("BANK_ACCOUNT", "Bank Account");
public static final PaymentInfoType PAYPAL = new PaymentInfoType("PAYPAL", "PayPal");
public static final PaymentInfoType CHECK = new PaymentInfoType("CHECK", "Check");
public static final PaymentInfoType ELECTRONIC_CHECK = new PaymentInfoType("ELECTRONIC_CHECK", "Electronic Check");
public static final PaymentInfoType WIRE = new PaymentInfoType("WIRE", "Wire Transfer");
public static final PaymentInfoType MONEY_ORDER = new PaymentInfoType("MONEY_ORDER", "Money Order");
public static final PaymentInfoType CUSTOMER_CREDIT = new PaymentInfoType("CUSTOMER_CREDIT", "Customer Credit");
public static final PaymentInfoType ACCOUNT = new PaymentInfoType("ACCOUNT", "Account");
public static PaymentInfoType getInstance(final String type) {
return TYPES.get(type);
}
private String type;
private String friendlyType;
public PaymentInfoType() {
//do nothing
}
public PaymentInfoType(final String type, final String friendlyType) {
this.friendlyType = friendlyType;
setType(type);
}
public String getType() {
return type;
}
public String getFriendlyType() {
return friendlyType;
}
private void setType(final String type) {
this.type = type;
if (!TYPES.containsKey(type)) {
TYPES.put(type, this);
}
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
PaymentInfoType other = (PaymentInfoType) obj;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
return true;
}
}
| 1no label
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_payment_service_type_PaymentInfoType.java
|
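Because the class above registers every instance in its static TYPES map, callers look types up by key, and the enumeration can be extended simply by declaring new constants. A short sketch follows; the CRYPTO constant is a hypothetical example and does not ship with the framework.
// Look up an existing type by its key; getInstance returns null for unregistered keys.
PaymentInfoType creditCard = PaymentInfoType.getInstance("CREDIT_CARD");
String label = creditCard.getFriendlyType(); // "Credit Card"
// Extensible-enum pattern: constructing a new constant registers it in the shared TYPES map.
// This constant is hypothetical, shown only to illustrate the pattern.
PaymentInfoType crypto = new PaymentInfoType("CRYPTO", "Cryptocurrency");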
2,290 |
public class MapReduceDataSerializerHook
implements DataSerializerHook {
public static final int F_ID = FactoryIdHelper.getFactoryId(FactoryIdHelper.MAP_REDUCE_DS_FACTORY, -23);
public static final int KEY_VALUE_SOURCE_MAP = 0;
public static final int KEY_VALUE_SOURCE_MULTIMAP = 1;
public static final int REDUCER_CHUNK_MESSAGE = 2;
public static final int REDUCER_LAST_CHUNK_MESSAGE = 3;
public static final int TRACKED_JOB_OPERATION = 4;
public static final int REQUEST_PARTITION_MAPPING = 5;
public static final int REQUEST_PARTITION_REDUCING = 6;
public static final int REQUEST_PARTITION_PROCESSED = 7;
public static final int GET_RESULT_OPERATION = 8;
public static final int START_PROCESSING_OPERATION = 9;
public static final int REQUEST_PARTITION_RESULT = 10;
public static final int REDUCING_FINISHED_MESSAGE = 11;
public static final int FIRE_NOTIFICATION_OPERATION = 12;
public static final int REQUEST_MEMBERID_ASSIGNMENT = 13;
public static final int PROCESS_STATS_UPDATE_OPERATION = 14;
public static final int NOTIFY_REMOTE_EXCEPTION_OPERATION = 15;
public static final int CANCEL_JOB_SUPERVISOR_OPERATION = 16;
public static final int POSTPONE_PARTITION_PROCESSING_OPERATION = 17;
public static final int KEY_VALUE_SOURCE_LIST = 18;
public static final int KEY_VALUE_SOURCE_SET = 19;
public static final int KEYS_ASSIGNMENT_RESULT = 20;
public static final int KEYS_ASSIGNMENT_OPERATION = 21;
private static final int LEN = KEYS_ASSIGNMENT_OPERATION + 1;
@Override
public int getFactoryId() {
return F_ID;
}
@Override
public DataSerializableFactory createFactory() {
ConstructorFunction<Integer, IdentifiedDataSerializable> constructors[] = new ConstructorFunction[LEN];
constructors[KEY_VALUE_SOURCE_MAP] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
@Override
public IdentifiedDataSerializable createNew(Integer arg) {
return new MapKeyValueSource();
}
};
constructors[KEY_VALUE_SOURCE_MULTIMAP] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
@Override
public IdentifiedDataSerializable createNew(Integer arg) {
return new MultiMapKeyValueSource();
}
};
constructors[REDUCER_CHUNK_MESSAGE] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
@Override
public IdentifiedDataSerializable createNew(Integer arg) {
return new IntermediateChunkNotification();
}
};
constructors[REDUCER_LAST_CHUNK_MESSAGE] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
@Override
public IdentifiedDataSerializable createNew(Integer arg) {
return new LastChunkNotification();
}
};
constructors[TRACKED_JOB_OPERATION] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
@Override
public IdentifiedDataSerializable createNew(Integer arg) {
return new KeyValueJobOperation();
}
};
constructors[REQUEST_PARTITION_MAPPING] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
@Override
public IdentifiedDataSerializable createNew(Integer arg) {
return new RequestPartitionMapping();
}
};
constructors[REQUEST_PARTITION_REDUCING] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
@Override
public IdentifiedDataSerializable createNew(Integer arg) {
return new RequestPartitionReducing();
}
};
constructors[REQUEST_PARTITION_PROCESSED] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
@Override
public IdentifiedDataSerializable createNew(Integer arg) {
return new RequestPartitionProcessed();
}
};
constructors[GET_RESULT_OPERATION] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
@Override
public IdentifiedDataSerializable createNew(Integer arg) {
return new GetResultOperation();
}
};
constructors[START_PROCESSING_OPERATION] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
@Override
public IdentifiedDataSerializable createNew(Integer arg) {
return new StartProcessingJobOperation();
}
};
constructors[REQUEST_PARTITION_RESULT] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
@Override
public IdentifiedDataSerializable createNew(Integer arg) {
return new RequestPartitionResult();
}
};
constructors[REDUCING_FINISHED_MESSAGE] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
@Override
public IdentifiedDataSerializable createNew(Integer arg) {
return new ReducingFinishedNotification();
}
};
constructors[FIRE_NOTIFICATION_OPERATION] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
@Override
public IdentifiedDataSerializable createNew(Integer arg) {
return new FireNotificationOperation();
}
};
constructors[REQUEST_MEMBERID_ASSIGNMENT] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
@Override
public IdentifiedDataSerializable createNew(Integer arg) {
return new RequestMemberIdAssignment();
}
};
constructors[PROCESS_STATS_UPDATE_OPERATION] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
@Override
public IdentifiedDataSerializable createNew(Integer arg) {
return new ProcessStatsUpdateOperation();
}
};
constructors[NOTIFY_REMOTE_EXCEPTION_OPERATION] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
@Override
public IdentifiedDataSerializable createNew(Integer arg) {
return new NotifyRemoteExceptionOperation();
}
};
constructors[CANCEL_JOB_SUPERVISOR_OPERATION] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
@Override
public IdentifiedDataSerializable createNew(Integer arg) {
return new CancelJobSupervisorOperation();
}
};
constructors[KEY_VALUE_SOURCE_LIST] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
@Override
public IdentifiedDataSerializable createNew(Integer arg) {
return new ListKeyValueSource();
}
};
constructors[KEY_VALUE_SOURCE_SET] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
@Override
public IdentifiedDataSerializable createNew(Integer arg) {
return new SetKeyValueSource();
}
};
constructors[KEYS_ASSIGNMENT_RESULT] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
@Override
public IdentifiedDataSerializable createNew(Integer arg) {
return new KeysAssignmentResult();
}
};
constructors[KEYS_ASSIGNMENT_OPERATION] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
@Override
public IdentifiedDataSerializable createNew(Integer arg) {
return new KeysAssignmentOperation();
}
};
constructors[POSTPONE_PARTITION_PROCESSING_OPERATION] = new ConstructorFunction<Integer, IdentifiedDataSerializable>() {
@Override
public IdentifiedDataSerializable createNew(Integer arg) {
return new PostPonePartitionProcessing();
}
};
return new ArrayDataSerializableFactory(constructors);
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_mapreduce_impl_MapReduceDataSerializerHook.java
|
151 |
public class ODateSerializer implements OBinarySerializer<Date> {
public static ODateSerializer INSTANCE = new ODateSerializer();
public static final byte ID = 4;
public int getObjectSize(Date object, Object... hints) {
return OLongSerializer.LONG_SIZE;
}
public void serialize(Date object, byte[] stream, int startPosition, Object... hints) {
Calendar calendar = Calendar.getInstance();
calendar.setTime(object);
calendar.set(Calendar.HOUR_OF_DAY, 0);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
calendar.set(Calendar.MILLISECOND, 0);
ODateTimeSerializer dateTimeSerializer = ODateTimeSerializer.INSTANCE;
dateTimeSerializer.serialize(calendar.getTime(), stream, startPosition);
}
public Date deserialize(byte[] stream, int startPosition) {
ODateTimeSerializer dateTimeSerializer = ODateTimeSerializer.INSTANCE;
return dateTimeSerializer.deserialize(stream, startPosition);
}
public int getObjectSize(byte[] stream, int startPosition) {
return OLongSerializer.LONG_SIZE;
}
public byte getId() {
return ID;
}
public int getObjectSizeNative(byte[] stream, int startPosition) {
return OLongSerializer.LONG_SIZE;
}
public void serializeNative(Date object, byte[] stream, int startPosition, Object... hints) {
Calendar calendar = Calendar.getInstance();
calendar.setTime(object);
calendar.set(Calendar.HOUR_OF_DAY, 0);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
calendar.set(Calendar.MILLISECOND, 0);
ODateTimeSerializer dateTimeSerializer = ODateTimeSerializer.INSTANCE;
dateTimeSerializer.serializeNative(calendar.getTime(), stream, startPosition);
}
public Date deserializeNative(byte[] stream, int startPosition) {
ODateTimeSerializer dateTimeSerializer = ODateTimeSerializer.INSTANCE;
return dateTimeSerializer.deserializeNative(stream, startPosition);
}
@Override
public void serializeInDirectMemory(Date object, ODirectMemoryPointer pointer, long offset, Object... hints) {
Calendar calendar = Calendar.getInstance();
calendar.setTime(object);
calendar.set(Calendar.HOUR_OF_DAY, 0);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
calendar.set(Calendar.MILLISECOND, 0);
ODateTimeSerializer dateTimeSerializer = ODateTimeSerializer.INSTANCE;
dateTimeSerializer.serializeInDirectMemory(calendar.getTime(), pointer, offset);
}
@Override
public Date deserializeFromDirectMemory(ODirectMemoryPointer pointer, long offset) {
ODateTimeSerializer dateTimeSerializer = ODateTimeSerializer.INSTANCE;
return dateTimeSerializer.deserializeFromDirectMemory(pointer, offset);
}
@Override
public int getObjectSizeInDirectMemory(ODirectMemoryPointer pointer, long offset) {
return OLongSerializer.LONG_SIZE;
}
public boolean isFixedLength() {
return true;
}
public int getFixedLength() {
return OLongSerializer.LONG_SIZE;
}
@Override
public Date preprocess(Date value, Object... hints) {
final Calendar calendar = Calendar.getInstance();
calendar.setTime(value);
calendar.set(Calendar.HOUR_OF_DAY, 0);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
calendar.set(Calendar.MILLISECOND, 0);
return calendar.getTime();
}
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_serialization_types_ODateSerializer.java
|
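A small round-trip sketch using only the methods shown above; note that the serializer zeroes the time-of-day fields, so only the calendar date survives.
// Minimal byte-array round trip with the serializer above.
ODateSerializer serializer = ODateSerializer.INSTANCE;
Date now = new Date();
byte[] buffer = new byte[serializer.getObjectSize(now)];
serializer.serialize(now, buffer, 0);
Date dateOnly = serializer.deserialize(buffer, 0); // same day as `now`, hours/minutes/seconds/millis zeroed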
1,411 |
private static final SoftLock LOCK_SUCCESS = new SoftLock() {};
| 1no label
|
hazelcast-hibernate_hazelcast-hibernate3_src_main_java_com_hazelcast_hibernate_distributed_IMapRegionCache.java
|
169 |
public static interface ForkJoinWorkerThreadFactory {
/**
* Returns a new worker thread operating in the given pool.
*
* @param pool the pool this thread works in
* @throws NullPointerException if the pool is null
*/
public ForkJoinWorkerThread newThread(ForkJoinPool pool);
}
| 0true
|
src_main_java_jsr166y_ForkJoinPool.java
|
870 |
searchService.sendExecuteFetch(node, fetchSearchRequest, new SearchServiceListener<FetchSearchResult>() {
@Override
public void onResult(FetchSearchResult result) {
result.shardTarget(shardTarget);
fetchResults.set(shardIndex, result);
if (counter.decrementAndGet() == 0) {
finishHim();
}
}
@Override
public void onFailure(Throwable t) {
onFetchFailure(t, fetchSearchRequest, shardIndex, shardTarget, counter);
}
});
| 0true
|
src_main_java_org_elasticsearch_action_search_type_TransportSearchDfsQueryThenFetchAction.java
|
24 |
class FindOccurrenceLocationVisitor extends Visitor
implements NaturalVisitor {
private Node node;
private int offset;
private OccurrenceLocation occurrence;
private boolean inTypeConstraint = false;
FindOccurrenceLocationVisitor(int offset, Node node) {
this.offset = offset;
this.node = node;
}
OccurrenceLocation getOccurrenceLocation() {
return occurrence;
}
@Override
public void visitAny(Node that) {
if (inBounds(that)) {
super.visitAny(that);
}
//otherwise, as a performance optimization
//don't go any further down this branch
}
@Override
public void visit(Tree.Condition that) {
if (inBounds(that)) {
occurrence = EXPRESSION;
}
super.visit(that);
}
@Override
public void visit(Tree.ExistsCondition that) {
super.visit(that);
if (that.getVariable()==null ?
inBounds(that) :
inBounds(that.getVariable().getIdentifier())) {
occurrence = EXISTS;
}
}
@Override
public void visit(Tree.NonemptyCondition that) {
super.visit(that);
if (that.getVariable()==null ?
inBounds(that) :
inBounds(that.getVariable().getIdentifier())) {
occurrence = NONEMPTY;
}
}
@Override
public void visit(Tree.IsCondition that) {
super.visit(that);
boolean inBounds;
if (that.getVariable()!=null) {
inBounds = inBounds(that.getVariable().getIdentifier());
}
else if (that.getType()!=null) {
inBounds = inBounds(that) && offset>that.getType().getStopIndex()+1;
}
else {
inBounds = false;
}
if (inBounds) {
occurrence = IS;
}
}
public void visit(Tree.TypeConstraint that) {
inTypeConstraint=true;
super.visit(that);
inTypeConstraint=false;
}
public void visit(Tree.ImportMemberOrTypeList that) {
if (inBounds(that)) {
occurrence = IMPORT;
}
super.visit(that);
}
public void visit(Tree.ExtendedType that) {
if (inBounds(that)) {
occurrence = EXTENDS;
}
super.visit(that);
}
public void visit(Tree.SatisfiedTypes that) {
if (inBounds(that)) {
occurrence = inTypeConstraint?
UPPER_BOUND : SATISFIES;
}
super.visit(that);
}
public void visit(Tree.CaseTypes that) {
if (inBounds(that)) {
occurrence = OF;
}
super.visit(that);
}
public void visit(Tree.CatchClause that) {
if (inBounds(that) &&
!inBounds(that.getBlock())) {
occurrence = CATCH;
}
else {
super.visit(that);
}
}
public void visit(Tree.CaseClause that) {
if (inBounds(that) &&
!inBounds(that.getBlock())) {
occurrence = CASE;
}
super.visit(that);
}
@Override
public void visit(Tree.BinaryOperatorExpression that) {
Term right = that.getRightTerm();
if (right==null) {
right = that;
}
Term left = that.getLeftTerm();
if (left==null) {
left = that;
}
if (inBounds(left, right)) {
occurrence = EXPRESSION;
}
super.visit(that);
}
@Override
public void visit(Tree.UnaryOperatorExpression that) {
Term term = that.getTerm();
if (term==null) {
term = that;
}
if (inBounds(that, term) || inBounds(term, that)) {
occurrence = EXPRESSION;
}
super.visit(that);
}
@Override
public void visit(Tree.ParameterList that) {
if (inBounds(that)) {
occurrence = PARAMETER_LIST;
}
super.visit(that);
}
@Override
public void visit(Tree.TypeParameterList that) {
if (inBounds(that)) {
occurrence = TYPE_PARAMETER_LIST;
}
super.visit(that);
}
@Override
public void visit(Tree.TypeSpecifier that) {
if (inBounds(that)) {
occurrence = TYPE_ALIAS;
}
super.visit(that);
}
@Override
public void visit(Tree.ClassSpecifier that) {
if (inBounds(that)) {
occurrence = CLASS_ALIAS;
}
super.visit(that);
}
@Override
public void visit(Tree.SpecifierOrInitializerExpression that) {
if (inBounds(that)) {
occurrence = EXPRESSION;
}
super.visit(that);
}
@Override
public void visit(Tree.ArgumentList that) {
if (inBounds(that)) {
occurrence = EXPRESSION;
}
super.visit(that);
}
@Override
public void visit(Tree.TypeArgumentList that) {
if (inBounds(that)) {
occurrence = TYPE_ARGUMENT_LIST;
}
super.visit(that);
}
@Override
public void visit(QualifiedMemberOrTypeExpression that) {
if (inBounds(that.getMemberOperator(), that.getIdentifier())) {
occurrence = EXPRESSION;
}
else {
super.visit(that);
}
}
@Override
public void visit(Tree.Declaration that) {
if (inBounds(that)) {
if (occurrence!=PARAMETER_LIST) {
occurrence=null;
}
}
super.visit(that);
}
public void visit(Tree.MetaLiteral that) {
super.visit(that);
if (inBounds(that)) {
if (occurrence!=TYPE_ARGUMENT_LIST) {
switch (that.getNodeType()) {
case "ModuleLiteral":
occurrence=MODULE_REF;
break;
case "PackageLiteral":
occurrence=PACKAGE_REF;
break;
case "ValueLiteral":
occurrence=VALUE_REF;
break;
case "FunctionLiteral":
occurrence=FUNCTION_REF;
break;
case "InterfaceLiteral":
occurrence=INTERFACE_REF;
break;
case "ClassLiteral":
occurrence=CLASS_REF;
break;
case "TypeParameterLiteral":
occurrence=TYPE_PARAMETER_REF;
break;
case "AliasLiteral":
occurrence=ALIAS_REF;
break;
default:
occurrence = META;
}
}
}
}
public void visit(Tree.StringLiteral that) {
if (inBounds(that)) {
occurrence = DOCLINK;
}
}
public void visit(Tree.DocLink that) {
if (this.node instanceof Tree.DocLink) {
occurrence = DOCLINK;
}
}
private boolean inBounds(Node that) {
return inBounds(that, that);
}
private boolean inBounds(Node left, Node right) {
if (left==null) return false;
if (right==null) right=left;
Integer startIndex = left.getStartIndex();
Integer stopIndex = right.getStopIndex();
return startIndex!=null && stopIndex!=null &&
startIndex <= node.getStartIndex() &&
stopIndex >= node.getStopIndex();
}
}
| 0true
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_complete_FindOccurrenceLocationVisitor.java
|
817 |
public class ApplyOperation<R> extends AtomicLongBaseOperation {
private IFunction<Long, R> function;
private R returnValue;
public ApplyOperation() {
}
public ApplyOperation(String name, IFunction<Long, R> function) {
super(name);
this.function = function;
}
@Override
public void run() throws Exception {
LongWrapper number = getNumber();
returnValue = function.apply(number.get());
}
@Override
public R getResponse() {
return returnValue;
}
@Override
public int getId() {
return AtomicLongDataSerializerHook.APPLY;
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
super.writeInternal(out);
out.writeObject(function);
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
super.readInternal(in);
function = in.readObject();
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_concurrent_atomiclong_operations_ApplyOperation.java
|
329 |
EntryListener listener = new EntryAdapter() {
@Override
public void entryAdded(EntryEvent event) {
atomicInteger.incrementAndGet();
countDownLatch.countDown();
}
};
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_map_ClientMapTest.java
|
520 |
public class ClientTransactionManager {
final HazelcastClient client;
final ConcurrentMap<SerializableXID, TransactionProxy> managedTransactions =
new ConcurrentHashMap<SerializableXID, TransactionProxy>();
final ConcurrentMap<SerializableXID, ClientConnection> recoveredTransactions =
new ConcurrentHashMap<SerializableXID, ClientConnection>();
public ClientTransactionManager(HazelcastClient client) {
this.client = client;
}
public HazelcastClient getClient() {
return client;
}
public TransactionContext newTransactionContext() {
return newTransactionContext(TransactionOptions.getDefault());
}
public TransactionContext newTransactionContext(TransactionOptions options) {
return new TransactionContextProxy(this, options);
}
public <T> T executeTransaction(TransactionalTask<T> task) throws TransactionException {
return executeTransaction(TransactionOptions.getDefault(), task);
}
public <T> T executeTransaction(TransactionOptions options, TransactionalTask<T> task) throws TransactionException {
final TransactionContext context = newTransactionContext(options);
context.beginTransaction();
try {
final T value = task.execute(context);
context.commitTransaction();
return value;
} catch (Throwable e) {
context.rollbackTransaction();
if (e instanceof TransactionException) {
throw (TransactionException) e;
}
if (e.getCause() instanceof TransactionException) {
throw (TransactionException) e.getCause();
}
if (e instanceof RuntimeException) {
throw (RuntimeException) e;
}
throw new TransactionException(e);
}
}
public void addManagedTransaction(Xid xid, TransactionProxy transaction) {
final SerializableXID sXid = new SerializableXID(xid.getFormatId(),
xid.getGlobalTransactionId(), xid.getBranchQualifier());
transaction.setXid(sXid);
managedTransactions.put(sXid, transaction);
}
public TransactionProxy getManagedTransaction(Xid xid) {
final SerializableXID sXid = new SerializableXID(xid.getFormatId(),
xid.getGlobalTransactionId(), xid.getBranchQualifier());
return managedTransactions.get(sXid);
}
public void removeManagedTransaction(Xid xid) {
final SerializableXID sXid = new SerializableXID(xid.getFormatId(),
xid.getGlobalTransactionId(), xid.getBranchQualifier());
managedTransactions.remove(sXid);
}
ClientConnection connect() {
try {
return client.getConnectionManager().tryToConnect(null);
} catch (Exception ignored) {
}
return null;
}
public Xid[] recover() {
final SerializationService serializationService = client.getSerializationService();
final ClientInvocationServiceImpl invocationService = (ClientInvocationServiceImpl) client.getInvocationService();
final Xid[] empty = new Xid[0];
try {
final ClientConnection connection = connect();
if (connection == null) {
return empty;
}
final RecoverAllTransactionsRequest request = new RecoverAllTransactionsRequest();
final ICompletableFuture<SerializableCollection> future = invocationService.send(request, connection);
final SerializableCollection collectionWrapper = serializationService.toObject(future.get());
for (Data data : collectionWrapper) {
final SerializableXID xid = serializationService.toObject(data);
recoveredTransactions.put(xid, connection);
}
final Set<SerializableXID> xidSet = recoveredTransactions.keySet();
return xidSet.toArray(new Xid[xidSet.size()]);
} catch (Exception e) {
ExceptionUtil.rethrow(e);
}
return empty;
}
public boolean recover(Xid xid, boolean commit) {
final SerializableXID sXid = new SerializableXID(xid.getFormatId(),
xid.getGlobalTransactionId(), xid.getBranchQualifier());
final ClientConnection connection = recoveredTransactions.remove(sXid);
if (connection == null) {
return false;
}
final ClientInvocationServiceImpl invocationService = (ClientInvocationServiceImpl) client.getInvocationService();
final RecoverTransactionRequest request = new RecoverTransactionRequest(sXid, commit);
try {
final ICompletableFuture future = invocationService.send(request, connection);
future.get();
} catch (Exception e) {
ExceptionUtil.rethrow(e);
}
return true;
}
public void shutdown() {
managedTransactions.clear();
recoveredTransactions.clear();
}
}
| 1no label
|
hazelcast-client_src_main_java_com_hazelcast_client_txn_ClientTransactionManager.java
|
88 |
@Service("blStaticAssetService")
public class StaticAssetServiceImpl extends AbstractContentService implements StaticAssetService {
private static final Log LOG = LogFactory.getLog(StaticAssetServiceImpl.class);
@Value("${asset.server.url.prefix.internal}")
protected String staticAssetUrlPrefix;
@Value("${asset.server.url.prefix}")
protected String staticAssetEnvironmentUrlPrefix;
@Resource(name = "blImageArtifactProcessor")
protected ImageArtifactProcessor imageArtifactProcessor;
@Value("${asset.use.filesystem.storage}")
protected boolean storeAssetsOnFileSystem = false;
@Value("${asset.server.url.prefix.secure}")
protected String staticAssetEnvironmentSecureUrlPrefix;
@Value("${automatically.approve.static.assets}")
protected boolean automaticallyApproveAndPromoteStaticAssets=true;
@Resource(name="blStaticAssetDao")
protected StaticAssetDao staticAssetDao;
@Resource(name="blSandBoxItemDao")
protected SandBoxItemDao sandBoxItemDao;
@Resource(name="blStaticAssetStorageService")
protected StaticAssetStorageService staticAssetStorageService;
private final Random random = new Random();
private final String FILE_NAME_CHARS = "0123456789abcdef";
@Override
public StaticAsset findStaticAssetById(Long id) {
return staticAssetDao.readStaticAssetById(id);
}
@Override
public List<StaticAsset> readAllStaticAssets() {
return staticAssetDao.readAllStaticAssets();
}
static {
MimeUtil.registerMimeDetector(ExtensionMimeDetector.class.getName());
MimeUtil.registerMimeDetector(MagicMimeMimeDetector.class.getName());
}
protected String getFileExtension(String fileName) {
int pos = fileName.lastIndexOf(".");
if (pos > 0) {
return fileName.substring(pos + 1, fileName.length()).toLowerCase();
} else {
LOG.warn("No extension provided for asset : " + fileName);
return null;
}
}
/**
* Generates a random filename as a sequence of lowercase hex digits.
* @param size the number of hex characters to generate
* @return the generated filename
*/
protected String generateFileName(int size) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < size; i++) {
int pos = random.nextInt(FILE_NAME_CHARS.length());
sb = sb.append(FILE_NAME_CHARS.charAt(pos));
}
return sb.toString();
}
/**
* Assembles the asset URL from the passed in properties as
* /{entityType}/{entityId}/{fileName}
* e.g. /product/7001/ab12
*
* If no fileName property is provided, the original filename of the uploaded file is used.
* Any protocol portion (text before a ":") is stripped from the fileName property.
*
* @param assetProperties properties such as entityType, entityId and fileName
* @param originalFilename the uploaded file's name, used when no fileName property is set
* @return the assembled asset URL
*/
protected String buildAssetURL(Map<String, String> assetProperties, String originalFilename) {
StringBuilder path = new StringBuilder("/");
String entityType = assetProperties.get("entityType");
String entityId = assetProperties.get("entityId");
String fileName = assetProperties.get("fileName");
if (entityType != null) {
path = path.append(entityType).append("/");
}
if (entityId != null) {
path = path.append(entityId).append("/");
}
if (fileName != null) {
int pos = fileName.indexOf(":");
if (pos > 0) {
if (LOG.isTraceEnabled()) {
LOG.trace("Removing protocol from URL name" + fileName);
}
fileName = fileName.substring(pos + 1);
}
} else {
fileName = originalFilename;
}
return path.append(fileName).toString();
}
@Override
@Transactional(TransactionUtils.DEFAULT_TRANSACTION_MANAGER)
public StaticAsset createStaticAssetFromFile(MultipartFile file, Map<String, String> properties) {
if (properties == null) {
properties = new HashMap<String, String>();
}
String fullUrl = buildAssetURL(properties, file.getOriginalFilename());
StaticAsset newAsset = staticAssetDao.readStaticAssetByFullUrl(fullUrl, null);
int count = 0;
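// If an asset already exists at this URL, probe numbered variants (new format first, then legacy)
// until an unused URL is found.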
while (newAsset != null) {
count++;
//try the new format first, then the old
newAsset = staticAssetDao.readStaticAssetByFullUrl(getCountUrl(fullUrl, count, false), null);
if (newAsset == null) {
newAsset = staticAssetDao.readStaticAssetByFullUrl(getCountUrl(fullUrl, count, true), null);
}
}
if (count > 0) {
fullUrl = getCountUrl(fullUrl, count, false);
}
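// Try to read image metadata from the upload; if it is not a recognizable image, fall back to a plain static asset.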
try {
ImageMetadata metadata = imageArtifactProcessor.getImageMetadata(file.getInputStream());
newAsset = new ImageStaticAssetImpl();
((ImageStaticAsset) newAsset).setWidth(metadata.getWidth());
((ImageStaticAsset) newAsset).setHeight(metadata.getHeight());
} catch (Exception e) {
//must not be an image stream
newAsset = new StaticAssetImpl();
}
if (storeAssetsOnFileSystem) {
newAsset.setStorageType(StorageType.FILESYSTEM);
} else {
newAsset.setStorageType(StorageType.DATABASE);
}
newAsset.setName(file.getOriginalFilename());
getMimeType(file, newAsset);
newAsset.setFileExtension(getFileExtension(file.getOriginalFilename()));
newAsset.setFileSize(file.getSize());
newAsset.setFullUrl(fullUrl);
return staticAssetDao.addOrUpdateStaticAsset(newAsset, false);
}
/**
* Gets the count URL based on the original fullUrl. If requested in legacy format this will return URLs like:
*
* /path/to/image.jpg-1
* /path/to/image.jpg-2
*
* Whereas if this is in non-legacy format (<b>legacy</b> == false):
*
* /path/to/image-1.jpg
* /path/to/image-2.jpg
*
* Used to deal with duplicate URLs of uploaded assets
*
*/
protected String getCountUrl(String fullUrl, int count, boolean legacyFormat) {
String countUrl = fullUrl + '-' + count;
int dotIndex = fullUrl.lastIndexOf('.');
if (dotIndex != -1 && !legacyFormat) {
countUrl = fullUrl.substring(0, dotIndex) + '-' + count + '.' + fullUrl.substring(dotIndex + 1);
}
return countUrl;
}
protected void getMimeType(MultipartFile file, StaticAsset newAsset) {
Collection mimeTypes = MimeUtil.getMimeTypes(file.getOriginalFilename());
if (!mimeTypes.isEmpty()) {
MimeType mimeType = (MimeType) mimeTypes.iterator().next();
newAsset.setMimeType(mimeType.toString());
} else {
try {
mimeTypes = MimeUtil.getMimeTypes(file.getInputStream());
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
if (!mimeTypes.isEmpty()) {
MimeType mimeType = (MimeType) mimeTypes.iterator().next();
newAsset.setMimeType(mimeType.toString());
}
}
}
@Override
public StaticAsset findStaticAssetByFullUrl(String fullUrl, SandBox targetSandBox) {
try {
fullUrl = URLDecoder.decode(fullUrl, "UTF-8");
//strip out the jsessionid if it's there
fullUrl = fullUrl.replaceAll(";jsessionid=.*?(?=\\?|$)", "");
} catch (UnsupportedEncodingException e) {
throw new RuntimeException("Unsupported encoding to decode fullUrl", e);
}
return staticAssetDao.readStaticAssetByFullUrl(fullUrl, targetSandBox);
}
@Override
@Transactional(TransactionUtils.DEFAULT_TRANSACTION_MANAGER)
public StaticAsset addStaticAsset(StaticAsset staticAsset, SandBox destinationSandbox) {
if (automaticallyApproveAndPromoteStaticAssets) {
if (destinationSandbox != null && destinationSandbox.getSite() != null) {
destinationSandbox = destinationSandbox.getSite().getProductionSandbox();
} else {
// Null means production for single-site installations.
destinationSandbox = null;
}
}
staticAsset.setSandbox(destinationSandbox);
staticAsset.setDeletedFlag(false);
staticAsset.setArchivedFlag(false);
StaticAsset newAsset = staticAssetDao.addOrUpdateStaticAsset(staticAsset, true);
if (! isProductionSandBox(destinationSandbox)) {
sandBoxItemDao.addSandBoxItem(destinationSandbox.getId(), SandBoxOperationType.ADD, SandBoxItemType.STATIC_ASSET, newAsset.getFullUrl(), newAsset.getId(), null);
}
return newAsset;
}
@Override
@Transactional(TransactionUtils.DEFAULT_TRANSACTION_MANAGER)
public StaticAsset updateStaticAsset(StaticAsset staticAsset, SandBox destSandbox) {
if (staticAsset.getLockedFlag()) {
throw new IllegalArgumentException("Unable to update a locked record");
}
if (automaticallyApproveAndPromoteStaticAssets) {
if (destSandbox != null && destSandbox.getSite() != null) {
destSandbox = destSandbox.getSite().getProductionSandbox();
} else {
// Null means production for single-site installations.
destSandbox = null;
}
}
if (checkForSandboxMatch(staticAsset.getSandbox(), destSandbox)) {
if (staticAsset.getDeletedFlag()) {
SandBoxItem item = sandBoxItemDao.retrieveBySandboxAndTemporaryItemId(staticAsset.getSandbox()==null?null:staticAsset.getSandbox().getId(), SandBoxItemType.STATIC_ASSET, staticAsset.getId());
if (staticAsset.getOriginalAssetId() == null && item != null) {
// This item was added in this sandbox and now needs to be deleted.
staticAsset.setArchivedFlag(true);
item.setArchivedFlag(true);
} else if (item != null) {
// This item was being updated but now is being deleted - so change the
// sandbox operation type to deleted
item.setSandBoxOperationType(SandBoxOperationType.DELETE);
sandBoxItemDao.updateSandBoxItem(item);
} else if (automaticallyApproveAndPromoteStaticAssets) {
staticAsset.setArchivedFlag(true);
}
}
return staticAssetDao.addOrUpdateStaticAsset(staticAsset, true);
} else if (isProductionSandBox(staticAsset.getSandbox())) {
// Move from production to destSandbox
StaticAsset clonedAsset = staticAsset.cloneEntity();
clonedAsset.setOriginalAssetId(staticAsset.getId());
clonedAsset.setSandbox(destSandbox);
StaticAsset returnAsset = staticAssetDao.addOrUpdateStaticAsset(clonedAsset, true);
StaticAsset prod = findStaticAssetById(staticAsset.getId());
prod.setLockedFlag(true);
staticAssetDao.addOrUpdateStaticAsset(prod, false);
SandBoxOperationType type = SandBoxOperationType.UPDATE;
if (clonedAsset.getDeletedFlag()) {
type = SandBoxOperationType.DELETE;
}
sandBoxItemDao.addSandBoxItem(destSandbox.getId(), type, SandBoxItemType.STATIC_ASSET, returnAsset.getFullUrl(), returnAsset.getId(), returnAsset.getOriginalAssetId());
return returnAsset;
} else {
// This should happen via a promote, revert, or reject in the sandbox service
throw new IllegalArgumentException("Update called when promote or reject was expected.");
}
}
// Returns true if the src and dest sandbox are the same.
private boolean checkForSandboxMatch(SandBox src, SandBox dest) {
if (src != null) {
if (dest != null) {
return src.getId().equals(dest.getId());
}
}
return (src == null && dest == null);
}
// // Returns true if the dest sandbox is production.
// private boolean checkForProductionSandbox(SandBox dest) {
// boolean productionSandbox = false;
//
// if (dest == null) {
// productionSandbox = true;
// } else {
// if (dest.getSite() != null && dest.getSite().getProductionSandbox() != null && dest.getSite().getProductionSandbox().getId() != null) {
// productionSandbox = dest.getSite().getProductionSandbox().getId().equals(dest.getId());
// }
// }
//
// return productionSandbox;
// }
// Returns true if the dest sandbox is production.
private boolean isProductionSandBox(SandBox dest) {
return dest == null || SandBoxType.PRODUCTION.equals(dest.getSandBoxType());
}
@Override
@Transactional("blTransactionManager")
public void deleteStaticAsset(StaticAsset staticAsset, SandBox destinationSandbox) {
staticAsset.setDeletedFlag(true);
updateStaticAsset(staticAsset, destinationSandbox);
}
@Override
public List<StaticAsset> findAssets(SandBox sandbox, Criteria c) {
return findItems(sandbox, c, StaticAsset.class, StaticAssetImpl.class, "originalAssetId");
}
@Override
public Long countAssets(SandBox sandbox, Criteria c) {
return countItems(sandbox, c, StaticAssetImpl.class, "originalAssetId");
}
@Override
public void itemPromoted(SandBoxItem sandBoxItem, SandBox destinationSandBox) {
if (! SandBoxItemType.STATIC_ASSET.equals(sandBoxItem.getSandBoxItemType())) {
return;
}
StaticAsset asset = staticAssetDao.readStaticAssetById(sandBoxItem.getTemporaryItemId());
if (asset == null) {
if (LOG.isDebugEnabled()) {
LOG.debug("Asset not found " + sandBoxItem.getTemporaryItemId());
}
// Nothing to promote; return here to avoid a NullPointerException further below.
return;
} else {
boolean productionSandBox = isProductionSandBox(destinationSandBox);
if (productionSandBox) {
asset.setLockedFlag(false);
} else {
asset.setLockedFlag(true);
}
if (productionSandBox && asset.getOriginalAssetId() != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("Asset promoted to production. " + asset.getId() + ". Archiving original asset " + asset.getOriginalAssetId());
}
StaticAsset originalAsset = staticAssetDao.readStaticAssetById(sandBoxItem.getTemporaryItemId());
originalAsset.setArchivedFlag(Boolean.TRUE);
staticAssetDao.addOrUpdateStaticAsset(originalAsset, false);
asset.setOriginalAssetId(null);
if (asset.getDeletedFlag()) {
asset.setArchivedFlag(Boolean.TRUE);
}
}
}
if (asset.getOriginalSandBox() == null) {
asset.setOriginalSandBox(asset.getSandbox());
}
asset.setSandbox(destinationSandBox);
staticAssetDao.addOrUpdateStaticAsset(asset, false);
}
@Override
public void itemRejected(SandBoxItem sandBoxItem, SandBox destinationSandBox) {
if (! SandBoxItemType.STATIC_ASSET.equals(sandBoxItem.getSandBoxItemType())) {
return;
}
StaticAsset asset = staticAssetDao.readStaticAssetById(sandBoxItem.getTemporaryItemId());
if (asset != null) {
asset.setSandbox(destinationSandBox);
asset.setOriginalSandBox(null);
asset.setLockedFlag(false);
staticAssetDao.addOrUpdateStaticAsset(asset, false);
}
}
@Override
public void itemReverted(SandBoxItem sandBoxItem) {
if (! SandBoxItemType.STATIC_ASSET.equals(sandBoxItem.getSandBoxItemType())) {
return;
}
StaticAsset asset = staticAssetDao.readStaticAssetById(sandBoxItem.getTemporaryItemId());
if (asset != null) {
asset.setArchivedFlag(Boolean.TRUE);
asset.setLockedFlag(false);
staticAssetDao.addOrUpdateStaticAsset(asset, false);
StaticAsset originalAsset = staticAssetDao.readStaticAssetById(sandBoxItem.getOriginalItemId());
originalAsset.setLockedFlag(false);
staticAssetDao.addOrUpdateStaticAsset(originalAsset, false);
}
}
@Override
public String getStaticAssetUrlPrefix() {
return staticAssetUrlPrefix;
}
@Override
public void setStaticAssetUrlPrefix(String staticAssetUrlPrefix) {
this.staticAssetUrlPrefix = staticAssetUrlPrefix;
}
@Override
public String getStaticAssetEnvironmentUrlPrefix() {
return fixEnvironmentUrlPrefix(staticAssetEnvironmentUrlPrefix);
}
@Override
public void setStaticAssetEnvironmentUrlPrefix(String staticAssetEnvironmentUrlPrefix) {
this.staticAssetEnvironmentUrlPrefix = staticAssetEnvironmentUrlPrefix;
}
@Override
public String getStaticAssetEnvironmentSecureUrlPrefix() {
if (StringUtils.isEmpty(staticAssetEnvironmentSecureUrlPrefix)) {
if (!StringUtils.isEmpty(staticAssetEnvironmentUrlPrefix) && staticAssetEnvironmentUrlPrefix.indexOf("http:") >= 0) {
staticAssetEnvironmentSecureUrlPrefix = staticAssetEnvironmentUrlPrefix.replace("http:", "https:");
}
}
return fixEnvironmentUrlPrefix(staticAssetEnvironmentSecureUrlPrefix);
}
public void setStaticAssetEnvironmentSecureUrlPrefix(String staticAssetEnvironmentSecureUrlPrefix) {
this.staticAssetEnvironmentSecureUrlPrefix = staticAssetEnvironmentSecureUrlPrefix;
}
@Override
public boolean getAutomaticallyApproveAndPromoteStaticAssets() {
return automaticallyApproveAndPromoteStaticAssets;
}
@Override
public void setAutomaticallyApproveAndPromoteStaticAssets(boolean automaticallyApproveAndPromoteStaticAssets) {
this.automaticallyApproveAndPromoteStaticAssets = automaticallyApproveAndPromoteStaticAssets;
}
/**
* Trims whitespace and ensures a trailing "/". If the value is empty or the same as the
* internal url prefix, returns null so that no environment prefix is applied.
*
* @param urlPrefix the configured environment url prefix
* @return the normalized prefix, or null if it should not be used
*/
private String fixEnvironmentUrlPrefix(String urlPrefix) {
if (urlPrefix != null) {
urlPrefix = urlPrefix.trim();
if ("".equals(urlPrefix)) {
// The value was not set.
urlPrefix = null;
} else if (urlPrefix.equals(staticAssetUrlPrefix)) {
// The value is the same as the default, so no processing needed.
urlPrefix = null;
}
}
if (urlPrefix != null && !urlPrefix.endsWith("/")) {
urlPrefix = urlPrefix + "/";
}
return urlPrefix;
}
/**
* This method will take in an assetPath (think image url) and prepend the
* staticAssetUrlPrefix if one exists.
*
* Will append any contextPath onto the request. If the incoming assetPath contains
* the internal static asset prefix and an environment prefix is being prepended, the
* internal prefix is removed first.
*
* @param assetPath - The path to rewrite if it is a cms managed asset
* @param contextPath - The context path of the web application (if applicable)
* @param secureRequest - True if the request is being served over https
* @return the rewritten asset path
* @see org.broadleafcommerce.cms.file.service.StaticAssetService#getStaticAssetUrlPrefix()
* @see org.broadleafcommerce.cms.file.service.StaticAssetService#getStaticAssetEnvironmentUrlPrefix()
*/
@Override
public String convertAssetPath(String assetPath, String contextPath, boolean secureRequest) {
String returnValue = assetPath;
if (assetPath != null && getStaticAssetEnvironmentUrlPrefix() != null && ! "".equals(getStaticAssetEnvironmentUrlPrefix())) {
final String envPrefix;
if (secureRequest) {
envPrefix = getStaticAssetEnvironmentSecureUrlPrefix();
} else {
envPrefix = getStaticAssetEnvironmentUrlPrefix();
}
if (envPrefix != null) {
// remove the starting "/" if it exists.
if (returnValue.startsWith("/")) {
returnValue = returnValue.substring(1);
}
// Also, remove the "cmsstatic" from the URL before prepending the staticAssetUrlPrefix.
if (returnValue.startsWith(getStaticAssetUrlPrefix())) {
returnValue = returnValue.substring(getStaticAssetUrlPrefix().trim().length());
// remove the starting "/" if it exists.
if (returnValue.startsWith("/")) {
returnValue = returnValue.substring(1);
}
}
returnValue = envPrefix + returnValue;
}
} else {
if (returnValue != null && ! ImportSupport.isAbsoluteUrl(returnValue)) {
if (! returnValue.startsWith("/")) {
returnValue = "/" + returnValue;
}
// Add context path
if (contextPath != null && ! contextPath.equals("")) {
if (! contextPath.equals("/")) {
// Shouldn't be the case, but let's handle it anyway
if (contextPath.endsWith("/")) {
returnValue = returnValue.substring(1);
}
if (contextPath.startsWith("/")) {
returnValue = contextPath + returnValue; // normal case
} else {
returnValue = "/" + contextPath + returnValue;
}
}
}
}
}
return returnValue;
}
}
| 1no label
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_file_service_StaticAssetServiceImpl.java
|
616 |
public class BroadleafThymeleafViewResolver extends ThymeleafViewResolver {
private static final Log LOG = LogFactory.getLog(BroadleafThymeleafViewResolver.class);
/**
* <p>
* Prefix to be used in view names (returned by controllers) for specifying an
* HTTP redirect with AJAX support. That is, if you want a redirect to be followed
* by the browser as the result of an AJAX call or within an iFrame at the parent
* window, you can utilize this prefix. Note that this requires a JavaScript component,
* which is provided as part of BLC.js
*
* If the request was not performed in an AJAX / iFrame context, this prefix will
* delegate to the normal "redirect:" prefix.
* </p>
* <p>
* Value: <tt>ajaxredirect:</tt>
* </p>
*/
public static final String AJAX_REDIRECT_URL_PREFIX = "ajaxredirect:";
protected Map<String, String> layoutMap = new HashMap<String, String>();
protected String fullPageLayout = "layout/fullPageLayout";
protected String iframeLayout = "layout/iframeLayout";
/*
* This method is a copy of the same method in ThymeleafViewResolver, but since it is marked private,
* we are unable to call it from the BroadleafThymeleafViewResolver
*/
protected boolean canHandle(final String viewName) {
final String[] viewNamesToBeProcessed = getViewNames();
final String[] viewNamesNotToBeProcessed = getExcludedViewNames();
return ((viewNamesToBeProcessed == null || PatternMatchUtils.simpleMatch(viewNamesToBeProcessed, viewName)) &&
(viewNamesNotToBeProcessed == null || !PatternMatchUtils.simpleMatch(viewNamesNotToBeProcessed, viewName)));
}
/**
* Determines which internal method to call for creating the appropriate view. If no
* Broadleaf specific methods match the viewName, it delegates to the parent
* ThymeleafViewResolver createView method
*/
@Override
protected View createView(final String viewName, final Locale locale) throws Exception {
if (!canHandle(viewName)) {
LOG.trace("[THYMELEAF] View {" + viewName + "} cannot be handled by ThymeleafViewResolver. Passing on to the next resolver in the chain");
return null;
}
if (viewName.startsWith(AJAX_REDIRECT_URL_PREFIX)) {
LOG.trace("[THYMELEAF] View {" + viewName + "} is an ajax redirect, and will be handled directly by BroadleafThymeleafViewResolver");
String redirectUrl = viewName.substring(AJAX_REDIRECT_URL_PREFIX.length());
return loadAjaxRedirectView(redirectUrl, locale);
}
return super.createView(viewName, locale);
}
/**
* Performs a Broadleaf AJAX redirect. This is used in conjunction with BLC.js to support
* doing a browser page change as a result of an AJAX call.
*
* @param redirectUrl
* @param locale
* @return
* @throws Exception
*/
protected View loadAjaxRedirectView(String redirectUrl, final Locale locale) throws Exception {
if (isAjaxRequest()) {
String viewName = "utility/blcRedirect";
addStaticVariable(BroadleafControllerUtility.BLC_REDIRECT_ATTRIBUTE, redirectUrl);
return super.loadView(viewName, locale);
} else {
return new RedirectView(redirectUrl, isRedirectContextRelative(), isRedirectHttp10Compatible());
}
}
@Override
protected View loadView(final String originalViewName, final Locale locale) throws Exception {
String viewName = originalViewName;
if (!isAjaxRequest()) {
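// Pick the layout mapped to the longest view-name prefix that matches; a value of "NONE" keeps
// the original view name, and no match at all falls back to the full page layout.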
String longestPrefix = "";
for (Entry<String, String> entry : layoutMap.entrySet()) {
String viewPrefix = entry.getKey();
String viewLayout = entry.getValue();
if (viewPrefix.length() > longestPrefix.length()) {
if (originalViewName.startsWith(viewPrefix)) {
longestPrefix = viewPrefix;
if (!"NONE".equals(viewLayout)) {
viewName = viewLayout;
}
}
}
}
if (longestPrefix.equals("")) {
viewName = getFullPageLayout();
}
}
AbstractThymeleafView view = (AbstractThymeleafView) super.loadView(viewName, locale);
if (!isAjaxRequest()) {
view.addStaticVariable("templateName", originalViewName);
}
return view;
}
@Override
protected Object getCacheKey(String viewName, Locale locale) {
return viewName + "_" + locale + "_" + isAjaxRequest();
}
protected boolean isIFrameRequest() {
HttpServletRequest request = ((ServletRequestAttributes) RequestContextHolder.getRequestAttributes()).getRequest();
String iFrameParameter = request.getParameter("blcIFrame");
return (iFrameParameter != null && "true".equals(iFrameParameter));
}
protected boolean isAjaxRequest() {
// First, let's try to get it from the BroadleafRequestContext
HttpServletRequest request = null;
if (BroadleafRequestContext.getBroadleafRequestContext() != null) {
HttpServletRequest brcRequest = BroadleafRequestContext.getBroadleafRequestContext().getRequest();
if (brcRequest != null) {
request = brcRequest;
}
}
// If we didn't find it there, we might be outside of a security-configured uri. Let's see if the filter got it
if (request == null) {
try {
request = ((ServletRequestAttributes) RequestContextHolder.getRequestAttributes()).getRequest();
} catch (ClassCastException e) {
// In portlet environments, we won't be able to cast to a ServletRequestAttributes. We don't want to
// blow up in these scenarios.
LOG.warn("Unable to cast to ServletRequestAttributes and the request in BroadleafRequestContext " +
"was not set. This may introduce incorrect AJAX behavior.");
}
}
// If we still don't have a request object, we'll default to non-ajax
if (request == null) {
return false;
}
return BroadleafControllerUtility.isAjaxRequest(request);
}
/**
* Gets the map of prefix : layout for use in determining which layout
* to dispatch the request to in non-AJAX calls
*
* @return the layout map
*/
public Map<String, String> getLayoutMap() {
return layoutMap;
}
/**
* @see #getLayoutMap()
* @param layoutMap
*/
public void setLayoutMap(Map<String, String> layoutMap) {
this.layoutMap = layoutMap;
}
/**
* The default layout to use if there is no specific entry in the layout map
*
* @return the full page layout
*/
public String getFullPageLayout() {
return fullPageLayout;
}
/**
* @see #getFullPageLayout()
* @param fullPageLayout
*/
public void setFullPageLayout(String fullPageLayout) {
this.fullPageLayout = fullPageLayout;
}
/**
* The layout to use for iframe requests
*
* @return the iframe layout
*/
public String getIframeLayout() {
return iframeLayout;
}
/**
* @see #getIframeLayout()
* @param iframeLayout
*/
public void setIframeLayout(String iframeLayout) {
this.iframeLayout = iframeLayout;
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_web_BroadleafThymeleafViewResolver.java
|
42 |
public interface BiAction<A,B> { void apply(A a, B b); }
| 0true
|
src_main_java_jsr166e_ConcurrentHashMapV8.java
|
541 |
public class SpringAppContext implements ApplicationContextAware {
private static ApplicationContext appContext;
public void setApplicationContext(ApplicationContext appContext) throws BeansException {
this.appContext = appContext;
}
public static ApplicationContext getApplicationContext() {
return appContext;
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_util_SpringAppContext.java
|
189 |
private class ConcurrentRandomSliceReader implements Runnable {
private final String[][] values;
private final Set<KeyColumn> d;
private final int startKey;
private final int endKey;
private final boolean deletionEnabled;
public ConcurrentRandomSliceReader(String[][] values, Set<KeyColumn> deleted) {
this.values = values;
this.d = deleted;
this.startKey = 0;
this.endKey = values.length;
this.deletionEnabled = false;
}
public ConcurrentRandomSliceReader(String[][] values, Set<KeyColumn> deleted, int key) {
this.values = values;
this.d = deleted;
this.startKey = key % values.length;
this.endKey = startKey + 1;
this.deletionEnabled = true;
}
@Override
public void run() {
int trials = 5000;
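// Repeatedly read random slices of a random key (optionally deleting a random column first)
// to exercise concurrent reads and deletes against the store.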
for (int t = 0; t < trials; t++) {
int key = RandomGenerator.randomInt(startKey, endKey);
log.debug("Random key chosen: {} (start={}, end={})", key, startKey, endKey);
int start = RandomGenerator.randomInt(0, numColumns);
if (start == numColumns - 1) {
start = numColumns - 2;
}
int end = RandomGenerator.randomInt(start + 1, numColumns);
int limit = RandomGenerator.randomInt(1, 30);
try {
if (deletionEnabled) {
int delCol = RandomGenerator.randomInt(start, end);
ImmutableList<StaticBuffer> deletions = ImmutableList.of(KeyValueStoreUtil.getBuffer(delCol));
store.mutate(KeyValueStoreUtil.getBuffer(key), KeyColumnValueStore.NO_ADDITIONS, deletions, tx);
log.debug("Deleting ({},{})", key, delCol);
d.add(new KeyColumn(key, delCol));
tx.commit();
tx = startTx();
}
//clopen();
checkSlice(values, d, key, start, end, limit);
checkSlice(values, d, key, start, end, -1);
} catch (BackendException e) {
throw new RuntimeException(e);
}
}
}
}
| 0true
|
titan-test_src_main_java_com_thinkaurelius_titan_diskstorage_KeyColumnValueStoreTest.java
|
1,061 |
private static class FieldStrings {
// term statistics strings
public static final XContentBuilderString TTF = new XContentBuilderString("ttf");
public static final XContentBuilderString DOC_FREQ = new XContentBuilderString("doc_freq");
public static final XContentBuilderString TERM_FREQ = new XContentBuilderString("term_freq");
// field statistics strings
public static final XContentBuilderString FIELD_STATISTICS = new XContentBuilderString("field_statistics");
public static final XContentBuilderString DOC_COUNT = new XContentBuilderString("doc_count");
public static final XContentBuilderString SUM_DOC_FREQ = new XContentBuilderString("sum_doc_freq");
public static final XContentBuilderString SUM_TTF = new XContentBuilderString("sum_ttf");
public static final XContentBuilderString TOKENS = new XContentBuilderString("tokens");
public static final XContentBuilderString POS = new XContentBuilderString("position");
public static final XContentBuilderString START_OFFSET = new XContentBuilderString("start_offset");
public static final XContentBuilderString END_OFFSET = new XContentBuilderString("end_offset");
public static final XContentBuilderString PAYLOAD = new XContentBuilderString("payload");
public static final XContentBuilderString _INDEX = new XContentBuilderString("_index");
public static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
public static final XContentBuilderString _ID = new XContentBuilderString("_id");
public static final XContentBuilderString _VERSION = new XContentBuilderString("_version");
public static final XContentBuilderString FOUND = new XContentBuilderString("found");
public static final XContentBuilderString TERMS = new XContentBuilderString("terms");
public static final XContentBuilderString TERM_VECTORS = new XContentBuilderString("term_vectors");
}
| 0true
|
src_main_java_org_elasticsearch_action_termvector_TermVectorResponse.java
|
362 |
public static class GroupingTestMapper
implements Mapper<Integer, Integer, String, Integer> {
@Override
public void map(Integer key, Integer value, Context<String, Integer> collector) {
collector.emit(String.valueOf(key % 4), value);
}
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_mapreduce_ClientMapReduceTest.java
|
3,610 |
final class TransactionImpl implements Transaction, TransactionSupport {
private static final ThreadLocal<Boolean> THREAD_FLAG = new ThreadLocal<Boolean>();
private static final int ROLLBACK_TIMEOUT_MINUTES = 5;
private static final int COMMIT_TIMEOUT_MINUTES = 5;
private final TransactionManagerServiceImpl transactionManagerService;
private final NodeEngine nodeEngine;
private final List<TransactionLog> txLogs = new LinkedList<TransactionLog>();
private final Map<Object, TransactionLog> txLogMap = new HashMap<Object, TransactionLog>();
private final String txnId;
private Long threadId;
private final long timeoutMillis;
private final int durability;
private final TransactionType transactionType;
private final String txOwnerUuid;
private final boolean checkThreadAccess;
private State state = NO_TXN;
private long startTime;
private Address[] backupAddresses;
private SerializableXID xid;
public TransactionImpl(TransactionManagerServiceImpl transactionManagerService, NodeEngine nodeEngine,
TransactionOptions options, String txOwnerUuid) {
this.transactionManagerService = transactionManagerService;
this.nodeEngine = nodeEngine;
this.txnId = UuidUtil.buildRandomUuidString();
this.timeoutMillis = options.getTimeoutMillis();
this.durability = options.getDurability();
this.transactionType = options.getTransactionType();
this.txOwnerUuid = txOwnerUuid == null ? nodeEngine.getLocalMember().getUuid() : txOwnerUuid;
this.checkThreadAccess = txOwnerUuid != null;
}
// used by tx backups
TransactionImpl(TransactionManagerServiceImpl transactionManagerService, NodeEngine nodeEngine,
String txnId, List<TransactionLog> txLogs, long timeoutMillis, long startTime, String txOwnerUuid) {
this.transactionManagerService = transactionManagerService;
this.nodeEngine = nodeEngine;
this.txnId = txnId;
this.timeoutMillis = timeoutMillis;
this.startTime = startTime;
this.durability = 0;
this.transactionType = TransactionType.TWO_PHASE;
this.txLogs.addAll(txLogs);
this.state = PREPARED;
this.txOwnerUuid = txOwnerUuid;
this.checkThreadAccess = false;
}
public void setXid(SerializableXID xid) {
this.xid = xid;
}
public SerializableXID getXid() {
return xid;
}
@Override
public String getTxnId() {
return txnId;
}
public TransactionType getTransactionType() {
return transactionType;
}
@Override
public void addTransactionLog(TransactionLog transactionLog) {
if (state != Transaction.State.ACTIVE) {
throw new TransactionNotActiveException("Transaction is not active!");
}
checkThread();
// there should be just one tx log per key, so if an older one exists we remove it first
if (transactionLog instanceof KeyAwareTransactionLog) {
KeyAwareTransactionLog keyAwareTransactionLog = (KeyAwareTransactionLog) transactionLog;
TransactionLog removed = txLogMap.remove(keyAwareTransactionLog.getKey());
if (removed != null) {
txLogs.remove(removed);
}
}
txLogs.add(transactionLog);
if (transactionLog instanceof KeyAwareTransactionLog) {
KeyAwareTransactionLog keyAwareTransactionLog = (KeyAwareTransactionLog) transactionLog;
txLogMap.put(keyAwareTransactionLog.getKey(), keyAwareTransactionLog);
}
}
public TransactionLog getTransactionLog(Object key) {
return txLogMap.get(key);
}
public List<TransactionLog> getTxLogs() {
return txLogs;
}
public void removeTransactionLog(Object key) {
TransactionLog removed = txLogMap.remove(key);
if (removed != null) {
txLogs.remove(removed);
}
}
private void checkThread() {
if (!checkThreadAccess && threadId != null && threadId.longValue() != Thread.currentThread().getId()) {
throw new IllegalStateException("Transaction cannot span multiple threads!");
}
}
public void begin() throws IllegalStateException {
if (state == ACTIVE) {
throw new IllegalStateException("Transaction is already active");
}
if (THREAD_FLAG.get() != null) {
throw new IllegalStateException("Nested transactions are not allowed!");
}
//init caller thread
if(threadId == null){
threadId = Thread.currentThread().getId();
setThreadFlag(Boolean.TRUE);
}
startTime = Clock.currentTimeMillis();
backupAddresses = transactionManagerService.pickBackupAddresses(durability);
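// For durable two-phase transactions, replicate a begin record to the chosen backup members
// before the transaction becomes active.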
if (durability > 0 && backupAddresses != null && transactionType == TransactionType.TWO_PHASE) {
List<Future> futures = startTxBackup();
awaitTxBackupCompletion(futures);
}
state = ACTIVE;
}
private void awaitTxBackupCompletion(List<Future> futures) {
for (Future future : futures) {
try {
future.get(timeoutMillis, TimeUnit.MILLISECONDS);
} catch (MemberLeftException e) {
nodeEngine.getLogger(Transaction.class).warning("Member left while replicating tx begin: " + e);
} catch (Throwable e) {
if (e instanceof ExecutionException) {
e = e.getCause() != null ? e.getCause() : e;
}
if (e instanceof TargetNotMemberException) {
nodeEngine.getLogger(Transaction.class).warning("Member left while replicating tx begin: " + e);
} else {
throw ExceptionUtil.rethrow(e);
}
}
}
}
private List<Future> startTxBackup() {
final OperationService operationService = nodeEngine.getOperationService();
List<Future> futures = new ArrayList<Future>(backupAddresses.length);
for (Address backupAddress : backupAddresses) {
if (nodeEngine.getClusterService().getMember(backupAddress) != null) {
final Future f = operationService.invokeOnTarget(TransactionManagerServiceImpl.SERVICE_NAME,
new BeginTxBackupOperation(txOwnerUuid, txnId, xid), backupAddress);
futures.add(f);
}
}
return futures;
}
private void setThreadFlag(Boolean flag) {
if (!checkThreadAccess) {
THREAD_FLAG.set(flag);
}
}
public void prepare() throws TransactionException {
if (state != ACTIVE) {
throw new TransactionNotActiveException("Transaction is not active");
}
checkThread();
checkTimeout();
try {
final List<Future> futures = new ArrayList<Future>(txLogs.size());
state = PREPARING;
for (TransactionLog txLog : txLogs) {
futures.add(txLog.prepare(nodeEngine));
}
for (Future future : futures) {
future.get(timeoutMillis, TimeUnit.MILLISECONDS);
}
futures.clear();
state = PREPARED;
if (durability > 0) {
replicateTxnLog();
}
} catch (Throwable e) {
throw ExceptionUtil.rethrow(e, TransactionException.class);
}
}
private void replicateTxnLog() throws InterruptedException, ExecutionException, java.util.concurrent.TimeoutException {
final List<Future> futures = new ArrayList<Future>(txLogs.size());
final OperationService operationService = nodeEngine.getOperationService();
for (Address backupAddress : backupAddresses) {
if (nodeEngine.getClusterService().getMember(backupAddress) != null) {
final Future f = operationService.invokeOnTarget(TransactionManagerServiceImpl.SERVICE_NAME,
new ReplicateTxOperation(txLogs, txOwnerUuid, txnId, timeoutMillis, startTime),
backupAddress);
futures.add(f);
}
}
for (Future future : futures) {
future.get(timeoutMillis, TimeUnit.MILLISECONDS);
}
futures.clear();
}
public void commit() throws TransactionException, IllegalStateException {
try {
if (transactionType.equals(TransactionType.TWO_PHASE) && state != PREPARED) {
throw new IllegalStateException("Transaction is not prepared");
}
if (transactionType.equals(TransactionType.LOCAL) && state != ACTIVE) {
throw new IllegalStateException("Transaction is not active");
}
checkThread();
checkTimeout();
try {
final List<Future> futures = new ArrayList<Future>(txLogs.size());
state = COMMITTING;
for (TransactionLog txLog : txLogs) {
futures.add(txLog.commit(nodeEngine));
}
for (Future future : futures) {
try {
future.get(COMMIT_TIMEOUT_MINUTES, TimeUnit.MINUTES);
} catch (Throwable e) {
nodeEngine.getLogger(getClass()).warning("Error during commit!", e);
}
}
state = COMMITTED;
// purge tx backup
purgeTxBackups();
} catch (Throwable e) {
state = COMMIT_FAILED;
throw ExceptionUtil.rethrow(e, TransactionException.class);
}
} finally {
setThreadFlag(null);
}
}
private void checkTimeout() throws TransactionException {
if (startTime + timeoutMillis < Clock.currentTimeMillis()) {
throw new TransactionException("Transaction is timed-out!");
}
}
public void rollback() throws IllegalStateException {
try {
if (state == NO_TXN || state == ROLLED_BACK) {
throw new IllegalStateException("Transaction is not active");
}
checkThread();
state = ROLLING_BACK;
try {
rollbackTxBackup();
final List<Future> futures = new ArrayList<Future>(txLogs.size());
final ListIterator<TransactionLog> iter = txLogs.listIterator(txLogs.size());
while (iter.hasPrevious()) {
final TransactionLog txLog = iter.previous();
futures.add(txLog.rollback(nodeEngine));
}
for (Future future : futures) {
try {
future.get(ROLLBACK_TIMEOUT_MINUTES, TimeUnit.MINUTES);
} catch (Throwable e) {
nodeEngine.getLogger(getClass()).warning("Error during rollback!", e);
}
}
// purge tx backup
purgeTxBackups();
} catch (Throwable e) {
throw ExceptionUtil.rethrow(e);
} finally {
state = ROLLED_BACK;
}
} finally {
setThreadFlag(null);
}
}
private void rollbackTxBackup() {
final OperationService operationService = nodeEngine.getOperationService();
final List<Future> futures = new ArrayList<Future>(txLogs.size());
// rollback tx backup
if (durability > 0 && transactionType.equals(TransactionType.TWO_PHASE)) {
for (Address backupAddress : backupAddresses) {
if (nodeEngine.getClusterService().getMember(backupAddress) != null) {
final Future f = operationService.invokeOnTarget(TransactionManagerServiceImpl.SERVICE_NAME,
new RollbackTxBackupOperation(txnId), backupAddress);
futures.add(f);
}
}
for (Future future : futures) {
try {
future.get(timeoutMillis, TimeUnit.MILLISECONDS);
} catch (Throwable e) {
nodeEngine.getLogger(getClass()).warning("Error during tx rollback backup!", e);
}
}
futures.clear();
}
}
public void setRollbackOnly() {
state = ROLLING_BACK;
}
private void purgeTxBackups() {
if (durability > 0 && transactionType.equals(TransactionType.TWO_PHASE)) {
final OperationService operationService = nodeEngine.getOperationService();
for (Address backupAddress : backupAddresses) {
if (nodeEngine.getClusterService().getMember(backupAddress) != null) {
try {
operationService.invokeOnTarget(TransactionManagerServiceImpl.SERVICE_NAME,
new PurgeTxBackupOperation(txnId), backupAddress);
} catch (Throwable e) {
nodeEngine.getLogger(getClass()).warning("Error during purging backups!", e);
}
}
}
}
}
public long getStartTime() {
return startTime;
}
public String getOwnerUuid() {
return txOwnerUuid;
}
public State getState() {
return state;
}
public long getTimeoutMillis() {
return timeoutMillis;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append("Transaction");
sb.append("{txnId='").append(txnId).append('\'');
sb.append(", state=").append(state);
sb.append(", txType=").append(transactionType);
sb.append(", timeoutMillis=").append(timeoutMillis);
sb.append('}');
return sb.toString();
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_transaction_impl_TransactionImpl.java
|
5,920 |
public class GeoDistanceSortParser implements SortParser {
@Override
public String[] names() {
return new String[]{"_geo_distance", "_geoDistance"};
}
@Override
public SortField parse(XContentParser parser, SearchContext context) throws Exception {
String fieldName = null;
GeoPoint point = new GeoPoint();
DistanceUnit unit = DistanceUnit.DEFAULT;
GeoDistance geoDistance = GeoDistance.DEFAULT;
boolean reverse = false;
SortMode sortMode = null;
String nestedPath = null;
Filter nestedFilter = null;
boolean normalizeLon = true;
boolean normalizeLat = true;
XContentParser.Token token;
String currentName = parser.currentName();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentName = parser.currentName();
} else if (token == XContentParser.Token.START_ARRAY) {
GeoPoint.parse(parser, point);
fieldName = currentName;
} else if (token == XContentParser.Token.START_OBJECT) {
// the json in the format of -> field : { lat : 30, lon : 12 }
if ("nested_filter".equals(currentName) || "nestedFilter".equals(currentName)) {
ParsedFilter parsedFilter = context.queryParserService().parseInnerFilter(parser);
nestedFilter = parsedFilter == null ? null : parsedFilter.filter();
} else {
fieldName = currentName;
GeoPoint.parse(parser, point);
}
} else if (token.isValue()) {
if ("reverse".equals(currentName)) {
reverse = parser.booleanValue();
} else if ("order".equals(currentName)) {
reverse = "desc".equals(parser.text());
} else if (currentName.equals("unit")) {
unit = DistanceUnit.fromString(parser.text());
} else if (currentName.equals("distance_type") || currentName.equals("distanceType")) {
geoDistance = GeoDistance.fromString(parser.text());
} else if ("normalize".equals(currentName)) {
normalizeLat = parser.booleanValue();
normalizeLon = parser.booleanValue();
} else if ("sort_mode".equals(currentName) || "sortMode".equals(currentName) || "mode".equals(currentName)) {
sortMode = SortMode.fromString(parser.text());
} else if ("nested_path".equals(currentName) || "nestedPath".equals(currentName)) {
nestedPath = parser.text();
} else {
point.resetFromString(parser.text());
fieldName = currentName;
}
}
}
if (normalizeLat || normalizeLon) {
GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
}
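// Default the sort mode from the sort direction: ascending uses the nearest (MIN) distance,
// descending the farthest (MAX).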
if (sortMode == null) {
sortMode = reverse ? SortMode.MAX : SortMode.MIN;
}
if (sortMode == SortMode.SUM) {
throw new ElasticsearchIllegalArgumentException("sort_mode [sum] isn't supported for sorting by geo distance");
}
FieldMapper mapper = context.smartNameFieldMapper(fieldName);
if (mapper == null) {
throw new ElasticsearchIllegalArgumentException("failed to find mapper for [" + fieldName + "] for geo distance based sort");
}
IndexGeoPointFieldData indexFieldData = context.fieldData().getForField(mapper);
IndexFieldData.XFieldComparatorSource geoDistanceComparatorSource = new GeoDistanceComparatorSource(
indexFieldData, point.lat(), point.lon(), unit, geoDistance, sortMode
);
ObjectMapper objectMapper;
if (nestedPath != null) {
ObjectMappers objectMappers = context.mapperService().objectMapper(nestedPath);
if (objectMappers == null) {
throw new ElasticsearchIllegalArgumentException("failed to find nested object mapping for explicit nested path [" + nestedPath + "]");
}
objectMapper = objectMappers.mapper();
if (!objectMapper.nested().isNested()) {
throw new ElasticsearchIllegalArgumentException("mapping for explicit nested path is not mapped as nested: [" + nestedPath + "]");
}
} else {
objectMapper = context.mapperService().resolveClosestNestedObjectMapper(fieldName);
}
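// When the sort field lives inside a nested object, wrap the comparator so distances are
// aggregated per root (parent) document.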
if (objectMapper != null && objectMapper.nested().isNested()) {
Filter rootDocumentsFilter = context.filterCache().cache(NonNestedDocsFilter.INSTANCE);
Filter innerDocumentsFilter;
if (nestedFilter != null) {
innerDocumentsFilter = context.filterCache().cache(nestedFilter);
} else {
innerDocumentsFilter = context.filterCache().cache(objectMapper.nestedTypeFilter());
}
geoDistanceComparatorSource = new NestedFieldComparatorSource(
sortMode, geoDistanceComparatorSource, rootDocumentsFilter, innerDocumentsFilter
);
}
return new SortField(fieldName, geoDistanceComparatorSource, reverse);
}
}
| 1no label
|
src_main_java_org_elasticsearch_search_sort_GeoDistanceSortParser.java
|
602 |
public class MasterConfirmationOperation extends AbstractClusterOperation {
@Override
public void run() {
final Address endpoint = getCallerAddress();
if (endpoint == null) {
return;
}
final ClusterServiceImpl clusterService = getService();
final ILogger logger = getNodeEngine().getLogger(MasterConfirmationOperation.class.getName());
final MemberImpl member = clusterService.getMember(endpoint);
if (member == null) {
logger.warning("MasterConfirmation has been received from " + endpoint
+ ", but it is not a member of this cluster!");
OperationService operationService = getNodeEngine().getOperationService();
operationService.send(new MemberRemoveOperation(clusterService.getThisAddress()), endpoint);
} else {
if (clusterService.isMaster()) {
clusterService.acceptMasterConfirmation(member);
} else {
logger.warning(endpoint + " has sent MasterConfirmation, but this node is not master!");
}
}
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_cluster_MasterConfirmationOperation.java
|
314 |
new Thread() {
public void run() {
map.tryPut(key, "NEW_VALUE", 1, TimeUnit.SECONDS);
tryPutReturned.countDown();
}
}.start();
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_map_ClientMapLockTest.java
|
2,084 |
public class FileSystemUtils {
private static ESLogger logger = ESLoggerFactory.getLogger(FileSystemUtils.class.getName());
private static final long mkdirsStallTimeout = TimeValue.timeValueMinutes(5).millis();
private static final Object mkdirsMutex = new Object();
private static volatile Thread mkdirsThread;
private static volatile long mkdirsStartTime;
public static boolean mkdirs(File dir) {
synchronized (mkdirsMutex) {
try {
mkdirsThread = Thread.currentThread();
mkdirsStartTime = System.currentTimeMillis();
return dir.mkdirs();
} finally {
mkdirsThread = null;
}
}
}
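// If a mkdirs call has been blocked longer than the stall timeout, log it and try to interrupt
// the thread performing it.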
public static void checkMkdirsStall(long currentTime) {
Thread mkdirsThread1 = mkdirsThread;
long stallTime = currentTime - mkdirsStartTime;
if (mkdirsThread1 != null && (stallTime > mkdirsStallTimeout)) {
logger.error("mkdirs stalled for {} on {}, trying to interrupt", new TimeValue(stallTime), mkdirsThread1.getName());
mkdirsThread1.interrupt(); // try and interrupt it...
}
}
public static int maxOpenFiles(File testDir) {
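// Estimates the open-file limit by opening files in the given directory until it fails,
// then closes and deletes them (and the directory, if it was created here).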
boolean dirCreated = false;
if (!testDir.exists()) {
dirCreated = true;
testDir.mkdirs();
}
List<RandomAccessFile> files = new ArrayList<RandomAccessFile>();
try {
while (true) {
files.add(new RandomAccessFile(new File(testDir, "tmp" + files.size()), "rw"));
}
} catch (IOException ioe) {
int i = 0;
for (RandomAccessFile raf : files) {
try {
raf.close();
} catch (IOException e) {
// ignore
}
new File(testDir, "tmp" + i++).delete();
}
if (dirCreated) {
deleteRecursively(testDir);
}
}
return files.size();
}
public static boolean hasExtensions(File root, String... extensions) {
if (root != null && root.exists()) {
if (root.isDirectory()) {
File[] children = root.listFiles();
if (children != null) {
for (File child : children) {
if (child.isDirectory()) {
boolean has = hasExtensions(child, extensions);
if (has) {
return true;
}
} else {
for (String extension : extensions) {
if (child.getName().endsWith(extension)) {
return true;
}
}
}
}
}
}
}
return false;
}
/**
* Returns true if at least one of the files exists.
*/
public static boolean exists(File... files) {
for (File file : files) {
if (file.exists()) {
return true;
}
}
return false;
}
public static boolean deleteRecursively(File[] roots) {
boolean deleted = true;
for (File root : roots) {
deleted &= deleteRecursively(root);
}
return deleted;
}
public static boolean deleteRecursively(File root) {
return deleteRecursively(root, true);
}
private static boolean innerDeleteRecursively(File root) {
return deleteRecursively(root, true);
}
/**
* Delete the supplied {@link java.io.File} - for directories,
* recursively delete any nested directories or files as well.
*
* @param root the root <code>File</code> to delete
* @param deleteRoot whether or not to delete the root itself or just the content of the root.
* @return <code>true</code> if the <code>File</code> was deleted,
* otherwise <code>false</code>
*/
public static boolean deleteRecursively(File root, boolean deleteRoot) {
if (root != null && root.exists()) {
if (root.isDirectory()) {
File[] children = root.listFiles();
if (children != null) {
for (File aChildren : children) {
innerDeleteRecursively(aChildren);
}
}
}
if (deleteRoot) {
return root.delete();
} else {
return true;
}
}
return false;
}
public static void syncFile(File fileToSync) throws IOException {
boolean success = false;
int retryCount = 0;
IOException exc = null;
while (!success && retryCount < 5) {
retryCount++;
RandomAccessFile file = null;
try {
try {
file = new RandomAccessFile(fileToSync, "rw");
file.getFD().sync();
success = true;
} finally {
if (file != null)
file.close();
}
} catch (IOException ioe) {
if (exc == null)
exc = ioe;
try {
// Pause 5 msec
Thread.sleep(5);
} catch (InterruptedException ie) {
throw new InterruptedIOException(ie.getMessage());
}
}
}
}
public static void copyFile(File sourceFile, File destinationFile) throws IOException {
FileInputStream sourceIs = null;
FileChannel source = null;
FileOutputStream destinationOs = null;
FileChannel destination = null;
try {
sourceIs = new FileInputStream(sourceFile);
source = sourceIs.getChannel();
destinationOs = new FileOutputStream(destinationFile);
destination = destinationOs.getChannel();
destination.transferFrom(source, 0, source.size());
} finally {
if (source != null) {
source.close();
}
if (sourceIs != null) {
sourceIs.close();
}
if (destination != null) {
destination.close();
}
if (destinationOs != null) {
destinationOs.close();
}
}
}
private FileSystemUtils() {
}
}
| 1no label
|
src_main_java_org_elasticsearch_common_io_FileSystemUtils.java
|
167 |
public interface SecureRequest {
Permission getRequiredPermission();
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_client_SecureRequest.java
|
42 |
public interface ONavigableMap<K, V> extends SortedMap<K, V> {
/**
* Returns a key-value mapping associated with the greatest key strictly less than the given key, or {@code null} if there is no
* such key.
*
* @param key
* the key
* @return an entry with the greatest key less than {@code key}, or {@code null} if there is no such key
* @throws ClassCastException
* if the specified key cannot be compared with the keys currently in the map
* @throws NullPointerException
* if the specified key is null and this map does not permit null keys
*/
Map.Entry<K, V> lowerEntry(K key);
/**
* Returns the greatest key strictly less than the given key, or {@code null} if there is no such key.
*
* @param key
* the key
* @return the greatest key less than {@code key}, or {@code null} if there is no such key
* @throws ClassCastException
* if the specified key cannot be compared with the keys currently in the map
* @throws NullPointerException
* if the specified key is null and this map does not permit null keys
*/
K lowerKey(K key);
/**
* Returns a key-value mapping associated with the greatest key less than or equal to the given key, or {@code null} if there is
* no such key.
*
* @param key
* the key
* @return an entry with the greatest key less than or equal to {@code key}, or {@code null} if there is no such key
* @throws ClassCastException
* if the specified key cannot be compared with the keys currently in the map
* @throws NullPointerException
* if the specified key is null and this map does not permit null keys
*/
Map.Entry<K, V> floorEntry(K key);
/**
* Returns the greatest key less than or equal to the given key, or {@code null} if there is no such key.
*
* @param key
* the key
* @return the greatest key less than or equal to {@code key}, or {@code null} if there is no such key
* @throws ClassCastException
* if the specified key cannot be compared with the keys currently in the map
* @throws NullPointerException
* if the specified key is null and this map does not permit null keys
*/
K floorKey(K key);
/**
* Returns a key-value mapping associated with the least key greater than or equal to the given key, or {@code null} if there is
* no such key.
*
* @param key
* the key
* @return an entry with the least key greater than or equal to {@code key}, or {@code null} if there is no such key
* @throws ClassCastException
* if the specified key cannot be compared with the keys currently in the map
* @throws NullPointerException
* if the specified key is null and this map does not permit null keys
*/
Map.Entry<K, V> ceilingEntry(K key);
/**
* Returns the least key greater than or equal to the given key, or {@code null} if there is no such key.
*
* @param key
* the key
* @return the least key greater than or equal to {@code key}, or {@code null} if there is no such key
* @throws ClassCastException
* if the specified key cannot be compared with the keys currently in the map
* @throws NullPointerException
* if the specified key is null and this map does not permit null keys
*/
K ceilingKey(K key);
/**
* Returns a key-value mapping associated with the least key strictly greater than the given key, or {@code null} if there is no
* such key.
*
* @param key
* the key
* @return an entry with the least key greater than {@code key}, or {@code null} if there is no such key
* @throws ClassCastException
* if the specified key cannot be compared with the keys currently in the map
* @throws NullPointerException
* if the specified key is null and this map does not permit null keys
*/
Map.Entry<K, V> higherEntry(K key);
/**
* Returns the least key strictly greater than the given key, or {@code null} if there is no such key.
*
* @param key
* the key
* @return the least key greater than {@code key}, or {@code null} if there is no such key
* @throws ClassCastException
* if the specified key cannot be compared with the keys currently in the map
* @throws NullPointerException
* if the specified key is null and this map does not permit null keys
*/
K higherKey(K key);
/**
* Returns a key-value mapping associated with the least key in this map, or {@code null} if the map is empty.
*
* @return an entry with the least key, or {@code null} if this map is empty
*/
Map.Entry<K, V> firstEntry();
/**
* Returns a key-value mapping associated with the greatest key in this map, or {@code null} if the map is empty.
*
* @return an entry with the greatest key, or {@code null} if this map is empty
*/
Map.Entry<K, V> lastEntry();
/**
* Removes and returns a key-value mapping associated with the least key in this map, or {@code null} if the map is empty.
*
* @return the removed first entry of this map, or {@code null} if this map is empty
*/
Map.Entry<K, V> pollFirstEntry();
/**
* Removes and returns a key-value mapping associated with the greatest key in this map, or {@code null} if the map is empty.
*
* @return the removed last entry of this map, or {@code null} if this map is empty
*/
Map.Entry<K, V> pollLastEntry();
/**
* Returns a reverse order view of the mappings contained in this map. The descending map is backed by this map, so changes to the
* map are reflected in the descending map, and vice-versa. If either map is modified while an iteration over a collection view of
* either map is in progress (except through the iterator's own {@code remove} operation), the results of the iteration are
* undefined.
*
* <p>
* The returned map has an ordering equivalent to
* <tt>{@link Collections#reverseOrder(Comparator) Collections.reverseOrder}(comparator())</tt>. The expression
* {@code m.descendingMap().descendingMap()} returns a view of {@code m} essentially equivalent to {@code m}.
*
* @return a reverse order view of this map
*/
ONavigableMap<K, V> descendingMap();
/**
* Returns a {@link ONavigableSet} view of the keys contained in this map. The set's iterator returns the keys in ascending order.
* The set is backed by the map, so changes to the map are reflected in the set, and vice-versa. If the map is modified while an
* iteration over the set is in progress (except through the iterator's own {@code remove} operation), the results of the
* iteration are undefined. The set supports element removal, which removes the corresponding mapping from the map, via the
* {@code Iterator.remove}, {@code Set.remove}, {@code removeAll}, {@code retainAll}, and {@code clear} operations. It does not
* support the {@code add} or {@code addAll} operations.
*
* @return a navigable set view of the keys in this map
*/
ONavigableSet<K> navigableKeySet();
/**
* Returns a reverse order {@link ONavigableSet} view of the keys contained in this map. The set's iterator returns the keys in
* descending order. The set is backed by the map, so changes to the map are reflected in the set, and vice-versa. If the map is
* modified while an iteration over the set is in progress (except through the iterator's own {@code remove} operation), the
* results of the iteration are undefined. The set supports element removal, which removes the corresponding mapping from the map,
* via the {@code Iterator.remove}, {@code Set.remove}, {@code removeAll}, {@code retainAll}, and {@code clear} operations. It
* does not support the {@code add} or {@code addAll} operations.
*
* @return a reverse order navigable set view of the keys in this map
*/
ONavigableSet<K> descendingKeySet();
/**
* Returns a view of the portion of this map whose keys range from {@code fromKey} to {@code toKey}. If {@code fromKey} and
 * {@code toKey} are equal, the returned map is empty unless {@code fromInclusive} and {@code toInclusive} are both true. The
* returned map is backed by this map, so changes in the returned map are reflected in this map, and vice-versa. The returned map
* supports all optional map operations that this map supports.
*
* <p>
* The returned map will throw an {@code IllegalArgumentException} on an attempt to insert a key outside of its range, or to
* construct a submap either of whose endpoints lie outside its range.
*
* @param fromKey
* low endpoint of the keys in the returned map
* @param fromInclusive
* {@code true} if the low endpoint is to be included in the returned view
* @param toKey
* high endpoint of the keys in the returned map
* @param toInclusive
* {@code true} if the high endpoint is to be included in the returned view
* @return a view of the portion of this map whose keys range from {@code fromKey} to {@code toKey}
* @throws ClassCastException
* if {@code fromKey} and {@code toKey} cannot be compared to one another using this map's comparator (or, if the map
* has no comparator, using natural ordering). Implementations may, but are not required to, throw this exception if
* {@code fromKey} or {@code toKey} cannot be compared to keys currently in the map.
* @throws NullPointerException
* if {@code fromKey} or {@code toKey} is null and this map does not permit null keys
* @throws IllegalArgumentException
* if {@code fromKey} is greater than {@code toKey}; or if this map itself has a restricted range, and {@code fromKey}
* or {@code toKey} lies outside the bounds of the range
*/
ONavigableMap<K, V> subMap(K fromKey, boolean fromInclusive, K toKey, boolean toInclusive);
/**
* Returns a view of the portion of this map whose keys are less than (or equal to, if {@code inclusive} is true) {@code toKey}.
* The returned map is backed by this map, so changes in the returned map are reflected in this map, and vice-versa. The returned
* map supports all optional map operations that this map supports.
*
* <p>
* The returned map will throw an {@code IllegalArgumentException} on an attempt to insert a key outside its range.
*
* @param toKey
* high endpoint of the keys in the returned map
* @param inclusive
* {@code true} if the high endpoint is to be included in the returned view
* @return a view of the portion of this map whose keys are less than (or equal to, if {@code inclusive} is true) {@code toKey}
* @throws ClassCastException
* if {@code toKey} is not compatible with this map's comparator (or, if the map has no comparator, if {@code toKey}
* does not implement {@link Comparable}). Implementations may, but are not required to, throw this exception if
* {@code toKey} cannot be compared to keys currently in the map.
* @throws NullPointerException
* if {@code toKey} is null and this map does not permit null keys
* @throws IllegalArgumentException
* if this map itself has a restricted range, and {@code toKey} lies outside the bounds of the range
*/
ONavigableMap<K, V> headMap(K toKey, boolean inclusive);
/**
* Returns a view of the portion of this map whose keys are greater than (or equal to, if {@code inclusive} is true)
* {@code fromKey}. The returned map is backed by this map, so changes in the returned map are reflected in this map, and
* vice-versa. The returned map supports all optional map operations that this map supports.
*
* <p>
* The returned map will throw an {@code IllegalArgumentException} on an attempt to insert a key outside its range.
*
* @param fromKey
* low endpoint of the keys in the returned map
* @param inclusive
* {@code true} if the low endpoint is to be included in the returned view
* @return a view of the portion of this map whose keys are greater than (or equal to, if {@code inclusive} is true)
* {@code fromKey}
* @throws ClassCastException
* if {@code fromKey} is not compatible with this map's comparator (or, if the map has no comparator, if {@code fromKey}
* does not implement {@link Comparable}). Implementations may, but are not required to, throw this exception if
* {@code fromKey} cannot be compared to keys currently in the map.
* @throws NullPointerException
* if {@code fromKey} is null and this map does not permit null keys
* @throws IllegalArgumentException
* if this map itself has a restricted range, and {@code fromKey} lies outside the bounds of the range
*/
ONavigableMap<K, V> tailMap(K fromKey, boolean inclusive);
/**
* {@inheritDoc}
*
* <p>
* Equivalent to {@code subMap(fromKey, true, toKey, false)}.
*
* @throws ClassCastException
* {@inheritDoc}
* @throws NullPointerException
* {@inheritDoc}
* @throws IllegalArgumentException
* {@inheritDoc}
*/
SortedMap<K, V> subMap(K fromKey, K toKey);
/**
* {@inheritDoc}
*
* <p>
* Equivalent to {@code headMap(toKey, false)}.
*
* @throws ClassCastException
* {@inheritDoc}
* @throws NullPointerException
* {@inheritDoc}
* @throws IllegalArgumentException
* {@inheritDoc}
*/
SortedMap<K, V> headMap(K toKey);
/**
* {@inheritDoc}
*
* <p>
* Equivalent to {@code tailMap(fromKey, true)}.
*
* @throws ClassCastException
* {@inheritDoc}
* @throws NullPointerException
* {@inheritDoc}
* @throws IllegalArgumentException
* {@inheritDoc}
*/
SortedMap<K, V> tailMap(K fromKey);
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_collection_ONavigableMap.java
|
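The navigable-map contract documented in the row above maps one-to-one onto java.util.NavigableMap. The following is a minimal sketch using the JDK's TreeMap to illustrate higherKey, subMap, descendingMap, and pollFirstEntry; it illustrates the contract only and is not OrientDB code.

import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;

public class NavigableMapSketch {
    public static void main(String[] args) {
        NavigableMap<Integer, String> map = new TreeMap<>();
        map.put(10, "a");
        map.put(20, "b");
        map.put(30, "c");
        // least key strictly greater than 20 -> 30
        Integer higher = map.higherKey(20);
        // backed view of the keys in [10, 30)
        NavigableMap<Integer, String> sub = map.subMap(10, true, 30, false);
        // reverse-order view; descendingMap().descendingMap() is equivalent to the map itself
        NavigableMap<Integer, String> desc = map.descendingMap();
        // removes and returns the entry with the least key (10=a)
        Map.Entry<Integer, String> first = map.pollFirstEntry();
        System.out.println(higher + " " + sub + " " + desc.firstKey() + " " + first);
    }
}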
1,430 |
static class UpdateTask extends MappingTask {
final String type;
final CompressedString mappingSource;
final long order; // -1 for unknown
final String nodeId; // null for unknown
final ClusterStateUpdateListener listener;
UpdateTask(String index, String indexUUID, String type, CompressedString mappingSource, long order, String nodeId, ClusterStateUpdateListener listener) {
super(index, indexUUID);
this.type = type;
this.mappingSource = mappingSource;
this.order = order;
this.nodeId = nodeId;
this.listener = listener;
}
}
| 0true
|
src_main_java_org_elasticsearch_cluster_metadata_MetaDataMappingService.java
|
615 |
public class IndexShardStats implements Iterable<ShardStats>, Streamable {
private ShardId shardId;
private ShardStats[] shards;
private IndexShardStats() {}
public IndexShardStats(ShardId shardId, ShardStats[] shards) {
this.shardId = shardId;
this.shards = shards;
}
public ShardId getShardId() {
return this.shardId;
}
public ShardStats[] getShards() {
return shards;
}
public ShardStats getAt(int position) {
return shards[position];
}
@Override
public Iterator<ShardStats> iterator() {
return Iterators.forArray(shards);
}
private CommonStats total = null;
public CommonStats getTotal() {
if (total != null) {
return total;
}
CommonStats stats = new CommonStats();
for (ShardStats shard : shards) {
stats.add(shard.getStats());
}
total = stats;
return stats;
}
private CommonStats primary = null;
public CommonStats getPrimary() {
if (primary != null) {
return primary;
}
CommonStats stats = new CommonStats();
for (ShardStats shard : shards) {
if (shard.getShardRouting().primary()) {
stats.add(shard.getStats());
}
}
primary = stats;
return stats;
}
@Override
public void readFrom(StreamInput in) throws IOException {
shardId = ShardId.readShardId(in);
int shardSize = in.readVInt();
shards = new ShardStats[shardSize];
for (int i = 0; i < shardSize; i++) {
shards[i] = ShardStats.readShardStats(in);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
shardId.writeTo(out);
out.writeVInt(shards.length);
for (ShardStats stats : shards) {
stats.writeTo(out);
}
}
public static IndexShardStats readIndexShardStats(StreamInput in) throws IOException {
IndexShardStats indexShardStats = new IndexShardStats();
indexShardStats.readFrom(in);
return indexShardStats;
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_stats_IndexShardStats.java
|
501 |
public class CreateIndexAction extends IndicesAction<CreateIndexRequest, CreateIndexResponse, CreateIndexRequestBuilder> {
public static final CreateIndexAction INSTANCE = new CreateIndexAction();
public static final String NAME = "indices/create";
private CreateIndexAction() {
super(NAME);
}
@Override
public CreateIndexResponse newResponse() {
return new CreateIndexResponse();
}
@Override
public CreateIndexRequestBuilder newRequestBuilder(IndicesAdminClient client) {
return new CreateIndexRequestBuilder(client);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_create_CreateIndexAction.java
|
427 |
public class ClusterStateRequest extends MasterNodeReadOperationRequest<ClusterStateRequest> {
private boolean routingTable = true;
private boolean nodes = true;
private boolean metaData = true;
private boolean blocks = true;
private String[] indices = Strings.EMPTY_ARRAY;
private String[] indexTemplates = Strings.EMPTY_ARRAY;
public ClusterStateRequest() {
}
@Override
public ActionRequestValidationException validate() {
return null;
}
public ClusterStateRequest all() {
routingTable = true;
nodes = true;
metaData = true;
blocks = true;
indices = Strings.EMPTY_ARRAY;
indexTemplates = Strings.EMPTY_ARRAY;
return this;
}
public ClusterStateRequest clear() {
routingTable = false;
nodes = false;
metaData = false;
blocks = false;
indices = Strings.EMPTY_ARRAY;
indexTemplates = Strings.EMPTY_ARRAY;
return this;
}
public boolean routingTable() {
return routingTable;
}
public ClusterStateRequest routingTable(boolean routingTable) {
this.routingTable = routingTable;
return this;
}
public boolean nodes() {
return nodes;
}
public ClusterStateRequest nodes(boolean nodes) {
this.nodes = nodes;
return this;
}
public boolean metaData() {
return metaData;
}
public ClusterStateRequest metaData(boolean metaData) {
this.metaData = metaData;
return this;
}
public boolean blocks() {
return blocks;
}
public ClusterStateRequest blocks(boolean blocks) {
this.blocks = blocks;
return this;
}
public String[] indices() {
return indices;
}
public ClusterStateRequest indices(String... indices) {
this.indices = indices;
return this;
}
public String[] indexTemplates() {
return this.indexTemplates;
}
public ClusterStateRequest indexTemplates(String... indexTemplates) {
this.indexTemplates = indexTemplates;
return this;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
routingTable = in.readBoolean();
nodes = in.readBoolean();
metaData = in.readBoolean();
blocks = in.readBoolean();
indices = in.readStringArray();
indexTemplates = in.readStringArray();
readLocal(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeBoolean(routingTable);
out.writeBoolean(nodes);
out.writeBoolean(metaData);
out.writeBoolean(blocks);
out.writeStringArray(indices);
out.writeStringArray(indexTemplates);
writeLocal(out);
}
}
| 1no label
|
src_main_java_org_elasticsearch_action_admin_cluster_state_ClusterStateRequest.java
|
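A hedged usage sketch for the request class above: clear() switches every section off, after which individual setters re-enable only what the caller needs. The surrounding client code and index names are assumptions for illustration, not part of the original source.

// Illustrative only; assumes an org.elasticsearch.client.ClusterAdminClient named "clusterAdminClient" is in scope.
ClusterStateRequest request = new ClusterStateRequest()
        .clear()                                   // drop routing table, nodes, metadata, blocks
        .metaData(true)                            // bring back only index metadata
        .indices("logs-2014-01", "logs-2014-02");  // restrict to these indices
// Sending it would typically look like clusterAdminClient.state(request).actionGet().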
5,075 |
class Reaper implements Runnable {
@Override
public void run() {
long time = threadPool.estimatedTimeInMillis();
for (SearchContext context : activeContexts.values()) {
if (context.lastAccessTime() == -1) { // it's being processed or timeout is disabled
continue;
}
if ((time - context.lastAccessTime() > context.keepAlive())) {
freeContext(context);
}
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_search_SearchService.java
|
200 |
public static class Order {
public static final int Audit = 99000;
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_audit_Auditable.java
|
425 |
future.andThen(new ExecutionCallback() {
@Override
public void onResponse(Object response) {
try {
if (collator != null) {
response = collator.collate(((Map) response).entrySet());
}
} finally {
completableFuture.setResult(response);
trackableJobs.remove(jobId);
}
}
@Override
public void onFailure(Throwable t) {
try {
if (t instanceof ExecutionException
&& t.getCause() instanceof CancellationException) {
t = t.getCause();
}
completableFuture.setResult(t);
} finally {
trackableJobs.remove(jobId);
}
}
});
| 0true
|
hazelcast-client_src_main_java_com_hazelcast_client_proxy_ClientMapReduceProxy.java
|
466 |
public interface SandBoxDao {
public SandBox retrieve(Long id);
public SandBox retrieveSandBoxByType(Site site, SandBoxType sandboxType);
public SandBox retrieveNamedSandBox(Site site, SandBoxType sandboxType, String sandboxName);
public SandBox persist(SandBox entity);
public SandBox createSandBox(Site site, String sandBoxName, SandBoxType sandBoxType);
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_sandbox_dao_SandBoxDao.java
|
2,733 |
public final class SerializationServiceImpl implements SerializationService {
private static final int CONSTANT_SERIALIZERS_SIZE = SerializationConstants.CONSTANT_SERIALIZERS_LENGTH;
private static final PartitioningStrategy EMPTY_PARTITIONING_STRATEGY = new PartitioningStrategy() {
public Object getPartitionKey(Object key) {
return null;
}
};
private final IdentityHashMap<Class, SerializerAdapter> constantTypesMap
= new IdentityHashMap<Class, SerializerAdapter>(CONSTANT_SERIALIZERS_SIZE);
private final SerializerAdapter[] constantTypeIds = new SerializerAdapter[CONSTANT_SERIALIZERS_SIZE];
private final ConcurrentMap<Class, SerializerAdapter> typeMap = new ConcurrentHashMap<Class, SerializerAdapter>();
private final ConcurrentMap<Integer, SerializerAdapter> idMap = new ConcurrentHashMap<Integer, SerializerAdapter>();
private final AtomicReference<SerializerAdapter> global = new AtomicReference<SerializerAdapter>();
private final InputOutputFactory inputOutputFactory;
private final Queue<BufferObjectDataOutput> outputPool = new ConcurrentLinkedQueue<BufferObjectDataOutput>();
private final PortableSerializer portableSerializer;
private final SerializerAdapter dataSerializerAdapter;
private final SerializerAdapter portableSerializerAdapter;
private final ClassLoader classLoader;
private final ManagedContext managedContext;
private final SerializationContextImpl serializationContext;
private final PartitioningStrategy globalPartitioningStrategy;
private final int outputBufferSize;
private volatile boolean active = true;
SerializationServiceImpl(InputOutputFactory inputOutputFactory, int version, ClassLoader classLoader,
Map<Integer, ? extends DataSerializableFactory> dataSerializableFactories,
Map<Integer, ? extends PortableFactory> portableFactories,
Collection<ClassDefinition> classDefinitions, boolean checkClassDefErrors,
ManagedContext managedContext, PartitioningStrategy partitionStrategy,
int initialOutputBufferSize,
boolean enableCompression, boolean enableSharedObject) {
this.inputOutputFactory = inputOutputFactory;
this.classLoader = classLoader;
this.managedContext = managedContext;
this.globalPartitioningStrategy = partitionStrategy;
this.outputBufferSize = initialOutputBufferSize;
PortableHookLoader loader = new PortableHookLoader(portableFactories, classLoader);
serializationContext = new SerializationContextImpl(this, loader.getFactories().keySet(), version);
for (ClassDefinition cd : loader.getDefinitions()) {
serializationContext.registerClassDefinition(cd);
}
dataSerializerAdapter = new StreamSerializerAdapter(this, new DataSerializer(dataSerializableFactories, classLoader));
portableSerializer = new PortableSerializer(serializationContext, loader.getFactories());
portableSerializerAdapter = new StreamSerializerAdapter(this, portableSerializer);
registerConstant(DataSerializable.class, dataSerializerAdapter);
registerConstant(Portable.class, portableSerializerAdapter);
registerConstant(Byte.class, new ByteSerializer());
registerConstant(Boolean.class, new BooleanSerializer());
registerConstant(Character.class, new CharSerializer());
registerConstant(Short.class, new ShortSerializer());
registerConstant(Integer.class, new IntegerSerializer());
registerConstant(Long.class, new LongSerializer());
registerConstant(Float.class, new FloatSerializer());
registerConstant(Double.class, new DoubleSerializer());
registerConstant(byte[].class, new TheByteArraySerializer());
registerConstant(char[].class, new CharArraySerializer());
registerConstant(short[].class, new ShortArraySerializer());
registerConstant(int[].class, new IntegerArraySerializer());
registerConstant(long[].class, new LongArraySerializer());
registerConstant(float[].class, new FloatArraySerializer());
registerConstant(double[].class, new DoubleArraySerializer());
registerConstant(String.class, new StringSerializer());
safeRegister(Date.class, new DateSerializer());
safeRegister(BigInteger.class, new BigIntegerSerializer());
safeRegister(BigDecimal.class, new BigDecimalSerializer());
safeRegister(Externalizable.class, new Externalizer(enableCompression));
safeRegister(Serializable.class, new ObjectSerializer(enableSharedObject, enableCompression));
safeRegister(Class.class, new ClassSerializer());
safeRegister(Enum.class, new EnumSerializer());
registerClassDefinitions(classDefinitions, checkClassDefErrors);
}
private void registerClassDefinitions(final Collection<ClassDefinition> classDefinitions, boolean checkClassDefErrors) {
final Map<Integer, ClassDefinition> classDefMap = new HashMap<Integer, ClassDefinition>(classDefinitions.size());
for (ClassDefinition cd : classDefinitions) {
if (classDefMap.containsKey(cd.getClassId())) {
throw new HazelcastSerializationException("Duplicate registration found for class-id[" + cd.getClassId() + "]!");
}
classDefMap.put(cd.getClassId(), cd);
}
for (ClassDefinition classDefinition : classDefinitions) {
registerClassDefinition(classDefinition, classDefMap, checkClassDefErrors);
}
}
private void registerClassDefinition(ClassDefinition cd, Map<Integer,
ClassDefinition> classDefMap, boolean checkClassDefErrors) {
for (int i = 0; i < cd.getFieldCount(); i++) {
FieldDefinition fd = cd.get(i);
if (fd.getType() == FieldType.PORTABLE || fd.getType() == FieldType.PORTABLE_ARRAY) {
int classId = fd.getClassId();
ClassDefinition nestedCd = classDefMap.get(classId);
if (nestedCd != null) {
((ClassDefinitionImpl) cd).addClassDef(nestedCd);
registerClassDefinition(nestedCd, classDefMap, checkClassDefErrors);
serializationContext.registerClassDefinition(nestedCd);
} else if (checkClassDefErrors) {
throw new HazelcastSerializationException("Could not find registered ClassDefinition for class-id: "
+ classId);
}
}
}
serializationContext.registerClassDefinition(cd);
}
public Data toData(final Object obj) {
return toData(obj, globalPartitioningStrategy);
}
@SuppressWarnings("unchecked")
public Data toData(Object obj, PartitioningStrategy strategy) {
if (obj == null) {
return null;
}
if (obj instanceof Data) {
return (Data) obj;
}
try {
final SerializerAdapter serializer = serializerFor(obj.getClass());
if (serializer == null) {
if (active) {
throw new HazelcastSerializationException("There is no suitable serializer for " + obj.getClass());
}
throw new HazelcastInstanceNotActiveException();
}
final byte[] bytes = serializer.write(obj);
final Data data = new Data(serializer.getTypeId(), bytes);
if (obj instanceof Portable) {
final Portable portable = (Portable) obj;
data.classDefinition = serializationContext.lookup(portable.getFactoryId(), portable.getClassId());
}
if (strategy == null) {
strategy = globalPartitioningStrategy;
}
if (strategy != null) {
Object pk = strategy.getPartitionKey(obj);
if (pk != null && pk != obj) {
final Data partitionKey = toData(pk, EMPTY_PARTITIONING_STRATEGY);
data.partitionHash = (partitionKey == null) ? -1 : partitionKey.getPartitionHash();
}
}
return data;
} catch (Throwable e) {
handleException(e);
}
return null;
}
public <T> T toObject(final Object object) {
if (!(object instanceof Data)) {
return (T) object;
}
Data data = (Data) object;
if (data.bufferSize() == 0 && data.isDataSerializable()) {
return null;
}
try {
final int typeId = data.type;
final SerializerAdapter serializer = serializerFor(typeId);
if (serializer == null) {
if (active) {
throw new HazelcastSerializationException("There is no suitable de-serializer for type " + typeId);
}
throw new HazelcastInstanceNotActiveException();
}
if (typeId == SerializationConstants.CONSTANT_TYPE_PORTABLE) {
serializationContext.registerClassDefinition(data.classDefinition);
}
Object obj = serializer.read(data);
if (managedContext != null) {
obj = managedContext.initialize(obj);
}
return (T) obj;
} catch (Throwable e) {
handleException(e);
}
return null;
}
public void writeObject(final ObjectDataOutput out, final Object obj) {
final boolean isNull = obj == null;
try {
out.writeBoolean(isNull);
if (isNull) {
return;
}
final SerializerAdapter serializer = serializerFor(obj.getClass());
if (serializer == null) {
if (active) {
throw new HazelcastSerializationException("There is no suitable serializer for " + obj.getClass());
}
throw new HazelcastInstanceNotActiveException();
}
out.writeInt(serializer.getTypeId());
if (obj instanceof Portable) {
final Portable portable = (Portable) obj;
ClassDefinition classDefinition = serializationContext.lookupOrRegisterClassDefinition(portable);
classDefinition.writeData(out);
}
serializer.write(out, obj);
} catch (Throwable e) {
handleException(e);
}
}
public Object readObject(final ObjectDataInput in) {
try {
final boolean isNull = in.readBoolean();
if (isNull) {
return null;
}
final int typeId = in.readInt();
final SerializerAdapter serializer = serializerFor(typeId);
if (serializer == null) {
if (active) {
throw new HazelcastSerializationException("There is no suitable de-serializer for type " + typeId);
}
throw new HazelcastInstanceNotActiveException();
}
if (typeId == SerializationConstants.CONSTANT_TYPE_PORTABLE && in instanceof PortableContextAwareInputStream) {
ClassDefinition classDefinition = new ClassDefinitionImpl();
classDefinition.readData(in);
classDefinition = serializationContext.registerClassDefinition(classDefinition);
PortableContextAwareInputStream ctxIn = (PortableContextAwareInputStream) in;
ctxIn.setClassDefinition(classDefinition);
}
Object obj = serializer.read(in);
if (managedContext != null) {
obj = managedContext.initialize(obj);
}
return obj;
} catch (Throwable e) {
handleException(e);
}
return null;
}
private void handleException(Throwable e) {
if (e instanceof OutOfMemoryError) {
OutOfMemoryErrorDispatcher.onOutOfMemory((OutOfMemoryError) e);
return;
}
if (e instanceof HazelcastSerializationException) {
throw (HazelcastSerializationException) e;
}
throw new HazelcastSerializationException(e);
}
BufferObjectDataOutput pop() {
BufferObjectDataOutput out = outputPool.poll();
if (out == null) {
out = inputOutputFactory.createOutput(outputBufferSize, this);
}
return out;
}
void push(BufferObjectDataOutput out) {
if (out != null) {
out.clear();
outputPool.offer(out);
}
}
public BufferObjectDataInput createObjectDataInput(byte[] data) {
return inputOutputFactory.createInput(data, this);
}
public BufferObjectDataInput createObjectDataInput(Data data) {
return inputOutputFactory.createInput(data, this);
}
public BufferObjectDataOutput createObjectDataOutput(int size) {
return inputOutputFactory.createOutput(size, this);
}
public ObjectDataOutputStream createObjectDataOutputStream(OutputStream out) {
return new ObjectDataOutputStream(out, this);
}
public ObjectDataInputStream createObjectDataInputStream(InputStream in) {
return new ObjectDataInputStream(in, this);
}
public ObjectDataOutputStream createObjectDataOutputStream(OutputStream out, ByteOrder order) {
return new ObjectDataOutputStream(out, this, order);
}
public ObjectDataInputStream createObjectDataInputStream(InputStream in, ByteOrder order) {
return new ObjectDataInputStream(in, this, order);
}
public void register(Class type, Serializer serializer) {
if (type == null) {
throw new IllegalArgumentException("Class type information is required!");
}
if (serializer.getTypeId() <= 0) {
throw new IllegalArgumentException("Type id must be positive! Current: "
+ serializer.getTypeId() + ", Serializer: " + serializer);
}
safeRegister(type, createSerializerAdapter(serializer));
}
public void registerGlobal(final Serializer serializer) {
SerializerAdapter adapter = createSerializerAdapter(serializer);
if (!global.compareAndSet(null, adapter)) {
throw new IllegalStateException("Global serializer is already registered!");
}
SerializerAdapter current = idMap.putIfAbsent(serializer.getTypeId(), adapter);
if (current != null && current.getImpl().getClass() != adapter.getImpl().getClass()) {
global.compareAndSet(adapter, null);
throw new IllegalStateException("Serializer [" + current.getImpl() + "] has been already registered for type-id: "
+ serializer.getTypeId());
}
}
private SerializerAdapter createSerializerAdapter(Serializer serializer) {
final SerializerAdapter s;
if (serializer instanceof StreamSerializer) {
s = new StreamSerializerAdapter(this, (StreamSerializer) serializer);
} else if (serializer instanceof ByteArraySerializer) {
s = new ByteArraySerializerAdapter((ByteArraySerializer) serializer);
} else {
throw new IllegalArgumentException("Serializer must be instance of either StreamSerializer or ByteArraySerializer!");
}
return s;
}
public SerializerAdapter serializerFor(final Class type) {
if (DataSerializable.class.isAssignableFrom(type)) {
return dataSerializerAdapter;
} else if (Portable.class.isAssignableFrom(type)) {
return portableSerializerAdapter;
} else {
final SerializerAdapter serializer;
if ((serializer = constantTypesMap.get(type)) != null) {
return serializer;
}
}
SerializerAdapter serializer = typeMap.get(type);
if (serializer == null) {
// look for super classes
Class typeSuperclass = type.getSuperclass();
final Set<Class> interfaces = new LinkedHashSet<Class>(5);
getInterfaces(type, interfaces);
while (typeSuperclass != null) {
if ((serializer = registerFromSuperType(type, typeSuperclass)) != null) {
break;
}
getInterfaces(typeSuperclass, interfaces);
typeSuperclass = typeSuperclass.getSuperclass();
}
if (serializer == null) {
// look for interfaces
for (Class typeInterface : interfaces) {
if ((serializer = registerFromSuperType(type, typeInterface)) != null) {
break;
}
}
}
if (serializer == null && (serializer = global.get()) != null) {
safeRegister(type, serializer);
}
}
return serializer;
}
private static void getInterfaces(Class clazz, Set<Class> interfaces) {
final Class[] classes = clazz.getInterfaces();
if (classes.length > 0) {
Collections.addAll(interfaces, classes);
for (Class cl : classes) {
getInterfaces(cl, interfaces);
}
}
}
private SerializerAdapter registerFromSuperType(final Class type, final Class superType) {
final SerializerAdapter serializer = typeMap.get(superType);
if (serializer != null) {
safeRegister(type, serializer);
}
return serializer;
}
private void registerConstant(Class type, Serializer serializer) {
registerConstant(type, createSerializerAdapter(serializer));
}
private void registerConstant(Class type, SerializerAdapter serializer) {
constantTypesMap.put(type, serializer);
constantTypeIds[indexForDefaultType(serializer.getTypeId())] = serializer;
}
void safeRegister(final Class type, final Serializer serializer) {
safeRegister(type, createSerializerAdapter(serializer));
}
private void safeRegister(final Class type, final SerializerAdapter serializer) {
if (constantTypesMap.containsKey(type)) {
throw new IllegalArgumentException("[" + type + "] serializer cannot be overridden!");
}
SerializerAdapter current = typeMap.putIfAbsent(type, serializer);
if (current != null && current.getImpl().getClass() != serializer.getImpl().getClass()) {
throw new IllegalStateException("Serializer[" + current.getImpl()
+ "] has been already registered for type: " + type);
}
current = idMap.putIfAbsent(serializer.getTypeId(), serializer);
if (current != null && current.getImpl().getClass() != serializer.getImpl().getClass()) {
throw new IllegalStateException("Serializer [" + current.getImpl() + "] has been already registered for type-id: "
+ serializer.getTypeId());
}
}
public SerializerAdapter serializerFor(final int typeId) {
if (typeId < 0) {
final int index = indexForDefaultType(typeId);
if (index < CONSTANT_SERIALIZERS_SIZE) {
return constantTypeIds[index];
}
}
return idMap.get(typeId);
}
private int indexForDefaultType(final int typeId) {
return -typeId - 1;
}
public SerializationContext getSerializationContext() {
return serializationContext;
}
public PortableReader createPortableReader(Data data) {
return new DefaultPortableReader(portableSerializer, createObjectDataInput(data), data.getClassDefinition());
}
public void destroy() {
active = false;
for (SerializerAdapter serializer : typeMap.values()) {
serializer.destroy();
}
typeMap.clear();
idMap.clear();
global.set(null);
constantTypesMap.clear();
for (BufferObjectDataOutput output : outputPool) {
IOUtil.closeResource(output);
}
outputPool.clear();
}
public ClassLoader getClassLoader() {
return classLoader;
}
public ManagedContext getManagedContext() {
return managedContext;
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_nio_serialization_SerializationServiceImpl.java
|
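register() above rejects non-positive type ids, and serializerFor() falls back through superclasses, interfaces, and finally the global serializer, so a custom serializer is normally supplied as a StreamSerializer. The sketch below targets Hazelcast's public StreamSerializer interface; the Point class and the 1000 type id are assumptions made for illustration.

import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.nio.serialization.StreamSerializer;
import java.io.IOException;

// Hypothetical value type, used only for this example.
class Point {
    final int x;
    final int y;
    Point(int x, int y) { this.x = x; this.y = y; }
}

// Custom serializer; getTypeId() must return a positive value or register() throws.
class PointSerializer implements StreamSerializer<Point> {
    @Override
    public int getTypeId() { return 1000; }
    @Override
    public void write(ObjectDataOutput out, Point p) throws IOException {
        out.writeInt(p.x);
        out.writeInt(p.y);
    }
    @Override
    public Point read(ObjectDataInput in) throws IOException {
        return new Point(in.readInt(), in.readInt());
    }
    @Override
    public void destroy() { }
}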
77 |
@SuppressWarnings("serial")
static final class MapReduceMappingsTask<K,V,U>
extends BulkTask<K,V,U> {
final BiFun<? super K, ? super V, ? extends U> transformer;
final BiFun<? super U, ? super U, ? extends U> reducer;
U result;
MapReduceMappingsTask<K,V,U> rights, nextRight;
MapReduceMappingsTask
(BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
MapReduceMappingsTask<K,V,U> nextRight,
BiFun<? super K, ? super V, ? extends U> transformer,
BiFun<? super U, ? super U, ? extends U> reducer) {
super(p, b, i, f, t); this.nextRight = nextRight;
this.transformer = transformer;
this.reducer = reducer;
}
public final U getRawResult() { return result; }
public final void compute() {
final BiFun<? super K, ? super V, ? extends U> transformer;
final BiFun<? super U, ? super U, ? extends U> reducer;
if ((transformer = this.transformer) != null &&
(reducer = this.reducer) != null) {
for (int i = baseIndex, f, h; batch > 0 &&
(h = ((f = baseLimit) + i) >>> 1) > i;) {
addToPendingCount(1);
(rights = new MapReduceMappingsTask<K,V,U>
(this, batch >>>= 1, baseLimit = h, f, tab,
rights, transformer, reducer)).fork();
}
U r = null;
for (Node<K,V> p; (p = advance()) != null; ) {
U u;
if ((u = transformer.apply(p.key, p.val)) != null)
r = (r == null) ? u : reducer.apply(r, u);
}
result = r;
CountedCompleter<?> c;
for (c = firstComplete(); c != null; c = c.nextComplete()) {
@SuppressWarnings("unchecked") MapReduceMappingsTask<K,V,U>
t = (MapReduceMappingsTask<K,V,U>)c,
s = t.rights;
while (s != null) {
U tr, sr;
if ((sr = s.result) != null)
t.result = (((tr = t.result) == null) ? sr :
reducer.apply(tr, sr));
s = t.rights = s.nextRight;
}
}
}
}
}
| 0true
|
src_main_java_jsr166e_ConcurrentHashMapV8.java
|
3,685 |
public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements InternalMapper, RootMapper {
public static final String NAME = "_source";
public static final String CONTENT_TYPE = "_source";
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final String NAME = SourceFieldMapper.NAME;
public static final boolean ENABLED = true;
public static final long COMPRESS_THRESHOLD = -1;
public static final String FORMAT = null; // default format is to use the one provided
public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);
static {
FIELD_TYPE.setIndexed(false);
FIELD_TYPE.setStored(true);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_ONLY);
FIELD_TYPE.freeze();
}
}
public static class Builder extends Mapper.Builder<Builder, SourceFieldMapper> {
private boolean enabled = Defaults.ENABLED;
private long compressThreshold = Defaults.COMPRESS_THRESHOLD;
private Boolean compress = null;
private String format = Defaults.FORMAT;
private String[] includes = null;
private String[] excludes = null;
public Builder() {
super(Defaults.NAME);
}
public Builder enabled(boolean enabled) {
this.enabled = enabled;
return this;
}
public Builder compress(boolean compress) {
this.compress = compress;
return this;
}
public Builder compressThreshold(long compressThreshold) {
this.compressThreshold = compressThreshold;
return this;
}
public Builder format(String format) {
this.format = format;
return this;
}
public Builder includes(String[] includes) {
this.includes = includes;
return this;
}
public Builder excludes(String[] excludes) {
this.excludes = excludes;
return this;
}
@Override
public SourceFieldMapper build(BuilderContext context) {
return new SourceFieldMapper(name, enabled, format, compress, compressThreshold, includes, excludes);
}
}
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
SourceFieldMapper.Builder builder = source();
for (Map.Entry<String, Object> entry : node.entrySet()) {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
builder.enabled(nodeBooleanValue(fieldNode));
} else if (fieldName.equals("compress") && fieldNode != null) {
builder.compress(nodeBooleanValue(fieldNode));
} else if (fieldName.equals("compress_threshold") && fieldNode != null) {
if (fieldNode instanceof Number) {
builder.compressThreshold(((Number) fieldNode).longValue());
builder.compress(true);
} else {
builder.compressThreshold(ByteSizeValue.parseBytesSizeValue(fieldNode.toString()).bytes());
builder.compress(true);
}
} else if ("format".equals(fieldName)) {
builder.format(nodeStringValue(fieldNode, null));
} else if (fieldName.equals("includes")) {
List<Object> values = (List<Object>) fieldNode;
String[] includes = new String[values.size()];
for (int i = 0; i < includes.length; i++) {
includes[i] = values.get(i).toString();
}
builder.includes(includes);
} else if (fieldName.equals("excludes")) {
List<Object> values = (List<Object>) fieldNode;
String[] excludes = new String[values.size()];
for (int i = 0; i < excludes.length; i++) {
excludes[i] = values.get(i).toString();
}
builder.excludes(excludes);
}
}
return builder;
}
}
private final boolean enabled;
private Boolean compress;
private long compressThreshold;
private String[] includes;
private String[] excludes;
private String format;
private XContentType formatContentType;
public SourceFieldMapper() {
this(Defaults.NAME, Defaults.ENABLED, Defaults.FORMAT, null, -1, null, null);
}
protected SourceFieldMapper(String name, boolean enabled, String format, Boolean compress, long compressThreshold,
String[] includes, String[] excludes) {
super(new Names(name, name, name, name), Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), null,
Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER, null, null, null, null, null, null); // Only stored.
this.enabled = enabled;
this.compress = compress;
this.compressThreshold = compressThreshold;
this.includes = includes;
this.excludes = excludes;
this.format = format;
this.formatContentType = format == null ? null : XContentType.fromRestContentType(format);
}
public boolean enabled() {
return this.enabled;
}
public String[] excludes() {
return this.excludes != null ? this.excludes : Strings.EMPTY_ARRAY;
}
public String[] includes() {
return this.includes != null ? this.includes : Strings.EMPTY_ARRAY;
}
@Override
public FieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@Override
public FieldDataType defaultFieldDataType() {
return null;
}
@Override
public boolean hasDocValues() {
return false;
}
@Override
public void preParse(ParseContext context) throws IOException {
super.parse(context);
}
@Override
public void postParse(ParseContext context) throws IOException {
}
@Override
public void parse(ParseContext context) throws IOException {
// nothing to do here, we will call it in pre parse
}
@Override
public void validate(ParseContext context) throws MapperParsingException {
}
@Override
public boolean includeInObject() {
return false;
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
if (!enabled) {
return;
}
if (!fieldType.stored()) {
return;
}
if (context.flyweight()) {
return;
}
BytesReference source = context.source();
boolean filtered = (includes != null && includes.length > 0) || (excludes != null && excludes.length > 0);
if (filtered) {
// we don't update the context source if we filter, we want to keep it as is...
Tuple<XContentType, Map<String, Object>> mapTuple = XContentHelper.convertToMap(source, true);
Map<String, Object> filteredSource = XContentMapValues.filter(mapTuple.v2(), includes, excludes);
BytesStreamOutput bStream = new BytesStreamOutput();
StreamOutput streamOutput = bStream;
if (compress != null && compress && (compressThreshold == -1 || source.length() > compressThreshold)) {
streamOutput = CompressorFactory.defaultCompressor().streamOutput(bStream);
}
XContentType contentType = formatContentType;
if (contentType == null) {
contentType = mapTuple.v1();
}
XContentBuilder builder = XContentFactory.contentBuilder(contentType, streamOutput).map(filteredSource);
builder.close();
source = bStream.bytes();
} else if (compress != null && compress && !CompressorFactory.isCompressed(source)) {
if (compressThreshold == -1 || source.length() > compressThreshold) {
BytesStreamOutput bStream = new BytesStreamOutput();
XContentType contentType = XContentFactory.xContentType(source);
if (formatContentType != null && formatContentType != contentType) {
XContentBuilder builder = XContentFactory.contentBuilder(formatContentType, CompressorFactory.defaultCompressor().streamOutput(bStream));
builder.copyCurrentStructure(XContentFactory.xContent(contentType).createParser(source));
builder.close();
} else {
StreamOutput streamOutput = CompressorFactory.defaultCompressor().streamOutput(bStream);
source.writeTo(streamOutput);
streamOutput.close();
}
source = bStream.bytes();
// update the data in the context, so it can be compressed and stored compressed outside...
context.source(source);
}
} else if (formatContentType != null) {
// see if we need to convert the content type
Compressor compressor = CompressorFactory.compressor(source);
if (compressor != null) {
CompressedStreamInput compressedStreamInput = compressor.streamInput(source.streamInput());
XContentType contentType = XContentFactory.xContentType(compressedStreamInput);
compressedStreamInput.resetToBufferStart();
if (contentType != formatContentType) {
// we need to reread and store back, compressed....
BytesStreamOutput bStream = new BytesStreamOutput();
StreamOutput streamOutput = CompressorFactory.defaultCompressor().streamOutput(bStream);
XContentBuilder builder = XContentFactory.contentBuilder(formatContentType, streamOutput);
builder.copyCurrentStructure(XContentFactory.xContent(contentType).createParser(compressedStreamInput));
builder.close();
source = bStream.bytes();
// update the data in the context, so we store it in the translog in this format
context.source(source);
} else {
compressedStreamInput.close();
}
} else {
XContentType contentType = XContentFactory.xContentType(source);
if (contentType != formatContentType) {
// we need to reread and store back in the requested format
BytesStreamOutput bStream = new BytesStreamOutput();
XContentBuilder builder = XContentFactory.contentBuilder(formatContentType, bStream);
builder.copyCurrentStructure(XContentFactory.xContent(contentType).createParser(source));
builder.close();
source = bStream.bytes();
// update the data in the context, so we store it in the translog in this format
context.source(source);
}
}
}
assert source.hasArray();
fields.add(new StoredField(names().indexName(), source.array(), source.arrayOffset(), source.length()));
}
@Override
public byte[] value(Object value) {
if (value == null) {
return null;
}
BytesReference bValue;
if (value instanceof BytesRef) {
bValue = new BytesArray((BytesRef) value);
} else {
bValue = (BytesReference) value;
}
try {
return CompressorFactory.uncompressIfNeeded(bValue).toBytes();
} catch (IOException e) {
throw new ElasticsearchParseException("failed to decompress source", e);
}
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
// all are defaults, no need to write it at all
if (!includeDefaults && enabled == Defaults.ENABLED && compress == null && compressThreshold == -1 && includes == null && excludes == null) {
return builder;
}
builder.startObject(contentType());
if (includeDefaults || enabled != Defaults.ENABLED) {
builder.field("enabled", enabled);
}
if (includeDefaults || !Objects.equal(format, Defaults.FORMAT)) {
builder.field("format", format);
}
if (compress != null) {
builder.field("compress", compress);
} else if (includeDefaults) {
builder.field("compress", false);
}
if (compressThreshold != -1) {
builder.field("compress_threshold", new ByteSizeValue(compressThreshold).toString());
} else if (includeDefaults) {
builder.field("compress_threshold", -1);
}
if (includes != null) {
builder.field("includes", includes);
} else if (includeDefaults) {
builder.field("includes", Strings.EMPTY_ARRAY);
}
if (excludes != null) {
builder.field("excludes", excludes);
} else if (includeDefaults) {
builder.field("excludes", Strings.EMPTY_ARRAY);
}
builder.endObject();
return builder;
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
SourceFieldMapper sourceMergeWith = (SourceFieldMapper) mergeWith;
if (!mergeContext.mergeFlags().simulate()) {
if (sourceMergeWith.compress != null) {
this.compress = sourceMergeWith.compress;
}
if (sourceMergeWith.compressThreshold != -1) {
this.compressThreshold = sourceMergeWith.compressThreshold;
}
if (sourceMergeWith.includes != null) {
this.includes = sourceMergeWith.includes;
}
if (sourceMergeWith.excludes != null) {
this.excludes = sourceMergeWith.excludes;
}
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_index_mapper_internal_SourceFieldMapper.java
|
2,155 |
public class AndDocIdSet extends DocIdSet {
private final DocIdSet[] sets;
public AndDocIdSet(DocIdSet[] sets) {
this.sets = sets;
}
@Override
public boolean isCacheable() {
for (DocIdSet set : sets) {
if (!set.isCacheable()) {
return false;
}
}
return true;
}
@Override
public Bits bits() throws IOException {
Bits[] bits = new Bits[sets.length];
for (int i = 0; i < sets.length; i++) {
bits[i] = sets[i].bits();
if (bits[i] == null) {
return null;
}
}
return new AndBits(bits);
}
@Override
public DocIdSetIterator iterator() throws IOException {
// we try and be smart here, if we can iterate through docsets quickly, prefer to iterate
// over them as much as possible, before actually going to "bits" based ones to check
List<DocIdSet> iterators = new ArrayList<DocIdSet>(sets.length);
List<Bits> bits = new ArrayList<Bits>(sets.length);
for (DocIdSet set : sets) {
if (DocIdSets.isFastIterator(set)) {
iterators.add(set);
} else {
Bits bit = set.bits();
if (bit != null) {
bits.add(bit);
} else {
iterators.add(set);
}
}
}
if (bits.isEmpty()) {
return new IteratorBasedIterator(iterators.toArray(new DocIdSet[iterators.size()]));
}
if (iterators.isEmpty()) {
return new BitsDocIdSetIterator(new AndBits(bits.toArray(new Bits[bits.size()])));
}
// combination of both..., first iterating over the "fast" ones, and then checking on the more
// expensive ones
return new BitsDocIdSetIterator.FilteredIterator(
new IteratorBasedIterator(iterators.toArray(new DocIdSet[iterators.size()])),
new AndBits(bits.toArray(new Bits[bits.size()]))
);
}
static class AndBits implements Bits {
private final Bits[] bits;
AndBits(Bits[] bits) {
this.bits = bits;
}
@Override
public boolean get(int index) {
for (Bits bit : bits) {
if (!bit.get(index)) {
return false;
}
}
return true;
}
@Override
public int length() {
return bits[0].length();
}
}
static class IteratorBasedIterator extends DocIdSetIterator {
int lastReturn = -1;
private DocIdSetIterator[] iterators = null;
private final long cost;
IteratorBasedIterator(DocIdSet[] sets) throws IOException {
iterators = new DocIdSetIterator[sets.length];
int j = 0;
long cost = Integer.MAX_VALUE;
for (DocIdSet set : sets) {
if (set == null) {
lastReturn = DocIdSetIterator.NO_MORE_DOCS; // non matching
break;
} else {
DocIdSetIterator dcit = set.iterator();
if (dcit == null) {
lastReturn = DocIdSetIterator.NO_MORE_DOCS; // non matching
break;
}
iterators[j++] = dcit;
cost = Math.min(cost, dcit.cost());
}
}
this.cost = cost;
if (lastReturn != DocIdSetIterator.NO_MORE_DOCS) {
lastReturn = (iterators.length > 0 ? -1 : DocIdSetIterator.NO_MORE_DOCS);
}
}
@Override
public final int docID() {
return lastReturn;
}
@Override
public final int nextDoc() throws IOException {
if (lastReturn == DocIdSetIterator.NO_MORE_DOCS) return DocIdSetIterator.NO_MORE_DOCS;
DocIdSetIterator dcit = iterators[0];
int target = dcit.nextDoc();
int size = iterators.length;
int skip = 0;
int i = 1;
while (i < size) {
if (i != skip) {
dcit = iterators[i];
int docid = dcit.advance(target);
if (docid > target) {
target = docid;
if (i != 0) {
skip = i;
i = 0;
continue;
} else
skip = 0;
}
}
i++;
}
return (lastReturn = target);
}
@Override
public final int advance(int target) throws IOException {
if (lastReturn == DocIdSetIterator.NO_MORE_DOCS) return DocIdSetIterator.NO_MORE_DOCS;
DocIdSetIterator dcit = iterators[0];
target = dcit.advance(target);
int size = iterators.length;
int skip = 0;
int i = 1;
while (i < size) {
if (i != skip) {
dcit = iterators[i];
int docid = dcit.advance(target);
if (docid > target) {
target = docid;
if (i != 0) {
skip = i;
i = 0;
continue;
} else {
skip = 0;
}
}
}
i++;
}
return (lastReturn = target);
}
@Override
public long cost() {
return cost;
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_common_lucene_docset_AndDocIdSet.java
|
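The nextDoc()/advance() loops above implement a leapfrog intersection: one iterator proposes a candidate doc id, every other iterator must advance to at least that id, and whenever an iterator overshoots, its doc id becomes the new candidate and the scan restarts. The standalone sketch below shows the same idea over plain sorted int arrays; it is an illustration of the algorithm, not Lucene code.

import java.util.Arrays;

public class LeapfrogIntersectionSketch {
    // Returns the values present in every sorted, duplicate-free array.
    static int[] intersect(int[][] sets) {
        int[] positions = new int[sets.length];
        int[] result = new int[0];
        outer:
        while (positions[0] < sets[0].length) {
            // candidate from the first set
            int candidate = sets[0][positions[0]];
            for (int i = 1; i < sets.length; i++) {
                // advance set i to the first value >= candidate
                while (positions[i] < sets[i].length && sets[i][positions[i]] < candidate) {
                    positions[i]++;
                }
                if (positions[i] >= sets[i].length) {
                    break outer; // one set is exhausted, no more matches possible
                }
                if (sets[i][positions[i]] > candidate) {
                    // overshoot: advance the first set to the larger value and restart
                    while (positions[0] < sets[0].length && sets[0][positions[0]] < sets[i][positions[i]]) {
                        positions[0]++;
                    }
                    continue outer;
                }
            }
            // every set contains the candidate
            result = Arrays.copyOf(result, result.length + 1);
            result[result.length - 1] = candidate;
            positions[0]++;
        }
        return result;
    }

    public static void main(String[] args) {
        int[][] sets = { {1, 3, 5, 7, 9}, {3, 4, 5, 9}, {2, 3, 5, 9, 11} };
        System.out.println(Arrays.toString(intersect(sets))); // [3, 5, 9]
    }
}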
408 |
public class DeleteSnapshotRequestBuilder extends MasterNodeOperationRequestBuilder<DeleteSnapshotRequest, DeleteSnapshotResponse, DeleteSnapshotRequestBuilder> {
/**
* Constructs delete snapshot request builder
*
* @param clusterAdminClient cluster admin client
*/
public DeleteSnapshotRequestBuilder(ClusterAdminClient clusterAdminClient) {
super((InternalClusterAdminClient) clusterAdminClient, new DeleteSnapshotRequest());
}
/**
* Constructs delete snapshot request builder with specified repository and snapshot names
*
* @param clusterAdminClient cluster admin client
* @param repository repository name
* @param snapshot snapshot name
*/
public DeleteSnapshotRequestBuilder(ClusterAdminClient clusterAdminClient, String repository, String snapshot) {
super((InternalClusterAdminClient) clusterAdminClient, new DeleteSnapshotRequest(repository, snapshot));
}
/**
* Sets the repository name
*
* @param repository repository name
* @return this builder
*/
public DeleteSnapshotRequestBuilder setRepository(String repository) {
request.repository(repository);
return this;
}
/**
* Sets the snapshot name
*
* @param snapshot snapshot name
* @return this builder
*/
public DeleteSnapshotRequestBuilder setSnapshot(String snapshot) {
request.snapshot(snapshot);
return this;
}
@Override
protected void doExecute(ActionListener<DeleteSnapshotResponse> listener) {
((ClusterAdminClient) client).deleteSnapshot(request, listener);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_snapshots_delete_DeleteSnapshotRequestBuilder.java
|
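A hedged usage sketch of the builder documented above; the clusterAdminClient variable and the repository/snapshot names are assumptions for illustration, not values from the source.

// Illustrative only; assumes an org.elasticsearch.client.ClusterAdminClient named "clusterAdminClient" is in scope.
DeleteSnapshotResponse response = new DeleteSnapshotRequestBuilder(clusterAdminClient)
        .setRepository("my_backup_repo")
        .setSnapshot("snapshot_2014_01")
        .execute()
        .actionGet();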
701 |
constructors[TXN_LIST_ADD] = new ConstructorFunction<Integer, Portable>() {
public Portable createNew(Integer arg) {
return new TxnListAddRequest();
}
};
| 0true
|
hazelcast_src_main_java_com_hazelcast_collection_CollectionPortableHook.java
|
1,351 |
public class NodeMappingRefreshAction extends AbstractComponent {
private final TransportService transportService;
private final MetaDataMappingService metaDataMappingService;
@Inject
public NodeMappingRefreshAction(Settings settings, TransportService transportService, MetaDataMappingService metaDataMappingService) {
super(settings);
this.transportService = transportService;
this.metaDataMappingService = metaDataMappingService;
transportService.registerHandler(NodeMappingRefreshTransportHandler.ACTION, new NodeMappingRefreshTransportHandler());
}
public void nodeMappingRefresh(final ClusterState state, final NodeMappingRefreshRequest request) throws ElasticsearchException {
DiscoveryNodes nodes = state.nodes();
if (nodes.localNodeMaster()) {
innerMappingRefresh(request);
} else {
transportService.sendRequest(state.nodes().masterNode(),
NodeMappingRefreshTransportHandler.ACTION, request, EmptyTransportResponseHandler.INSTANCE_SAME);
}
}
private void innerMappingRefresh(NodeMappingRefreshRequest request) {
metaDataMappingService.refreshMapping(request.index(), request.indexUUID(), request.types());
}
private class NodeMappingRefreshTransportHandler extends BaseTransportRequestHandler<NodeMappingRefreshRequest> {
static final String ACTION = "cluster/nodeMappingRefresh";
@Override
public NodeMappingRefreshRequest newInstance() {
return new NodeMappingRefreshRequest();
}
@Override
public void messageReceived(NodeMappingRefreshRequest request, TransportChannel channel) throws Exception {
innerMappingRefresh(request);
channel.sendResponse(TransportResponse.Empty.INSTANCE);
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
}
public static class NodeMappingRefreshRequest extends TransportRequest {
private String index;
private String indexUUID = IndexMetaData.INDEX_UUID_NA_VALUE;
private String[] types;
private String nodeId;
NodeMappingRefreshRequest() {
}
public NodeMappingRefreshRequest(String index, String indexUUID, String[] types, String nodeId) {
this.index = index;
this.indexUUID = indexUUID;
this.types = types;
this.nodeId = nodeId;
}
public String index() {
return index;
}
public String indexUUID() {
return indexUUID;
}
public String[] types() {
return types;
}
public String nodeId() {
return nodeId;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(index);
out.writeStringArray(types);
out.writeString(nodeId);
out.writeString(indexUUID);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
index = in.readString();
types = in.readStringArray();
nodeId = in.readString();
indexUUID = in.readString();
}
}
}
| 0true
|
src_main_java_org_elasticsearch_cluster_action_index_NodeMappingRefreshAction.java
|