Unnamed: 0 (int64, 0–6.45k) | func (string, length 37–161k) | target (class label, 2 classes) | project (string, length 33–167)
---|---|---|---|
402 |
public class ORecordTrackedSet extends AbstractCollection<OIdentifiable> implements Set<OIdentifiable>, ORecordElement {
protected final ORecord<?> sourceRecord;
protected Map<Object, Object> map = new HashMap<Object, Object>();
private STATUS status = STATUS.NOT_LOADED;
protected final static Object ENTRY_REMOVAL = new Object();
public ORecordTrackedSet(final ORecord<?> iSourceRecord) {
this.sourceRecord = iSourceRecord;
if (iSourceRecord != null)
iSourceRecord.setDirty();
}
public Iterator<OIdentifiable> iterator() {
return new ORecordTrackedIterator(sourceRecord, map.keySet().iterator());
}
public boolean add(final OIdentifiable e) {
if (map.containsKey(e))
return false;
map.put(e, ENTRY_REMOVAL);
setDirty();
if (e instanceof ODocument)
((ODocument) e).addOwner(this);
return true;
}
@Override
public boolean contains(Object o) {
return map.containsKey(o);
}
public boolean remove(Object o) {
final Object old = map.remove(o);
if (old != null) {
if (o instanceof ODocument)
((ODocument) o).removeOwner(this);
setDirty();
return true;
}
return false;
}
public void clear() {
setDirty();
map.clear();
}
public boolean removeAll(final Collection<?> c) {
boolean changed = false;
for (Object item : c) {
if (map.remove(item) != null)
changed = true;
}
if (changed)
setDirty();
return changed;
}
public boolean addAll(final Collection<? extends OIdentifiable> c) {
if (c == null || c.size() == 0)
return false;
for (OIdentifiable o : c)
add(o);
setDirty();
return true;
}
public boolean retainAll(final Collection<?> c) {
if (c == null || c.size() == 0)
return false;
if (super.removeAll(c)) {
setDirty();
return true;
}
return false;
}
@Override
public int size() {
return map.size();
}
@SuppressWarnings("unchecked")
public ORecordTrackedSet setDirty() {
if (status != STATUS.UNMARSHALLING && sourceRecord != null && !sourceRecord.isDirty())
sourceRecord.setDirty();
return this;
}
public void onBeforeIdentityChanged(final ORID iRID) {
map.remove(iRID);
setDirty();
}
public void onAfterIdentityChanged(final ORecord<?> iRecord) {
map.put(iRecord, ENTRY_REMOVAL);
}
public STATUS getInternalStatus() {
return status;
}
public void setInternalStatus(final STATUS iStatus) {
status = iStatus;
}
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_db_record_ORecordTrackedSet.java
|
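
The row above implements a Set over a HashMap whose values are a shared ENTRY_REMOVAL sentinel, so membership checks reduce to containsKey. A minimal JDK-only sketch of that sentinel-map pattern (the demo class and names below are illustrative, not part of the dataset):

```java
import java.util.HashMap;
import java.util.Map;

final class SentinelSetDemo {
    private static final Object PRESENT = new Object(); // sentinel, like ENTRY_REMOVAL

    private final Map<String, Object> map = new HashMap<>();

    boolean add(String e) {
        return map.put(e, PRESENT) == null; // null return => key was absent
    }

    boolean remove(String e) {
        return map.remove(e) != null;
    }

    boolean contains(String e) {
        return map.containsKey(e);
    }

    public static void main(String[] args) {
        SentinelSetDemo s = new SentinelSetDemo();
        System.out.println(s.add("a"));      // true  (newly added)
        System.out.println(s.add("a"));      // false (already present)
        System.out.println(s.contains("a")); // true
        System.out.println(s.remove("a"));   // true
    }
}
```
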
1,076 |
deleteAction.execute(deleteRequest, new ActionListener<DeleteResponse>() {
@Override
public void onResponse(DeleteResponse response) {
UpdateResponse update = new UpdateResponse(response.getIndex(), response.getType(), response.getId(), response.getVersion(), false);
update.setGetResult(updateHelper.extractGetResult(request, response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), null));
listener.onResponse(update);
}
@Override
public void onFailure(Throwable e) {
e = ExceptionsHelper.unwrapCause(e);
if (e instanceof VersionConflictEngineException) {
if (retryCount < request.retryOnConflict()) {
threadPool.executor(executor()).execute(new Runnable() {
@Override
public void run() {
shardOperation(request, listener, retryCount + 1);
}
});
return;
}
}
listener.onFailure(e);
}
});
| 1no label
|
src_main_java_org_elasticsearch_action_update_TransportUpdateAction.java
|
56 |
@SuppressWarnings("serial")
static final class ForEachTransformedEntryTask<K,V,U>
extends BulkTask<K,V,Void> {
final Fun<Map.Entry<K,V>, ? extends U> transformer;
final Action<? super U> action;
ForEachTransformedEntryTask
(BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
Fun<Map.Entry<K,V>, ? extends U> transformer, Action<? super U> action) {
super(p, b, i, f, t);
this.transformer = transformer; this.action = action;
}
public final void compute() {
final Fun<Map.Entry<K,V>, ? extends U> transformer;
final Action<? super U> action;
if ((transformer = this.transformer) != null &&
(action = this.action) != null) {
for (int i = baseIndex, f, h; batch > 0 &&
(h = ((f = baseLimit) + i) >>> 1) > i;) {
addToPendingCount(1);
new ForEachTransformedEntryTask<K,V,U>
(this, batch >>>= 1, baseLimit = h, f, tab,
transformer, action).fork();
}
for (Node<K,V> p; (p = advance()) != null; ) {
U u;
if ((u = transformer.apply(p)) != null)
action.apply(u);
}
propagateCompletion();
}
}
}
| 0true
|
src_main_java_jsr166e_ConcurrentHashMapV8.java
|
16 |
public static class TransactionTimeSpanPruneStrategy extends AbstractPruneStrategy
{
private final int timeToKeep;
private final TimeUnit unit;
public TransactionTimeSpanPruneStrategy( FileSystemAbstraction fileSystem, int timeToKeep, TimeUnit unit )
{
super( fileSystem );
this.timeToKeep = timeToKeep;
this.unit = unit;
}
@Override
protected Threshold newThreshold()
{
return new Threshold()
{
private long lowerLimit = System.currentTimeMillis() - unit.toMillis( timeToKeep );
@Override
public boolean reached( File file, long version, LogLoader source )
{
try
{
return source.getFirstStartRecordTimestamp( version ) < lowerLimit;
}
catch ( IOException e )
{
throw new RuntimeException( e );
}
}
};
}
}
| 1no label
|
community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_xaframework_LogPruneStrategies.java
|
655 |
@Repository("blCategoryDao")
public class CategoryDaoImpl implements CategoryDao {
protected Long currentDateResolution = 10000L;
protected Date cachedDate = SystemTime.asDate();
protected Date getCurrentDateAfterFactoringInDateResolution() {
Date returnDate = SystemTime.getCurrentDateWithinTimeResolution(cachedDate, currentDateResolution);
if (returnDate != cachedDate) {
if (SystemTime.shouldCacheDate()) {
cachedDate = returnDate;
}
}
return returnDate;
}
@PersistenceContext(unitName="blPU")
protected EntityManager em;
@Resource(name="blEntityConfiguration")
protected EntityConfiguration entityConfiguration;
@Override
public Category save(Category category) {
return em.merge(category);
}
@Override
public Category readCategoryById(Long categoryId) {
return em.find(CategoryImpl.class, categoryId);
}
@Override
@Deprecated
public Category readCategoryByName(String categoryName) {
Query query = em.createNamedQuery("BC_READ_CATEGORY_BY_NAME");
query.setParameter("categoryName", categoryName);
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
return (Category) query.getSingleResult();
}
@Override
public List<Category> readAllParentCategories() {
CriteriaBuilder builder = em.getCriteriaBuilder();
CriteriaQuery<Category> criteria = builder.createQuery(Category.class);
Root<CategoryImpl> category = criteria.from(CategoryImpl.class);
criteria.select(category);
criteria.where(builder.isNull(category.get("defaultParentCategory")));
TypedQuery<Category> query = em.createQuery(criteria);
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
try {
return query.getResultList();
} catch (NoResultException e) {
return null;
}
}
@Override
public List<Category> readCategoriesByName(String categoryName) {
TypedQuery<Category> query = em.createNamedQuery("BC_READ_CATEGORY_BY_NAME", Category.class);
query.setParameter("categoryName", categoryName);
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
return query.getResultList();
}
@Override
public List<Category> readCategoriesByName(String categoryName, int limit, int offset) {
TypedQuery<Category> query = em.createNamedQuery("BC_READ_CATEGORY_BY_NAME", Category.class);
query.setParameter("categoryName", categoryName);
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
query.setFirstResult(offset);
query.setMaxResults(limit);
return query.getResultList();
}
@Override
public List<Category> readAllCategories() {
TypedQuery<Category> query = em.createNamedQuery("BC_READ_ALL_CATEGORIES", Category.class);
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
return query.getResultList();
}
@Override
public List<Category> readAllCategories(int limit, int offset) {
TypedQuery<Category> query = em.createNamedQuery("BC_READ_ALL_CATEGORIES", Category.class);
query.setFirstResult(offset);
query.setMaxResults(limit);
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
return query.getResultList();
}
@Override
public List<Product> readAllProducts() {
TypedQuery<Product> query = em.createNamedQuery("BC_READ_ALL_PRODUCTS", Product.class);
//don't cache - could take up too much memory
return query.getResultList();
}
@Override
public List<Product> readAllProducts(int limit, int offset) {
TypedQuery<Product> query = em.createNamedQuery("BC_READ_ALL_PRODUCTS", Product.class);
//don't cache - could take up too much memory
query.setFirstResult(offset);
query.setMaxResults(limit);
return query.getResultList();
}
@Override
public List<Category> readAllSubCategories(Category category) {
TypedQuery<Category> query = em.createNamedQuery("BC_READ_ALL_SUBCATEGORIES", Category.class);
query.setParameter("defaultParentCategory", category);
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
return query.getResultList();
}
@Override
public List<Category> readAllSubCategories(Category category, int limit, int offset) {
TypedQuery<Category> query = em.createNamedQuery("BC_READ_ALL_SUBCATEGORIES", Category.class);
query.setParameter("defaultParentCategory", category);
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
query.setFirstResult(offset);
query.setMaxResults(limit);
return query.getResultList();
}
@Override
public List<Category> readActiveSubCategoriesByCategory(Category category) {
TypedQuery<Category> query = em.createNamedQuery("BC_READ_ACTIVE_SUBCATEGORIES_BY_CATEGORY", Category.class);
query.setParameter("defaultParentCategoryId", category.getId());
query.setParameter("currentDate", getCurrentDateAfterFactoringInDateResolution());
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
return query.getResultList();
}
@Override
public List<Category> readActiveSubCategoriesByCategory(Category category, int limit, int offset) {
TypedQuery<Category> query = em.createNamedQuery("BC_READ_ACTIVE_SUBCATEGORIES_BY_CATEGORY", Category.class);
query.setParameter("defaultParentCategoryId", category.getId());
query.setParameter("currentDate", getCurrentDateAfterFactoringInDateResolution());
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
query.setFirstResult(offset);
query.setMaxResults(limit);
return query.getResultList();
}
@Override
public Long getCurrentDateResolution() {
return currentDateResolution;
}
@Override
public void setCurrentDateResolution(Long currentDateResolution) {
this.currentDateResolution = currentDateResolution;
}
@Override
public void delete(Category category) {
((Status) category).setArchived('Y');
em.merge(category);
}
@Override
public Category create() {
return (Category) entityConfiguration.createEntityInstance(Category.class.getName());
}
@Override
public Category findCategoryByURI(String uri) {
Query query;
query = em.createNamedQuery("BC_READ_CATEGORY_OUTGOING_URL");
query.setParameter("currentDate", getCurrentDateAfterFactoringInDateResolution());
query.setParameter("url", uri);
query.setHint(QueryHints.HINT_CACHEABLE, true);
query.setHint(QueryHints.HINT_CACHE_REGION, "query.Catalog");
@SuppressWarnings("unchecked")
List<Category> results = query.getResultList();
if (results != null && !results.isEmpty()) {
return results.get(0);
} else {
return null;
}
}
}
| 0true
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_catalog_dao_CategoryDaoImpl.java
|
528 |
public class DimensionUnitOfMeasureType implements Serializable, BroadleafEnumerationType {
private static final long serialVersionUID = 1L;
private static final Map<String, DimensionUnitOfMeasureType> TYPES = new LinkedHashMap<String, DimensionUnitOfMeasureType>();
public static final DimensionUnitOfMeasureType CENTIMETERS = new DimensionUnitOfMeasureType("CENTIMETERS", "Centimeters");
public static final DimensionUnitOfMeasureType METERS = new DimensionUnitOfMeasureType("METERS", "Meters");
public static final DimensionUnitOfMeasureType INCHES = new DimensionUnitOfMeasureType("INCHES", "Inches");
public static final DimensionUnitOfMeasureType FEET = new DimensionUnitOfMeasureType("FEET", "Feet");
public static DimensionUnitOfMeasureType getInstance(final String type) {
return TYPES.get(type);
}
private String type;
private String friendlyType;
public DimensionUnitOfMeasureType() {
//do nothing
}
public DimensionUnitOfMeasureType(final String type, final String friendlyType) {
this.friendlyType = friendlyType;
setType(type);
}
public String getType() {
return type;
}
public String getFriendlyType() {
return friendlyType;
}
private void setType(final String type) {
this.type = type;
if (!TYPES.containsKey(type)){
TYPES.put(type, this);
}
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
DimensionUnitOfMeasureType other = (DimensionUnitOfMeasureType) obj;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
return true;
}
}
| 1no label
|
common_src_main_java_org_broadleafcommerce_common_util_DimensionUnitOfMeasureType.java
|
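
DimensionUnitOfMeasureType above follows Broadleaf's extensible-enum pattern: each constructor call registers the instance in the static TYPES map, and getInstance resolves it by key. A hedged usage sketch, assuming only the class shown above is on the classpath (the demo class is illustrative):

```java
public class UnitLookupDemo {
    public static void main(String[] args) {
        // Class initialization has already registered the four built-in units.
        DimensionUnitOfMeasureType meters = DimensionUnitOfMeasureType.getInstance("METERS");
        System.out.println(meters.getFriendlyType()); // Meters

        // Unregistered keys return null rather than throwing.
        System.out.println(DimensionUnitOfMeasureType.getInstance("FURLONGS")); // null
    }
}
```
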
4,963 |
public class RestUtils {
public static PathTrie.Decoder REST_DECODER = new PathTrie.Decoder() {
@Override
public String decode(String value) {
return RestUtils.decodeComponent(value);
}
};
public static boolean isBrowser(@Nullable String userAgent) {
if (userAgent == null) {
return false;
}
// chrome, safari, firefox, ie
if (userAgent.startsWith("Mozilla")) {
return true;
}
return false;
}
public static void decodeQueryString(String s, int fromIndex, Map<String, String> params) {
if (fromIndex < 0) {
return;
}
if (fromIndex >= s.length()) {
return;
}
String name = null;
int pos = fromIndex; // Beginning of the unprocessed region
int i; // End of the unprocessed region
char c = 0; // Current character
for (i = fromIndex; i < s.length(); i++) {
c = s.charAt(i);
if (c == '=' && name == null) {
if (pos != i) {
name = decodeComponent(s.substring(pos, i));
}
pos = i + 1;
} else if (c == '&') {
if (name == null && pos != i) {
// We haven't seen an `=' so far but moved forward.
// Must be a param of the form '&a&' so add it with
// an empty value.
addParam(params, decodeComponent(s.substring(pos, i)), "");
} else if (name != null) {
addParam(params, name, decodeComponent(s.substring(pos, i)));
name = null;
}
pos = i + 1;
}
}
if (pos != i) { // Are there characters we haven't dealt with?
if (name == null) { // Yes and we haven't seen any `='.
addParam(params, decodeComponent(s.substring(pos, i)), "");
} else { // Yes and this must be the last value.
addParam(params, name, decodeComponent(s.substring(pos, i)));
}
} else if (name != null) { // Have we seen a name without value?
addParam(params, name, "");
}
}
private static void addParam(Map<String, String> params, String name, String value) {
params.put(name, value);
}
/**
* Decodes a bit of a URL encoded by a browser.
* <p/>
* This is equivalent to calling {@link #decodeComponent(String, Charset)}
* with the UTF-8 charset (recommended to comply with RFC 3986, Section 2).
*
* @param s The string to decode (can be empty).
* @return The decoded string, or {@code s} if there's nothing to decode.
* If the string to decode is {@code null}, returns an empty string.
* @throws IllegalArgumentException if the string contains a malformed
* escape sequence.
*/
public static String decodeComponent(final String s) {
return decodeComponent(s, Charsets.UTF_8);
}
/**
* Decodes a bit of a URL encoded by a browser.
* <p/>
* The string is expected to be encoded as per RFC 3986, Section 2.
* This is the encoding used by JavaScript functions {@code encodeURI}
* and {@code encodeURIComponent}, but not {@code escape}. For example
* in this encoding, é (in Unicode {@code U+00E9} or in UTF-8
* {@code 0xC3 0xA9}) is encoded as {@code %C3%A9} or {@code %c3%a9}.
* <p/>
* This is essentially equivalent to calling
* <code>{@link java.net.URLDecoder URLDecoder}.{@link
* java.net.URLDecoder#decode(String, String)}</code>
* except that it's over 2x faster and generates less garbage for the GC.
* Actually this function doesn't allocate any memory if there's nothing
* to decode, the argument itself is returned.
*
* @param s The string to decode (can be empty).
* @param charset The charset to use to decode the string (should really
* be {@link Charsets#UTF_8}).
* @return The decoded string, or {@code s} if there's nothing to decode.
* If the string to decode is {@code null}, returns an empty string.
* @throws IllegalArgumentException if the string contains a malformed
* escape sequence.
*/
@SuppressWarnings("fallthrough")
public static String decodeComponent(final String s, final Charset charset) {
if (s == null) {
return "";
}
final int size = s.length();
boolean modified = false;
for (int i = 0; i < size; i++) {
final char c = s.charAt(i);
switch (c) {
case '%':
i++; // We can skip at least one char, e.g. `%%'.
// Fall through.
case '+':
modified = true;
break;
}
}
if (!modified) {
return s;
}
final byte[] buf = new byte[size];
int pos = 0; // position in `buf'.
for (int i = 0; i < size; i++) {
char c = s.charAt(i);
switch (c) {
case '+':
buf[pos++] = ' '; // "+" -> " "
break;
case '%':
if (i == size - 1) {
throw new IllegalArgumentException("unterminated escape"
+ " sequence at end of string: " + s);
}
c = s.charAt(++i);
if (c == '%') {
buf[pos++] = '%'; // "%%" -> "%"
break;
} else if (i == size - 1) {
throw new IllegalArgumentException("partial escape"
+ " sequence at end of string: " + s);
}
c = decodeHexNibble(c);
final char c2 = decodeHexNibble(s.charAt(++i));
if (c == Character.MAX_VALUE || c2 == Character.MAX_VALUE) {
throw new IllegalArgumentException(
"invalid escape sequence `%" + s.charAt(i - 1)
+ s.charAt(i) + "' at index " + (i - 2)
+ " of: " + s);
}
c = (char) (c * 16 + c2);
// Fall through.
default:
buf[pos++] = (byte) c;
break;
}
}
return new String(buf, 0, pos, charset);
}
/**
* Helper to decode half of a hexadecimal number from a string.
*
* @param c The ASCII character of the hexadecimal number to decode.
* Must be in the range {@code [0-9a-fA-F]}.
* @return The hexadecimal value represented in the ASCII character
* given, or {@link Character#MAX_VALUE} if the character is invalid.
*/
private static char decodeHexNibble(final char c) {
if ('0' <= c && c <= '9') {
return (char) (c - '0');
} else if ('a' <= c && c <= 'f') {
return (char) (c - 'a' + 10);
} else if ('A' <= c && c <= 'F') {
return (char) (c - 'A' + 10);
} else {
return Character.MAX_VALUE;
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_rest_support_RestUtils.java
|
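
decodeQueryString above makes a single pass over the raw query string, splitting on '=' and '&' and percent-decoding each piece through decodeComponent; a key without '=' is stored with an empty value. A hedged usage sketch based only on the signatures shown above (the surrounding demo class is illustrative):

```java
import java.util.HashMap;
import java.util.Map;

public class QueryDecodeDemo {
    public static void main(String[] args) {
        Map<String, String> params = new HashMap<>();
        // fromIndex is the offset of the first query char; 0 because we pass the bare query.
        RestUtils.decodeQueryString("q=caf%C3%A9&pretty", 0, params);
        System.out.println(params.get("q"));      // café  (%C3%A9 is UTF-8 for é)
        System.out.println(params.get("pretty")); // ""    (flag-style key, empty value)
    }
}
```
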
176 |
@RunWith(HazelcastSerialClassRunner.class)
@Category(QuickTest.class)
public class ClientAtomicLongTest {
static final String name = "test1";
static HazelcastInstance client;
static HazelcastInstance server;
static IAtomicLong l;
@BeforeClass
public static void init(){
server = Hazelcast.newHazelcastInstance();
client = HazelcastClient.newHazelcastClient();
l = client.getAtomicLong(name);
}
@AfterClass
public static void destroy() {
client.shutdown();
Hazelcast.shutdownAll();
}
@Before
@After
public void clear() throws IOException {
l.set(0);
}
@Test
public void test() throws Exception {
assertEquals(0, l.getAndAdd(2));
assertEquals(2, l.get());
l.set(5);
assertEquals(5, l.get());
assertEquals(8, l.addAndGet(3));
assertFalse(l.compareAndSet(7, 4));
assertEquals(8, l.get());
assertTrue(l.compareAndSet(8, 4));
assertEquals(4, l.get());
assertEquals(3, l.decrementAndGet());
assertEquals(3, l.getAndIncrement());
assertEquals(4, l.getAndSet(9));
assertEquals(10, l.incrementAndGet());
}
@Test(expected = IllegalArgumentException.class)
public void apply_whenCalledWithNullFunction() {
IAtomicLong ref = client.getAtomicLong("apply_whenCalledWithNullFunction");
ref.apply(null);
}
@Test
public void apply() {
IAtomicLong ref = client.getAtomicLong("apply");
assertEquals(new Long(1), ref.apply(new AddOneFunction()));
assertEquals(0, ref.get());
}
@Test
public void apply_whenException() {
IAtomicLong ref = client.getAtomicLong("apply_whenException");
ref.set(1);
try {
ref.apply(new FailingFunction());
fail();
} catch (WoohaaException expected) {
}
assertEquals(1, ref.get());
}
@Test(expected = IllegalArgumentException.class)
public void alter_whenCalledWithNullFunction() {
IAtomicLong ref = client.getAtomicLong("alter_whenCalledWithNullFunction");
ref.alter(null);
}
@Test
public void alter_whenException() {
IAtomicLong ref = client.getAtomicLong("alter_whenException");
ref.set(10);
try {
ref.alter(new FailingFunction());
fail();
} catch (WoohaaException expected) {
}
assertEquals(10, ref.get());
}
@Test
public void alter() {
IAtomicLong ref = client.getAtomicLong("alter");
ref.set(10);
ref.alter(new AddOneFunction());
assertEquals(11, ref.get());
}
@Test(expected = IllegalArgumentException.class)
public void alterAndGet_whenCalledWithNullFunction() {
IAtomicLong ref = client.getAtomicLong("alterAndGet_whenCalledWithNullFunction");
ref.alterAndGet(null);
}
@Test
public void alterAndGet_whenException() {
IAtomicLong ref = client.getAtomicLong("alterAndGet_whenException");
ref.set(10);
try {
ref.alterAndGet(new FailingFunction());
fail();
} catch (WoohaaException expected) {
}
assertEquals(10, ref.get());
}
@Test
public void alterAndGet() {
IAtomicLong ref = client.getAtomicLong("alterAndGet");
ref.set(10);
assertEquals(11, ref.alterAndGet(new AddOneFunction()));
assertEquals(11, ref.get());
}
@Test(expected = IllegalArgumentException.class)
public void getAndAlter_whenCalledWithNullFunction() {
IAtomicLong ref = client.getAtomicLong("getAndAlter_whenCalledWithNullFunction");
ref.getAndAlter(null);
}
@Test
public void getAndAlter_whenException() {
IAtomicLong ref = client.getAtomicLong("getAndAlter_whenException");
ref.set(10);
try {
ref.getAndAlter(new FailingFunction());
fail();
} catch (WoohaaException expected) {
}
assertEquals(10, ref.get());
}
@Test
public void getAndAlter() {
IAtomicLong ref = client.getAtomicLong("getAndAlter");
ref.set(10);
assertEquals(10, ref.getAndAlter(new AddOneFunction()));
assertEquals(11, ref.get());
}
private static class AddOneFunction implements IFunction<Long, Long> {
@Override
public Long apply(Long input) {
return input+1;
}
}
private static class FailingFunction implements IFunction<Long, Long> {
@Override
public Long apply(Long input) {
throw new WoohaaException();
}
}
private static class WoohaaException extends RuntimeException {
}
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_atomiclong_ClientAtomicLongTest.java
|
308 |
public class ClusterHealthResponse extends ActionResponse implements Iterable<ClusterIndexHealth>, ToXContent {
private String clusterName;
int numberOfNodes = 0;
int numberOfDataNodes = 0;
int activeShards = 0;
int relocatingShards = 0;
int activePrimaryShards = 0;
int initializingShards = 0;
int unassignedShards = 0;
boolean timedOut = false;
ClusterHealthStatus status = ClusterHealthStatus.RED;
private List<String> validationFailures;
Map<String, ClusterIndexHealth> indices = Maps.newHashMap();
ClusterHealthResponse() {
}
public ClusterHealthResponse(String clusterName, List<String> validationFailures) {
this.clusterName = clusterName;
this.validationFailures = validationFailures;
}
public ClusterHealthResponse(String clusterName, String[] concreteIndices, ClusterState clusterState) {
this.clusterName = clusterName;
RoutingTableValidation validation = clusterState.routingTable().validate(clusterState.metaData());
validationFailures = validation.failures();
numberOfNodes = clusterState.nodes().size();
numberOfDataNodes = clusterState.nodes().dataNodes().size();
for (String index : concreteIndices) {
IndexRoutingTable indexRoutingTable = clusterState.routingTable().index(index);
IndexMetaData indexMetaData = clusterState.metaData().index(index);
if (indexRoutingTable == null) {
continue;
}
ClusterIndexHealth indexHealth = new ClusterIndexHealth(indexMetaData, indexRoutingTable);
indices.put(indexHealth.getIndex(), indexHealth);
}
status = ClusterHealthStatus.GREEN;
for (ClusterIndexHealth indexHealth : indices.values()) {
activePrimaryShards += indexHealth.activePrimaryShards;
activeShards += indexHealth.activeShards;
relocatingShards += indexHealth.relocatingShards;
initializingShards += indexHealth.initializingShards;
unassignedShards += indexHealth.unassignedShards;
if (indexHealth.getStatus() == ClusterHealthStatus.RED) {
status = ClusterHealthStatus.RED;
} else if (indexHealth.getStatus() == ClusterHealthStatus.YELLOW && status != ClusterHealthStatus.RED) {
status = ClusterHealthStatus.YELLOW;
}
}
if (!validationFailures.isEmpty()) {
status = ClusterHealthStatus.RED;
} else if (clusterState.blocks().hasGlobalBlock(RestStatus.SERVICE_UNAVAILABLE)) {
status = ClusterHealthStatus.RED;
}
}
public String getClusterName() {
return clusterName;
}
/**
* The validation failures on the cluster level (without index validation failures).
*/
public List<String> getValidationFailures() {
return this.validationFailures;
}
/**
* All the validation failures, including index level validation failures.
*/
public List<String> getAllValidationFailures() {
List<String> allFailures = newArrayList(getValidationFailures());
for (ClusterIndexHealth indexHealth : indices.values()) {
allFailures.addAll(indexHealth.getValidationFailures());
}
return allFailures;
}
public int getActiveShards() {
return activeShards;
}
public int getRelocatingShards() {
return relocatingShards;
}
public int getActivePrimaryShards() {
return activePrimaryShards;
}
public int getInitializingShards() {
return initializingShards;
}
public int getUnassignedShards() {
return unassignedShards;
}
public int getNumberOfNodes() {
return this.numberOfNodes;
}
public int getNumberOfDataNodes() {
return this.numberOfDataNodes;
}
/**
* <tt>true</tt> if the waitForXXX has timed out and did not match.
*/
public boolean isTimedOut() {
return this.timedOut;
}
public ClusterHealthStatus getStatus() {
return status;
}
public Map<String, ClusterIndexHealth> getIndices() {
return indices;
}
@Override
public Iterator<ClusterIndexHealth> iterator() {
return indices.values().iterator();
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
clusterName = in.readString();
activePrimaryShards = in.readVInt();
activeShards = in.readVInt();
relocatingShards = in.readVInt();
initializingShards = in.readVInt();
unassignedShards = in.readVInt();
numberOfNodes = in.readVInt();
numberOfDataNodes = in.readVInt();
status = ClusterHealthStatus.fromValue(in.readByte());
int size = in.readVInt();
for (int i = 0; i < size; i++) {
ClusterIndexHealth indexHealth = readClusterIndexHealth(in);
indices.put(indexHealth.getIndex(), indexHealth);
}
timedOut = in.readBoolean();
size = in.readVInt();
if (size == 0) {
validationFailures = ImmutableList.of();
} else {
for (int i = 0; i < size; i++) {
validationFailures.add(in.readString());
}
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(clusterName);
out.writeVInt(activePrimaryShards);
out.writeVInt(activeShards);
out.writeVInt(relocatingShards);
out.writeVInt(initializingShards);
out.writeVInt(unassignedShards);
out.writeVInt(numberOfNodes);
out.writeVInt(numberOfDataNodes);
out.writeByte(status.value());
out.writeVInt(indices.size());
for (ClusterIndexHealth indexHealth : this) {
indexHealth.writeTo(out);
}
out.writeBoolean(timedOut);
out.writeVInt(validationFailures.size());
for (String failure : validationFailures) {
out.writeString(failure);
}
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder("ClusterHealthResponse - status [").append(status).append("]")
.append("\ntimedOut [").append(timedOut).append("]")
.append("\nclustername [").append(clusterName).append("]")
.append("\nnumberOfNodes [").append(numberOfNodes).append("]")
.append("\nnumberOfDataNodes [").append(numberOfDataNodes).append("]")
.append("\nactiveShards [").append(activeShards).append("]")
.append("\nrelocatingShards [").append(relocatingShards).append("]")
.append("\nactivePrimaryShards [").append(activePrimaryShards).append("]")
.append("\ninitializingShards [").append(initializingShards).append("]")
.append("\nvalidationFailures ").append(validationFailures)
.append("\nindices:");
for (Map.Entry<String, ClusterIndexHealth> indexEntry : indices.entrySet()) {
builder.append(" [").append(indexEntry.getKey()).append("][").append(indexEntry.getValue().status).append("]");
}
return builder.toString();
}
static final class Fields {
static final XContentBuilderString CLUSTER_NAME = new XContentBuilderString("cluster_name");
static final XContentBuilderString STATUS = new XContentBuilderString("status");
static final XContentBuilderString TIMED_OUT = new XContentBuilderString("timed_out");
static final XContentBuilderString NUMBER_OF_NODES = new XContentBuilderString("number_of_nodes");
static final XContentBuilderString NUMBER_OF_DATA_NODES = new XContentBuilderString("number_of_data_nodes");
static final XContentBuilderString ACTIVE_PRIMARY_SHARDS = new XContentBuilderString("active_primary_shards");
static final XContentBuilderString ACTIVE_SHARDS = new XContentBuilderString("active_shards");
static final XContentBuilderString RELOCATING_SHARDS = new XContentBuilderString("relocating_shards");
static final XContentBuilderString INITIALIZING_SHARDS = new XContentBuilderString("initializing_shards");
static final XContentBuilderString UNASSIGNED_SHARDS = new XContentBuilderString("unassigned_shards");
static final XContentBuilderString VALIDATION_FAILURES = new XContentBuilderString("validation_failures");
static final XContentBuilderString INDICES = new XContentBuilderString("indices");
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(Fields.CLUSTER_NAME, getClusterName());
builder.field(Fields.STATUS, getStatus().name().toLowerCase(Locale.ROOT));
builder.field(Fields.TIMED_OUT, isTimedOut());
builder.field(Fields.NUMBER_OF_NODES, getNumberOfNodes());
builder.field(Fields.NUMBER_OF_DATA_NODES, getNumberOfDataNodes());
builder.field(Fields.ACTIVE_PRIMARY_SHARDS, getActivePrimaryShards());
builder.field(Fields.ACTIVE_SHARDS, getActiveShards());
builder.field(Fields.RELOCATING_SHARDS, getRelocatingShards());
builder.field(Fields.INITIALIZING_SHARDS, getInitializingShards());
builder.field(Fields.UNASSIGNED_SHARDS, getUnassignedShards());
String level = params.param("level", "cluster");
boolean outputIndices = "indices".equals(level) || "shards".equals(level);
if (!getValidationFailures().isEmpty()) {
builder.startArray(Fields.VALIDATION_FAILURES);
for (String validationFailure : getValidationFailures()) {
builder.value(validationFailure);
}
// if we don't print index level information, still print the index validation failures
// so we know why the status is red
if (!outputIndices) {
for (ClusterIndexHealth indexHealth : indices.values()) {
builder.startObject(indexHealth.getIndex());
if (!indexHealth.getValidationFailures().isEmpty()) {
builder.startArray(Fields.VALIDATION_FAILURES);
for (String validationFailure : indexHealth.getValidationFailures()) {
builder.value(validationFailure);
}
builder.endArray();
}
builder.endObject();
}
}
builder.endArray();
}
if (outputIndices) {
builder.startObject(Fields.INDICES);
for (ClusterIndexHealth indexHealth : indices.values()) {
builder.startObject(indexHealth.getIndex(), XContentBuilder.FieldCaseConversion.NONE);
indexHealth.toXContent(builder, params);
builder.endObject();
}
builder.endObject();
}
return builder;
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_health_ClusterHealthResponse.java
|
6,274 |
public class MatchAssertion extends Assertion {
private static final ESLogger logger = Loggers.getLogger(MatchAssertion.class);
public MatchAssertion(String field, Object expectedValue) {
super(field, expectedValue);
}
@Override
protected void doAssert(Object actualValue, Object expectedValue) {
//a value wrapped in slashes is treated as a regexp (e.g. /\s+\d+/)
if (expectedValue instanceof String) {
String expValue = ((String) expectedValue).trim();
if (expValue.length() > 2 && expValue.startsWith("/") && expValue.endsWith("/")) {
String regex = expValue.substring(1, expValue.length() - 1);
logger.trace("assert that [{}] matches [{}]", actualValue, regex);
assertThat("field [" + getField() + "] was expected to match the provided regex but didn't",
actualValue.toString(), matches(regex, Pattern.COMMENTS));
return;
}
}
assertThat(errorMessage(), actualValue, notNullValue());
logger.trace("assert that [{}] matches [{}]", actualValue, expectedValue);
if (!actualValue.getClass().equals(expectedValue.getClass())) {
if (actualValue instanceof Number && expectedValue instanceof Number) {
//Double 1.0 is equal to Integer 1
assertThat(errorMessage(), ((Number) actualValue).doubleValue(), equalTo(((Number) expectedValue).doubleValue()));
return;
}
}
assertThat(errorMessage(), actualValue, equalTo(expectedValue));
}
private String errorMessage() {
return "field [" + getField() + "] doesn't match the expected value";
}
}
| 1no label
|
src_test_java_org_elasticsearch_test_rest_section_MatchAssertion.java
|
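
MatchAssertion above treats an expected string wrapped in slashes as a regex and compiles it with Pattern.COMMENTS, which lets long patterns be written with insignificant whitespace and # comments. A JDK-only sketch of what that flag changes (demo class illustrative):

```java
import java.util.regex.Pattern;

public class CommentsFlagDemo {
    public static void main(String[] args) {
        // In COMMENTS mode, whitespace is ignored and '#' starts a line comment,
        // so this pattern is effectively \d+\s*[a-z]+.
        Pattern p = Pattern.compile("\\d+ \\s*   # digits, optional spaces\n[a-z]+",
                Pattern.COMMENTS);
        System.out.println(p.matcher("42abc").matches()); // true
    }
}
```
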
2,137 |
public class Lucene {
public static final Version VERSION = Version.LUCENE_46;
public static final Version ANALYZER_VERSION = VERSION;
public static final Version QUERYPARSER_VERSION = VERSION;
public static final NamedAnalyzer STANDARD_ANALYZER = new NamedAnalyzer("_standard", AnalyzerScope.GLOBAL, new StandardAnalyzer(ANALYZER_VERSION));
public static final NamedAnalyzer KEYWORD_ANALYZER = new NamedAnalyzer("_keyword", AnalyzerScope.GLOBAL, new KeywordAnalyzer());
public static final int NO_DOC = -1;
public static ScoreDoc[] EMPTY_SCORE_DOCS = new ScoreDoc[0];
@SuppressWarnings("deprecation")
public static Version parseVersion(@Nullable String version, Version defaultVersion, ESLogger logger) {
if (version == null) {
return defaultVersion;
}
if ("4.6".equals(version)) {
return VERSION.LUCENE_46;
}
if ("4.5".equals(version)) {
return VERSION.LUCENE_45;
}
if ("4.4".equals(version)) {
return VERSION.LUCENE_44;
}
if ("4.3".equals(version)) {
return Version.LUCENE_43;
}
if ("4.2".equals(version)) {
return Version.LUCENE_42;
}
if ("4.1".equals(version)) {
return Version.LUCENE_41;
}
if ("4.0".equals(version)) {
return Version.LUCENE_40;
}
if ("3.6".equals(version)) {
return Version.LUCENE_36;
}
if ("3.5".equals(version)) {
return Version.LUCENE_35;
}
if ("3.4".equals(version)) {
return Version.LUCENE_34;
}
if ("3.3".equals(version)) {
return Version.LUCENE_33;
}
if ("3.2".equals(version)) {
return Version.LUCENE_32;
}
if ("3.1".equals(version)) {
return Version.LUCENE_31;
}
if ("3.0".equals(version)) {
return Version.LUCENE_30;
}
logger.warn("no version match {}, default to {}", version, defaultVersion);
return defaultVersion;
}
/**
* Reads the segment infos, throwing an exception if they fail to load
*/
public static SegmentInfos readSegmentInfos(Directory directory) throws IOException {
final SegmentInfos sis = new SegmentInfos();
sis.read(directory);
return sis;
}
public static long count(IndexSearcher searcher, Query query) throws IOException {
TotalHitCountCollector countCollector = new TotalHitCountCollector();
// we don't need scores, so wrap it in a constant score query
if (!(query instanceof ConstantScoreQuery)) {
query = new ConstantScoreQuery(query);
}
searcher.search(query, countCollector);
return countCollector.getTotalHits();
}
/**
* Closes the index writer, returning <tt>false</tt> if it failed to close.
*/
public static boolean safeClose(IndexWriter writer) {
if (writer == null) {
return true;
}
try {
writer.close();
return true;
} catch (Throwable e) {
return false;
}
}
public static TopDocs readTopDocs(StreamInput in) throws IOException {
if (!in.readBoolean()) {
// no docs
return null;
}
if (in.readBoolean()) {
int totalHits = in.readVInt();
float maxScore = in.readFloat();
SortField[] fields = new SortField[in.readVInt()];
for (int i = 0; i < fields.length; i++) {
String field = null;
if (in.readBoolean()) {
field = in.readString();
}
fields[i] = new SortField(field, readSortType(in), in.readBoolean());
}
FieldDoc[] fieldDocs = new FieldDoc[in.readVInt()];
for (int i = 0; i < fieldDocs.length; i++) {
Comparable[] cFields = new Comparable[in.readVInt()];
for (int j = 0; j < cFields.length; j++) {
byte type = in.readByte();
if (type == 0) {
cFields[j] = null;
} else if (type == 1) {
cFields[j] = in.readString();
} else if (type == 2) {
cFields[j] = in.readInt();
} else if (type == 3) {
cFields[j] = in.readLong();
} else if (type == 4) {
cFields[j] = in.readFloat();
} else if (type == 5) {
cFields[j] = in.readDouble();
} else if (type == 6) {
cFields[j] = in.readByte();
} else if (type == 7) {
cFields[j] = in.readShort();
} else if (type == 8) {
cFields[j] = in.readBoolean();
} else if (type == 9) {
cFields[j] = in.readBytesRef();
} else {
throw new IOException("Can't match type [" + type + "]");
}
}
fieldDocs[i] = new FieldDoc(in.readVInt(), in.readFloat(), cFields);
}
return new TopFieldDocs(totalHits, fieldDocs, fields, maxScore);
} else {
int totalHits = in.readVInt();
float maxScore = in.readFloat();
ScoreDoc[] scoreDocs = new ScoreDoc[in.readVInt()];
for (int i = 0; i < scoreDocs.length; i++) {
scoreDocs[i] = new ScoreDoc(in.readVInt(), in.readFloat());
}
return new TopDocs(totalHits, scoreDocs, maxScore);
}
}
public static void writeTopDocs(StreamOutput out, TopDocs topDocs, int from) throws IOException {
if (topDocs.scoreDocs.length - from < 0) {
out.writeBoolean(false);
return;
}
out.writeBoolean(true);
if (topDocs instanceof TopFieldDocs) {
out.writeBoolean(true);
TopFieldDocs topFieldDocs = (TopFieldDocs) topDocs;
out.writeVInt(topDocs.totalHits);
out.writeFloat(topDocs.getMaxScore());
out.writeVInt(topFieldDocs.fields.length);
for (SortField sortField : topFieldDocs.fields) {
if (sortField.getField() == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
out.writeString(sortField.getField());
}
if (sortField.getComparatorSource() != null) {
writeSortType(out, ((IndexFieldData.XFieldComparatorSource) sortField.getComparatorSource()).reducedType());
} else {
writeSortType(out, sortField.getType());
}
out.writeBoolean(sortField.getReverse());
}
out.writeVInt(topDocs.scoreDocs.length - from);
int index = 0;
for (ScoreDoc doc : topFieldDocs.scoreDocs) {
if (index++ < from) {
continue;
}
FieldDoc fieldDoc = (FieldDoc) doc;
out.writeVInt(fieldDoc.fields.length);
for (Object field : fieldDoc.fields) {
if (field == null) {
out.writeByte((byte) 0);
} else {
Class type = field.getClass();
if (type == String.class) {
out.writeByte((byte) 1);
out.writeString((String) field);
} else if (type == Integer.class) {
out.writeByte((byte) 2);
out.writeInt((Integer) field);
} else if (type == Long.class) {
out.writeByte((byte) 3);
out.writeLong((Long) field);
} else if (type == Float.class) {
out.writeByte((byte) 4);
out.writeFloat((Float) field);
} else if (type == Double.class) {
out.writeByte((byte) 5);
out.writeDouble((Double) field);
} else if (type == Byte.class) {
out.writeByte((byte) 6);
out.writeByte((Byte) field);
} else if (type == Short.class) {
out.writeByte((byte) 7);
out.writeShort((Short) field);
} else if (type == Boolean.class) {
out.writeByte((byte) 8);
out.writeBoolean((Boolean) field);
} else if (type == BytesRef.class) {
out.writeByte((byte) 9);
out.writeBytesRef((BytesRef) field);
} else {
throw new IOException("Can't handle sort field value of type [" + type + "]");
}
}
}
out.writeVInt(doc.doc);
out.writeFloat(doc.score);
}
} else {
out.writeBoolean(false);
out.writeVInt(topDocs.totalHits);
out.writeFloat(topDocs.getMaxScore());
out.writeVInt(topDocs.scoreDocs.length - from);
int index = 0;
for (ScoreDoc doc : topDocs.scoreDocs) {
if (index++ < from) {
continue;
}
out.writeVInt(doc.doc);
out.writeFloat(doc.score);
}
}
}
// LUCENE 4 UPGRADE: We might want to maintain our own ordinal, instead of Lucene's ordinal
public static SortField.Type readSortType(StreamInput in) throws IOException {
return SortField.Type.values()[in.readVInt()];
}
public static void writeSortType(StreamOutput out, SortField.Type sortType) throws IOException {
out.writeVInt(sortType.ordinal());
}
public static Explanation readExplanation(StreamInput in) throws IOException {
float value = in.readFloat();
String description = in.readString();
Explanation explanation = new Explanation(value, description);
if (in.readBoolean()) {
int size = in.readVInt();
for (int i = 0; i < size; i++) {
explanation.addDetail(readExplanation(in));
}
}
return explanation;
}
public static void writeExplanation(StreamOutput out, Explanation explanation) throws IOException {
out.writeFloat(explanation.getValue());
out.writeString(explanation.getDescription());
Explanation[] subExplanations = explanation.getDetails();
if (subExplanations == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
out.writeVInt(subExplanations.length);
for (Explanation subExp : subExplanations) {
writeExplanation(out, subExp);
}
}
}
public static class ExistsCollector extends Collector {
private boolean exists;
public void reset() {
exists = false;
}
public boolean exists() {
return exists;
}
@Override
public void setScorer(Scorer scorer) throws IOException {
this.exists = false;
}
@Override
public void collect(int doc) throws IOException {
exists = true;
}
@Override
public void setNextReader(AtomicReaderContext context) throws IOException {
}
@Override
public boolean acceptsDocsOutOfOrder() {
return true;
}
}
private Lucene() {
}
public static final boolean indexExists(final Directory directory) throws IOException {
return DirectoryReader.indexExists(directory);
}
}
| 1no label
|
src_main_java_org_elasticsearch_common_lucene_Lucene.java
|
311 |
public class ClusterIndexHealth implements Iterable<ClusterShardHealth>, Streamable, ToXContent {
private String index;
private int numberOfShards;
private int numberOfReplicas;
int activeShards = 0;
int relocatingShards = 0;
int initializingShards = 0;
int unassignedShards = 0;
int activePrimaryShards = 0;
ClusterHealthStatus status = ClusterHealthStatus.RED;
final Map<Integer, ClusterShardHealth> shards = Maps.newHashMap();
List<String> validationFailures;
private ClusterIndexHealth() {
}
public ClusterIndexHealth(IndexMetaData indexMetaData, IndexRoutingTable indexRoutingTable) {
this.index = indexMetaData.index();
this.numberOfShards = indexMetaData.getNumberOfShards();
this.numberOfReplicas = indexMetaData.getNumberOfReplicas();
this.validationFailures = indexRoutingTable.validate(indexMetaData);
for (IndexShardRoutingTable shardRoutingTable : indexRoutingTable) {
ClusterShardHealth shardHealth = new ClusterShardHealth(shardRoutingTable.shardId().id());
for (ShardRouting shardRouting : shardRoutingTable) {
if (shardRouting.active()) {
shardHealth.activeShards++;
if (shardRouting.relocating()) {
// the shard is relocating, the one it is relocating to will be in initializing state, so we don't count it
shardHealth.relocatingShards++;
}
if (shardRouting.primary()) {
shardHealth.primaryActive = true;
}
} else if (shardRouting.initializing()) {
shardHealth.initializingShards++;
} else if (shardRouting.unassigned()) {
shardHealth.unassignedShards++;
}
}
if (shardHealth.primaryActive) {
if (shardHealth.activeShards == shardRoutingTable.size()) {
shardHealth.status = ClusterHealthStatus.GREEN;
} else {
shardHealth.status = ClusterHealthStatus.YELLOW;
}
} else {
shardHealth.status = ClusterHealthStatus.RED;
}
shards.put(shardHealth.getId(), shardHealth);
}
// update the index status
status = ClusterHealthStatus.GREEN;
for (ClusterShardHealth shardHealth : shards.values()) {
if (shardHealth.isPrimaryActive()) {
activePrimaryShards++;
}
activeShards += shardHealth.activeShards;
relocatingShards += shardHealth.relocatingShards;
initializingShards += shardHealth.initializingShards;
unassignedShards += shardHealth.unassignedShards;
if (shardHealth.getStatus() == ClusterHealthStatus.RED) {
status = ClusterHealthStatus.RED;
} else if (shardHealth.getStatus() == ClusterHealthStatus.YELLOW && status != ClusterHealthStatus.RED) {
// do not override an existing red
status = ClusterHealthStatus.YELLOW;
}
}
if (!validationFailures.isEmpty()) {
status = ClusterHealthStatus.RED;
} else if (shards.isEmpty()) { // might be since none has been created yet (two phase index creation)
status = ClusterHealthStatus.RED;
}
}
public String getIndex() {
return index;
}
public List<String> getValidationFailures() {
return this.validationFailures;
}
public int getNumberOfShards() {
return numberOfShards;
}
public int getNumberOfReplicas() {
return numberOfReplicas;
}
public int getActiveShards() {
return activeShards;
}
public int getRelocatingShards() {
return relocatingShards;
}
public int getActivePrimaryShards() {
return activePrimaryShards;
}
public int getInitializingShards() {
return initializingShards;
}
public int getUnassignedShards() {
return unassignedShards;
}
public ClusterHealthStatus getStatus() {
return status;
}
public Map<Integer, ClusterShardHealth> getShards() {
return this.shards;
}
@Override
public Iterator<ClusterShardHealth> iterator() {
return shards.values().iterator();
}
public static ClusterIndexHealth readClusterIndexHealth(StreamInput in) throws IOException {
ClusterIndexHealth indexHealth = new ClusterIndexHealth();
indexHealth.readFrom(in);
return indexHealth;
}
@Override
public void readFrom(StreamInput in) throws IOException {
index = in.readString();
numberOfShards = in.readVInt();
numberOfReplicas = in.readVInt();
activePrimaryShards = in.readVInt();
activeShards = in.readVInt();
relocatingShards = in.readVInt();
initializingShards = in.readVInt();
unassignedShards = in.readVInt();
status = ClusterHealthStatus.fromValue(in.readByte());
int size = in.readVInt();
for (int i = 0; i < size; i++) {
ClusterShardHealth shardHealth = readClusterShardHealth(in);
shards.put(shardHealth.getId(), shardHealth);
}
size = in.readVInt();
if (size == 0) {
validationFailures = ImmutableList.of();
} else {
for (int i = 0; i < size; i++) {
validationFailures.add(in.readString());
}
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(index);
out.writeVInt(numberOfShards);
out.writeVInt(numberOfReplicas);
out.writeVInt(activePrimaryShards);
out.writeVInt(activeShards);
out.writeVInt(relocatingShards);
out.writeVInt(initializingShards);
out.writeVInt(unassignedShards);
out.writeByte(status.value());
out.writeVInt(shards.size());
for (ClusterShardHealth shardHealth : this) {
shardHealth.writeTo(out);
}
out.writeVInt(validationFailures.size());
for (String failure : validationFailures) {
out.writeString(failure);
}
}
static final class Fields {
static final XContentBuilderString STATUS = new XContentBuilderString("status");
static final XContentBuilderString NUMBER_OF_SHARDS = new XContentBuilderString("number_of_shards");
static final XContentBuilderString NUMBER_OF_REPLICAS = new XContentBuilderString("number_of_replicas");
static final XContentBuilderString ACTIVE_PRIMARY_SHARDS = new XContentBuilderString("active_primary_shards");
static final XContentBuilderString ACTIVE_SHARDS = new XContentBuilderString("active_shards");
static final XContentBuilderString RELOCATING_SHARDS = new XContentBuilderString("relocating_shards");
static final XContentBuilderString INITIALIZING_SHARDS = new XContentBuilderString("initializing_shards");
static final XContentBuilderString UNASSIGNED_SHARDS = new XContentBuilderString("unassigned_shards");
static final XContentBuilderString VALIDATION_FAILURES = new XContentBuilderString("validation_failures");
static final XContentBuilderString SHARDS = new XContentBuilderString("shards");
static final XContentBuilderString PRIMARY_ACTIVE = new XContentBuilderString("primary_active");
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(Fields.STATUS, getStatus().name().toLowerCase(Locale.ROOT));
builder.field(Fields.NUMBER_OF_SHARDS, getNumberOfShards());
builder.field(Fields.NUMBER_OF_REPLICAS, getNumberOfReplicas());
builder.field(Fields.ACTIVE_PRIMARY_SHARDS, getActivePrimaryShards());
builder.field(Fields.ACTIVE_SHARDS, getActiveShards());
builder.field(Fields.RELOCATING_SHARDS, getRelocatingShards());
builder.field(Fields.INITIALIZING_SHARDS, getInitializingShards());
builder.field(Fields.UNASSIGNED_SHARDS, getUnassignedShards());
if (!getValidationFailures().isEmpty()) {
builder.startArray(Fields.VALIDATION_FAILURES);
for (String validationFailure : getValidationFailures()) {
builder.value(validationFailure);
}
builder.endArray();
}
if ("shards".equals(params.param("level", "indices"))) {
builder.startObject(Fields.SHARDS);
for (ClusterShardHealth shardHealth : shards.values()) {
builder.startObject(Integer.toString(shardHealth.getId()));
builder.field(Fields.STATUS, shardHealth.getStatus().name().toLowerCase(Locale.ROOT));
builder.field(Fields.PRIMARY_ACTIVE, shardHealth.isPrimaryActive());
builder.field(Fields.ACTIVE_SHARDS, shardHealth.getActiveShards());
builder.field(Fields.RELOCATING_SHARDS, shardHealth.getRelocatingShards());
builder.field(Fields.INITIALIZING_SHARDS, shardHealth.getInitializingShards());
builder.field(Fields.UNASSIGNED_SHARDS, shardHealth.getUnassignedShards());
builder.endObject();
}
builder.endObject();
}
return builder;
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_health_ClusterIndexHealth.java
|
179 |
private static class WoohaaException extends RuntimeException {
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_atomiclong_ClientAtomicLongTest.java
|
76 |
@SuppressWarnings("serial")
static final class MapReduceKeysToLongTask<K,V>
extends BulkTask<K,V,Long> {
final ObjectToLong<? super K> transformer;
final LongByLongToLong reducer;
final long basis;
long result;
MapReduceKeysToLongTask<K,V> rights, nextRight;
MapReduceKeysToLongTask
(BulkTask<K,V,?> p, int b, int i, int f, Node<K,V>[] t,
MapReduceKeysToLongTask<K,V> nextRight,
ObjectToLong<? super K> transformer,
long basis,
LongByLongToLong reducer) {
super(p, b, i, f, t); this.nextRight = nextRight;
this.transformer = transformer;
this.basis = basis; this.reducer = reducer;
}
public final Long getRawResult() { return result; }
public final void compute() {
final ObjectToLong<? super K> transformer;
final LongByLongToLong reducer;
if ((transformer = this.transformer) != null &&
(reducer = this.reducer) != null) {
long r = this.basis;
for (int i = baseIndex, f, h; batch > 0 &&
(h = ((f = baseLimit) + i) >>> 1) > i;) {
addToPendingCount(1);
(rights = new MapReduceKeysToLongTask<K,V>
(this, batch >>>= 1, baseLimit = h, f, tab,
rights, transformer, r, reducer)).fork();
}
for (Node<K,V> p; (p = advance()) != null; )
r = reducer.apply(r, transformer.apply(p.key));
result = r;
CountedCompleter<?> c;
for (c = firstComplete(); c != null; c = c.nextComplete()) {
@SuppressWarnings("unchecked") MapReduceKeysToLongTask<K,V>
t = (MapReduceKeysToLongTask<K,V>)c,
s = t.rights;
while (s != null) {
t.result = reducer.apply(t.result, s.result);
s = t.rights = s.nextRight;
}
}
}
}
}
| 0true
|
src_main_java_jsr166e_ConcurrentHashMapV8.java
|
235 |
.registerHookValue(profilerPrefix + "enabled", "Cache enabled", METRIC_TYPE.ENABLED, new OProfilerHookValue() {
public Object getValue() {
return isEnabled();
}
}, profilerMetadataPrefix + "enabled");
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_cache_OAbstractRecordCache.java
|
344 |
public class NonEmptyNodeReplaceInsert extends NodeReplaceInsert {
@Override
protected boolean replaceNode(Node[] primaryNodes, Node testNode, final String attribute, List<Node> usedNodes) {
if (testNode.getAttributes().getNamedItem(attribute) == null) {
return false;
}
//filter out primary nodes that don't have the attribute
ArrayList<Node> filterList = new ArrayList<Node>();
for (int j=0;j<primaryNodes.length;j++){
if (primaryNodes[j].getAttributes().getNamedItem(attribute) != null) {
filterList.add(primaryNodes[j]);
}
}
Node[] filtered = {};
filtered = filterList.toArray(filtered);
Comparator<Node> idCompare = new Comparator<Node>() {
public int compare(Node arg0, Node arg1) {
Node id1 = arg0.getAttributes().getNamedItem(attribute);
Node id2 = arg1.getAttributes().getNamedItem(attribute);
String idVal1 = id1.getNodeValue();
String idVal2 = id2.getNodeValue();
return idVal1.compareTo(idVal2);
}
};
Arrays.sort(filtered, idCompare);
int pos = Arrays.binarySearch(filtered, testNode, idCompare);
if (pos >= 0) {
evaluate:{
if (!testNode.hasChildNodes()) {
break evaluate;
}
Node newNode = filtered[pos].getOwnerDocument().importNode(testNode.cloneNode(true), true);
filtered[pos].getParentNode().replaceChild(newNode, filtered[pos]);
}
usedNodes.add(testNode);
return true;
}
return false;
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_extensibility_context_merge_handlers_NonEmptyNodeReplaceInsert.java
|
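
replaceNode above sorts the filtered nodes and then binary-searches them with the same Comparator; Arrays.binarySearch is only defined when the array was sorted by that exact comparator. A small JDK-only sketch of the pairing (names illustrative):

```java
import java.util.Arrays;
import java.util.Comparator;

public class SortedSearchDemo {
    public static void main(String[] args) {
        String[] ids = {"b", "c", "a"};
        Comparator<String> byValue = Comparator.naturalOrder();

        Arrays.sort(ids, byValue);                        // sort with the comparator...
        int pos = Arrays.binarySearch(ids, "c", byValue); // ...then search with the same one
        System.out.println(pos); // 2 (non-negative means found, as replaceNode checks)
    }
}
```
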
140 |
private class MyDistributedObjectListener implements DistributedObjectListener {
@Override
public void distributedObjectCreated(DistributedObjectEvent event) {
send(event);
}
@Override
public void distributedObjectDestroyed(DistributedObjectEvent event) {
}
private void send(DistributedObjectEvent event) {
if (endpoint.live()) {
PortableDistributedObjectEvent portableEvent = new PortableDistributedObjectEvent(
event.getEventType(), event.getDistributedObject().getName(), event.getServiceName());
endpoint.sendEvent(portableEvent, getCallId());
}
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_client_DistributedObjectListenerRequest.java
|
1,016 |
applyTailIndexes(lastIndexResult, new IndexValuesResultListener() {
@Override
public boolean addResult(OIdentifiable value) {
result.add(value);
return true;
}
});
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_sql_OChainedIndexProxy.java
|
2,837 |
final class PartitionReplicaVersions {
final int partitionId;
// read and updated only by operation/partition threads
final long[] versions = new long[InternalPartition.MAX_BACKUP_COUNT];
PartitionReplicaVersions(int partitionId) {
this.partitionId = partitionId;
}
long[] incrementAndGet(int backupCount) {
for (int i = 0; i < backupCount; i++) {
versions[i]++;
}
return versions;
}
long[] get() {
return versions;
}
boolean update(long[] newVersions, int currentReplica) {
int index = currentReplica - 1;
long current = versions[index];
long next = newVersions[index];
boolean updated = (current == next - 1);
if (updated) {
arraycopy(newVersions, 0, versions, 0, newVersions.length);
}
return updated;
}
void reset(long[] newVersions) {
arraycopy(newVersions, 0, versions, 0, newVersions.length);
}
void clear() {
for (int i = 0; i < versions.length; i++) {
versions[i] = 0;
}
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("PartitionReplicaVersions");
sb.append("{partitionId=").append(partitionId);
sb.append(", versions=").append(Arrays.toString(versions));
sb.append('}');
return sb.toString();
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_partition_impl_PartitionReplicaVersions.java
|
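
update above accepts an incoming version vector only when the given replica's version advances by exactly one; any gap is rejected so the caller can detect a missed backup. A standalone sketch of that contiguity check, mirroring the logic shown (class and method names illustrative):

```java
public class ReplicaVersionDemo {
    // Applies newVersions only when replica `currentReplica` advanced by exactly one.
    static boolean update(long[] versions, long[] newVersions, int currentReplica) {
        int index = currentReplica - 1; // replica 1 lives in slot 0
        boolean contiguous = versions[index] == newVersions[index] - 1;
        if (contiguous) {
            System.arraycopy(newVersions, 0, versions, 0, newVersions.length);
        }
        return contiguous;
    }

    public static void main(String[] args) {
        long[] local = {3, 1, 0};
        System.out.println(update(local, new long[]{4, 1, 0}, 1)); // true: 3 -> 4
        System.out.println(update(local, new long[]{7, 1, 0}, 1)); // false: 4 -> 7 is a gap
    }
}
```
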
110 |
public class DoubleAdder extends Striped64 implements Serializable {
private static final long serialVersionUID = 7249069246863182397L;
/**
* Update function. Note that we must use "long" for underlying
* representations, because there is no compareAndSet for double,
* due to the fact that the bitwise equals used in any CAS
* implementation is not the same as double-precision equals.
* However, we use CAS only to detect and alleviate contention,
* for which bitwise equals works best anyway. In principle, the
* long/double conversions used here should be essentially free on
* most platforms since they just re-interpret bits.
*
* Similar conversions are used in other methods.
*/
final long fn(long v, long x) {
return Double.doubleToRawLongBits
(Double.longBitsToDouble(v) +
Double.longBitsToDouble(x));
}
/**
* Creates a new adder with initial sum of zero.
*/
public DoubleAdder() {
}
/**
* Adds the given value.
*
* @param x the value to add
*/
public void add(double x) {
Cell[] as; long b, v; HashCode hc; Cell a; int n;
if ((as = cells) != null ||
!casBase(b = base,
Double.doubleToRawLongBits
(Double.longBitsToDouble(b) + x))) {
boolean uncontended = true;
int h = (hc = threadHashCode.get()).code;
if (as == null || (n = as.length) < 1 ||
(a = as[(n - 1) & h]) == null ||
!(uncontended = a.cas(v = a.value,
Double.doubleToRawLongBits
(Double.longBitsToDouble(v) + x))))
retryUpdate(Double.doubleToRawLongBits(x), hc, uncontended);
}
}
/**
* Returns the current sum. The returned value is <em>NOT</em> an
* atomic snapshot; invocation in the absence of concurrent
* updates returns an accurate result, but concurrent updates that
* occur while the sum is being calculated might not be
* incorporated. Also, because floating-point arithmetic is not
* strictly associative, the returned result need not be identical
* to the value that would be obtained in a sequential series of
* updates to a single variable.
*
* @return the sum
*/
public double sum() {
Cell[] as = cells;
double sum = Double.longBitsToDouble(base);
if (as != null) {
int n = as.length;
for (int i = 0; i < n; ++i) {
Cell a = as[i];
if (a != null)
sum += Double.longBitsToDouble(a.value);
}
}
return sum;
}
/**
* Resets variables maintaining the sum to zero. This method may
* be a useful alternative to creating a new adder, but is only
* effective if there are no concurrent updates. Because this
* method is intrinsically racy, it should only be used when it is
* known that no threads are concurrently updating.
*/
public void reset() {
internalReset(0L);
}
/**
* Equivalent in effect to {@link #sum} followed by {@link
* #reset}. This method may apply for example during quiescent
* points between multithreaded computations. If there are
* updates concurrent with this method, the returned value is
* <em>not</em> guaranteed to be the final value occurring before
* the reset.
*
* @return the sum
*/
public double sumThenReset() {
Cell[] as = cells;
double sum = Double.longBitsToDouble(base);
base = 0L;
if (as != null) {
int n = as.length;
for (int i = 0; i < n; ++i) {
Cell a = as[i];
if (a != null) {
long v = a.value;
a.value = 0L;
sum += Double.longBitsToDouble(v);
}
}
}
return sum;
}
/**
* Returns the String representation of the {@link #sum}.
* @return the String representation of the {@link #sum}
*/
public String toString() {
return Double.toString(sum());
}
/**
* Equivalent to {@link #sum}.
*
* @return the sum
*/
public double doubleValue() {
return sum();
}
/**
* Returns the {@link #sum} as a {@code long} after a
* narrowing primitive conversion.
*/
public long longValue() {
return (long)sum();
}
/**
* Returns the {@link #sum} as an {@code int} after a
* narrowing primitive conversion.
*/
public int intValue() {
return (int)sum();
}
/**
* Returns the {@link #sum} as a {@code float}
* after a narrowing primitive conversion.
*/
public float floatValue() {
return (float)sum();
}
private void writeObject(java.io.ObjectOutputStream s)
throws java.io.IOException {
s.defaultWriteObject();
s.writeDouble(sum());
}
private void readObject(java.io.ObjectInputStream s)
throws java.io.IOException, ClassNotFoundException {
s.defaultReadObject();
busy = 0;
cells = null;
base = Double.doubleToRawLongBits(s.readDouble());
}
}
| 0true
|
src_main_java_jsr166e_DoubleAdder.java
|
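A short usage sketch of the striped adder above. It uses java.util.concurrent.atomic.DoubleAdder, the JDK 8 class that adopted this jsr166e backport with the same API; under contention, adds land in per-thread cells and sum() folds the base plus all cells:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.DoubleAdder; // JDK 8 equivalent of the jsr166e backport

public class DoubleAdderDemo {
    public static void main(String[] args) throws InterruptedException {
        DoubleAdder total = new DoubleAdder();
        ExecutorService pool = Executors.newFixedThreadPool(4);
        for (int t = 0; t < 4; t++) {
            pool.execute(() -> {
                for (int i = 0; i < 100_000; i++) {
                    total.add(0.5); // contended adds spill into striped cells, not one hot CAS
                }
            });
        }
        pool.shutdown();
        pool.awaitTermination(1, TimeUnit.MINUTES);
        System.out.println(total.sum()); // 200000.0 once all updates are quiescent
    }
}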
121 |
public abstract class OAbstractProfiler extends OSharedResourceAbstract implements OProfilerMBean {
protected long recordingFrom = -1;
protected final Map<String, OProfilerHookValue> hooks = new ConcurrentHashMap<String, OProfilerHookValue>();
protected final ConcurrentHashMap<String, String> dictionary = new ConcurrentHashMap<String, String>();
protected final ConcurrentHashMap<String, METRIC_TYPE> types = new ConcurrentHashMap<String, METRIC_TYPE>();
public interface OProfilerHookValue {
public Object getValue();
}
public OAbstractProfiler() {
}
public OAbstractProfiler(final OAbstractProfiler profiler) {
hooks.putAll(profiler.hooks);
dictionary.putAll(profiler.dictionary);
types.putAll(profiler.types);
}
public void shutdown() {
stopRecording();
}
public boolean startRecording() {
if (isRecording())
return false;
recordingFrom = System.currentTimeMillis();
return true;
}
public boolean stopRecording() {
if (!isRecording())
return false;
recordingFrom = -1;
return true;
}
public boolean isRecording() {
return recordingFrom > -1;
}
public void updateCounter(final String iStatName, final String iDescription, final long iPlus) {
updateCounter(iStatName, iDescription, iPlus, iStatName);
}
@Override
public String getName() {
return "profiler";
}
@Override
public void startup() {
startRecording();
}
@Override
public String dump() {
return null;
}
@Override
public String dumpCounters() {
return null;
}
@Override
public OProfilerEntry getChrono(String string) {
return null;
}
@Override
public long startChrono() {
return 0;
}
@Override
public long stopChrono(String iName, String iDescription, long iStartTime) {
return 0;
}
@Override
public long stopChrono(String iName, String iDescription, long iStartTime, String iDictionary) {
return 0;
}
@Override
public String dumpChronos() {
return null;
}
@Override
public String[] getCountersAsString() {
return null;
}
@Override
public String[] getChronosAsString() {
return null;
}
@Override
public Date getLastReset() {
return null;
}
@Override
public void setAutoDump(int iNewValue) {
}
@Override
public String metadataToJSON() {
return null;
}
@Override
public Map<String, OPair<String, METRIC_TYPE>> getMetadata() {
final Map<String, OPair<String, METRIC_TYPE>> metadata = new HashMap<String, OPair<String, METRIC_TYPE>>();
for (Entry<String, String> entry : dictionary.entrySet())
metadata.put(entry.getKey(), new OPair<String, METRIC_TYPE>(entry.getValue(), types.get(entry.getKey())));
return metadata;
}
public void registerHookValue(final String iName, final String iDescription, final METRIC_TYPE iType,
final OProfilerHookValue iHookValue) {
registerHookValue(iName, iDescription, iType, iHookValue, iName);
}
public void registerHookValue(final String iName, final String iDescription, final METRIC_TYPE iType,
final OProfilerHookValue iHookValue, final String iMetadataName) {
if (iName != null) {
unregisterHookValue(iName);
updateMetadata(iMetadataName, iDescription, iType);
hooks.put(iName, iHookValue);
}
}
@Override
public void unregisterHookValue(final String iName) {
if (iName != null)
hooks.remove(iName);
}
@Override
public String getSystemMetric(final String iMetricName) {
final StringBuilder buffer = new StringBuilder();
buffer.append("system.");
buffer.append(iMetricName);
return buffer.toString();
}
@Override
public String getProcessMetric(final String iMetricName) {
final StringBuilder buffer = new StringBuilder();
buffer.append("process.");
buffer.append(iMetricName);
return buffer.toString();
}
@Override
public String getDatabaseMetric(final String iDatabaseName, final String iMetricName) {
final StringBuilder buffer = new StringBuilder();
buffer.append("db.");
buffer.append(iDatabaseName != null ? iDatabaseName : "*");
buffer.append('.');
buffer.append(iMetricName);
return buffer.toString();
}
@Override
public String toJSON(String command, final String iPar1) {
return null;
}
/**
* Updates the metric metadata.
*/
protected void updateMetadata(final String iName, final String iDescription, final METRIC_TYPE iType) {
if (iDescription != null && dictionary.putIfAbsent(iName, iDescription) == null)
types.put(iName, iType);
}
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_profiler_OAbstractProfiler.java
|
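The profiler above pulls metric values on demand through registered OProfilerHookValue callbacks rather than having producers push samples. A minimal standalone sketch of that pull-based hook pattern, using java.util.function.Supplier in place of the OrientDB interface:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Supplier;

public final class HookRegistry {
    private final Map<String, Supplier<Object>> hooks = new ConcurrentHashMap<>();

    public void registerHookValue(String name, Supplier<Object> hook) {
        hooks.remove(name); // same re-register semantics as registerHookValue() above
        hooks.put(name, hook);
    }

    public Object snapshot(String name) {
        Supplier<Object> hook = hooks.get(name);
        return hook != null ? hook.get() : null; // value is computed only when asked for
    }

    public static void main(String[] args) {
        HookRegistry registry = new HookRegistry();
        registry.registerHookValue("process.runtime.freeMemory",
                () -> Runtime.getRuntime().freeMemory());
        System.out.println(registry.snapshot("process.runtime.freeMemory"));
    }
}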
2,612 |
private class SendPingRequest implements Runnable {
private final DiscoveryNode node;
private SendPingRequest(DiscoveryNode node) {
this.node = node;
}
@Override
public void run() {
if (!running) {
return;
}
transportService.sendRequest(node, PingRequestHandler.ACTION, new PingRequest(node.id()), options().withType(TransportRequestOptions.Type.PING).withTimeout(pingRetryTimeout),
new BaseTransportResponseHandler<PingResponse>() {
@Override
public PingResponse newInstance() {
return new PingResponse();
}
@Override
public void handleResponse(PingResponse response) {
if (!running) {
return;
}
NodeFD nodeFD = nodesFD.get(node);
if (nodeFD != null) {
if (!nodeFD.running) {
return;
}
nodeFD.retryCount = 0;
threadPool.schedule(pingInterval, ThreadPool.Names.SAME, SendPingRequest.this);
}
}
@Override
public void handleException(TransportException exp) {
// check if the master node did not get switched on us...
if (!running) {
return;
}
if (exp instanceof ConnectTransportException) {
// ignore this one, we already handle it by registering a connection listener
return;
}
NodeFD nodeFD = nodesFD.get(node);
if (nodeFD != null) {
if (!nodeFD.running) {
return;
}
int retryCount = ++nodeFD.retryCount;
logger.trace("[node ] failed to ping [{}], retry [{}] out of [{}]", exp, node, retryCount, pingRetryCount);
if (retryCount >= pingRetryCount) {
logger.debug("[node ] failed to ping [{}], tried [{}] times, each with maximum [{}] timeout", node, pingRetryCount, pingRetryTimeout);
// not good, failure
if (nodesFD.remove(node) != null) {
notifyNodeFailure(node, "failed to ping, tried [" + pingRetryCount + "] times, each with maximum [" + pingRetryTimeout + "] timeout");
}
} else {
// resend the request, not reschedule, rely on send timeout
transportService.sendRequest(node, PingRequestHandler.ACTION, new PingRequest(node.id()),
options().withType(TransportRequestOptions.Type.PING).withTimeout(pingRetryTimeout), this);
}
}
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
});
}
}
| 1no label
|
src_main_java_org_elasticsearch_discovery_zen_fd_NodesFaultDetection.java
|
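The handler above escalates to notifyNodeFailure only after pingRetryCount consecutive misses, and any successful ping resets the counter to zero. A standalone sketch of just that retry policy (names are mine):

public final class PingRetryPolicy {
    private final int maxRetries;
    private int retryCount;

    public PingRetryPolicy(int maxRetries) { this.maxRetries = maxRetries; }

    public void onPingOk()        { retryCount = 0; }                    // node answered, forget misses
    public boolean onPingFailed() { return ++retryCount >= maxRetries; } // true => declare node failed

    public static void main(String[] args) {
        PingRetryPolicy policy = new PingRetryPolicy(3);
        System.out.println(policy.onPingFailed()); // false (1 of 3)
        System.out.println(policy.onPingFailed()); // false (2 of 3)
        policy.onPingOk();                         // reset on success
        System.out.println(policy.onPingFailed()); // false again (1 of 3)
    }
}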
149 |
public abstract class KeyBasedClientRequest extends PartitionClientRequest {
protected abstract Object getKey();
protected final int getPartition() {
Object key = getKey();
InternalPartitionService partitionService = clientEngine.getPartitionService();
if (key instanceof String) {
return partitionService.getPartitionId(getPartitionKey((String) key));
}
return partitionService.getPartitionId(key);
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_client_KeyBasedClientRequest.java
|
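For string keys, getPartitionKey lets a key of the form name@partitionKey route on the part after '@', so related structures land on the same member. A standalone sketch of the idea; the hash below is a placeholder, not Hazelcast's actual partition hash:

public final class PartitionKeyDemo {
    static String getPartitionKey(String key) {
        int at = key.indexOf('@');
        return at < 0 ? key : key.substring(at + 1); // only the suffix decides the partition
    }

    static int partitionId(Object key, int partitionCount) {
        return Math.abs(key.hashCode() % partitionCount); // placeholder hash for illustration
    }

    public static void main(String[] args) {
        int partitions = 271;
        System.out.println(partitionId(getPartitionKey("ordersLock@customer42"), partitions));
        System.out.println(partitionId(getPartitionKey("ordersQueue@customer42"), partitions));
        // both print the same id: the two structures are co-located on purpose
    }
}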
71 |
public interface StaticAssetStorageDao {
StaticAssetStorage create();
StaticAssetStorage readStaticAssetStorageById(Long id);
public StaticAssetStorage readStaticAssetStorageByStaticAssetId(Long id);
StaticAssetStorage save(StaticAssetStorage assetStorage);
void delete(StaticAssetStorage assetStorage);
public Blob createBlob(MultipartFile uploadedFile) throws IOException;
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_file_dao_StaticAssetStorageDao.java
|
338 |
public static class NodeRestartResponse extends NodeOperationResponse {
NodeRestartResponse() {
}
public NodeRestartResponse(DiscoveryNode node) {
super(node);
}
public static NodeRestartResponse readNodeRestartResponse(StreamInput in) throws IOException {
NodeRestartResponse res = new NodeRestartResponse();
res.readFrom(in);
return res;
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_node_restart_NodesRestartResponse.java
|
27 |
static final class RunAfterEither extends Completion {
final CompletableFuture<?> src;
final CompletableFuture<?> snd;
final Runnable fn;
final CompletableFuture<Void> dst;
final Executor executor;
RunAfterEither(CompletableFuture<?> src,
CompletableFuture<?> snd,
Runnable fn,
CompletableFuture<Void> dst,
Executor executor) {
this.src = src; this.snd = snd;
this.fn = fn; this.dst = dst;
this.executor = executor;
}
public final void run() {
final CompletableFuture<?> a;
final CompletableFuture<?> b;
final Runnable fn;
final CompletableFuture<Void> dst;
Object r; Throwable ex;
if ((dst = this.dst) != null &&
(fn = this.fn) != null &&
(((a = this.src) != null && (r = a.result) != null) ||
((b = this.snd) != null && (r = b.result) != null)) &&
compareAndSet(0, 1)) {
if (r instanceof AltResult)
ex = ((AltResult)r).ex;
else
ex = null;
Executor e = executor;
if (ex == null) {
try {
if (e != null)
e.execute(new AsyncRun(fn, dst));
else
fn.run();
} catch (Throwable rex) {
ex = rex;
}
}
if (e == null || ex != null)
dst.internalComplete(null, ex);
}
}
private static final long serialVersionUID = 5232453952276885070L;
}
| 0true
|
src_main_java_jsr166e_CompletableFuture.java
|
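A usage sketch against JDK 8's CompletableFuture, which adopted the same API as this backport. The compareAndSet(0, 1) guard in run() above is what guarantees the action fires exactly once even when both sources complete:

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;

public class RunAfterEitherDemo {
    public static void main(String[] args) throws Exception {
        CompletableFuture<String> primary = new CompletableFuture<>();
        CompletableFuture<String> fallback = new CompletableFuture<>();

        CompletableFuture<Void> done = primary.runAfterEither(fallback,
                () -> System.out.println("first responder won"));

        fallback.complete("cached value"); // triggers the action
        primary.complete("fresh value");   // too late, the action already ran once
        done.get(1, TimeUnit.SECONDS);
    }
}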
14 |
public interface TextCommandService {
boolean offer(String queueName, Object value);
Object poll(String queueName, int seconds);
Object poll(String queueName);
void processRequest(TextCommand command);
void sendResponse(TextCommand textCommand);
Object get(String mapName, String key);
byte[] getByteArray(String mapName, String key);
Object put(String mapName, String key, Object value);
Object put(String mapName, String key, Object value, int ttlSeconds);
Object putIfAbsent(String mapName, String key, Object value, int ttlSeconds);
Object replace(String mapName, String key, Object value);
void lock(String mapName, String key) throws InterruptedException;
void unlock(String mapName, String key);
int getAdjustedTTLSeconds(int ttl);
long incrementDeleteHitCount(int inc);
long incrementDeleteMissCount();
long incrementGetHitCount();
long incrementGetMissCount();
long incrementSetCount();
long incrementIncHitCount();
long incrementIncMissCount();
long incrementDecrHitCount();
long incrementDecrMissCount();
long incrementTouchCount();
/**
* Returns the size of the distributed queue instance with the specified name
* @param queueName name of the distributed queue
* @return the size of the distributed queue instance with the specified name
*/
int size(String queueName);
Object delete(String mapName, String key);
void deleteAll(String mapName);
Stats getStats();
Node getNode();
byte[] toByteArray(Object value);
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_ascii_TextCommandService.java
|
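One non-obvious contract here is getAdjustedTTLSeconds: the memcached protocol treats an expiry value larger than 30 days as an absolute unix timestamp rather than a duration. A standalone sketch of that convention (an assumption based on the memcached spec; the actual Hazelcast implementation may differ):

public final class MemcachedTtl {
    private static final int THIRTY_DAYS = 60 * 60 * 24 * 30;

    static int adjustedTtlSeconds(int ttl) {
        if (ttl <= THIRTY_DAYS) {
            return ttl; // plain relative duration in seconds
        }
        long now = System.currentTimeMillis() / 1000;
        return (int) Math.max(0, ttl - now); // absolute timestamp -> remaining seconds
    }

    public static void main(String[] args) {
        System.out.println(adjustedTtlSeconds(300)); // 300s from now
        long inOneHour = System.currentTimeMillis() / 1000 + 3600;
        System.out.println(adjustedTtlSeconds((int) inOneHour)); // ~3600
    }
}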
838 |
public class ShardSearchFailure implements ShardOperationFailedException {
public static final ShardSearchFailure[] EMPTY_ARRAY = new ShardSearchFailure[0];
private SearchShardTarget shardTarget;
private String reason;
private RestStatus status;
private transient Throwable failure;
private ShardSearchFailure() {
}
@Nullable
public Throwable failure() {
return failure;
}
public ShardSearchFailure(Throwable t) {
this(t, null);
}
public ShardSearchFailure(Throwable t, @Nullable SearchShardTarget shardTarget) {
this.failure = t;
Throwable actual = ExceptionsHelper.unwrapCause(t);
if (actual != null && actual instanceof SearchException) {
this.shardTarget = ((SearchException) actual).shard();
} else if (shardTarget != null) {
this.shardTarget = shardTarget;
}
if (actual != null && actual instanceof ElasticsearchException) {
status = ((ElasticsearchException) actual).status();
} else {
status = RestStatus.INTERNAL_SERVER_ERROR;
}
this.reason = ExceptionsHelper.detailedMessage(t);
}
public ShardSearchFailure(String reason, SearchShardTarget shardTarget) {
this(reason, shardTarget, RestStatus.INTERNAL_SERVER_ERROR);
}
public ShardSearchFailure(String reason, SearchShardTarget shardTarget, RestStatus status) {
this.shardTarget = shardTarget;
this.reason = reason;
this.status = status;
}
/**
* The search shard target the failure occurred on.
*/
@Nullable
public SearchShardTarget shard() {
return this.shardTarget;
}
public RestStatus status() {
return this.status;
}
/**
* The index the search failed on.
*/
@Override
public String index() {
if (shardTarget != null) {
return shardTarget.index();
}
return null;
}
/**
* The shard id the search failed on.
*/
@Override
public int shardId() {
if (shardTarget != null) {
return shardTarget.shardId();
}
return -1;
}
/**
* The reason of the failure.
*/
public String reason() {
return this.reason;
}
@Override
public String toString() {
return "shard [" + (shardTarget == null ? "_na" : shardTarget) + "], reason [" + reason + "]";
}
public static ShardSearchFailure readShardSearchFailure(StreamInput in) throws IOException {
ShardSearchFailure shardSearchFailure = new ShardSearchFailure();
shardSearchFailure.readFrom(in);
return shardSearchFailure;
}
@Override
public void readFrom(StreamInput in) throws IOException {
if (in.readBoolean()) {
shardTarget = readSearchShardTarget(in);
}
reason = in.readString();
status = RestStatus.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
if (shardTarget == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
shardTarget.writeTo(out);
}
out.writeString(reason);
RestStatus.writeTo(out, status);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_search_ShardSearchFailure.java
|
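writeTo()/readFrom() above use a presence flag: a boolean precedes the nullable shardTarget so the reader knows whether a payload follows. A self-contained sketch of the same convention over plain data streams:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class NullableStreamDemo {
    static void writeNullableString(DataOutputStream out, String value) throws IOException {
        out.writeBoolean(value != null); // presence flag first
        if (value != null) out.writeUTF(value);
    }

    static String readNullableString(DataInputStream in) throws IOException {
        return in.readBoolean() ? in.readUTF() : null;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buf);
        writeNullableString(out, null);
        writeNullableString(out, "shard failure reason");

        DataInputStream in = new DataInputStream(new ByteArrayInputStream(buf.toByteArray()));
        System.out.println(readNullableString(in)); // null
        System.out.println(readNullableString(in)); // shard failure reason
    }
}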
319 |
public class OStorageDataHoleConfiguration extends OStorageFileConfiguration {
private static final long serialVersionUID = 1L;
private static final String DEF_EXTENSION = ".odh";
private static final String DEF_INCREMENT_SIZE = "50%";
public OStorageDataHoleConfiguration() {
}
public OStorageDataHoleConfiguration(OStorageSegmentConfiguration iParent, String iPath, String iType, String iMaxSize) {
super(iParent, iPath + DEF_EXTENSION, iType, iMaxSize, DEF_INCREMENT_SIZE);
}
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_config_OStorageDataHoleConfiguration.java
|
271 |
public class MapPutRunnable implements Runnable, DataSerializable, HazelcastInstanceAware {
private HazelcastInstance instance;
public String mapName;
public MapPutRunnable(){}
public MapPutRunnable(String mapName) {
this.mapName = mapName;
}
public void writeData(ObjectDataOutput out) throws IOException {
out.writeUTF(mapName);
}
public void readData(ObjectDataInput in) throws IOException {
mapName = in.readUTF();
}
public void run() {
Member member = instance.getCluster().getLocalMember();
IMap map = instance.getMap(mapName);
map.put(member.getUuid(), member.getUuid()+"value");
}
@Override
public void setHazelcastInstance(HazelcastInstance hazelcastInstance) {
instance = hazelcastInstance;
}
public String getMapName() {
return mapName;
}
public void setMapName(String mapName) {
this.mapName = mapName;
}
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_executor_tasks_MapPutRunnable.java
|
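A hypothetical submission sketch for the task above, assuming a running cluster and MapPutRunnable visible in the same package. executeOnAllMembers runs the task on every member, and HazelcastInstanceAware injects the local instance before run() is called:

import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IExecutorService;

public class SubmitMapPut {
    public static void main(String[] args) {
        HazelcastInstance instance = Hazelcast.newHazelcastInstance();
        IExecutorService executor = instance.getExecutorService("default");
        executor.executeOnAllMembers(new MapPutRunnable("testMap"));
        // each member ends up putting its own uuid into "testMap"
    }
}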
962 |
public abstract class NodesOperationRequest<T extends NodesOperationRequest> extends ActionRequest<T> {
public static String[] ALL_NODES = Strings.EMPTY_ARRAY;
private String[] nodesIds;
private TimeValue timeout;
protected NodesOperationRequest() {
}
protected NodesOperationRequest(String... nodesIds) {
this.nodesIds = nodesIds;
}
public final String[] nodesIds() {
return nodesIds;
}
@SuppressWarnings("unchecked")
public final T nodesIds(String... nodesIds) {
this.nodesIds = nodesIds;
return (T) this;
}
public TimeValue timeout() {
return this.timeout;
}
@SuppressWarnings("unchecked")
public final T timeout(TimeValue timeout) {
this.timeout = timeout;
return (T) this;
}
@SuppressWarnings("unchecked")
public final T timeout(String timeout) {
this.timeout = TimeValue.parseTimeValue(timeout, null);
return (T) this;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
nodesIds = in.readStringArray();
if (in.readBoolean()) {
timeout = TimeValue.readTimeValue(in);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeStringArrayNullable(nodesIds);
if (timeout == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
timeout.writeTo(out);
}
}
}
| 0true
|
src_main_java_org_elasticsearch_action_support_nodes_NodesOperationRequest.java
|
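The (T) this returns above are the self-typed builder trick: base-class setters can be chained with subclass setters without casts. A standalone sketch of the pattern:

abstract class BaseRequest<T extends BaseRequest<T>> {
    private long timeoutMillis;

    @SuppressWarnings("unchecked")
    public final T timeout(long millis) {
        this.timeoutMillis = millis;
        return (T) this; // safe as long as each subclass declares itself as T
    }

    public long timeout() { return timeoutMillis; }
}

final class StatsRequest extends BaseRequest<StatsRequest> {
    String[] nodeIds = new String[0];

    public StatsRequest nodeIds(String... ids) {
        this.nodeIds = ids;
        return this;
    }
}

public class SelfTypedBuilderDemo {
    public static void main(String[] args) {
        // base-class setter first, subclass setter second: still no cast needed
        StatsRequest req = new StatsRequest().timeout(5_000).nodeIds("node1", "node2");
        System.out.println(req.timeout() + "ms for " + req.nodeIds.length + " nodes");
    }
}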
325 |
public class OStoragePaginatedClusterConfiguration implements OStorageClusterConfiguration {
public static float DEFAULT_GROW_FACTOR = (float) 1.2;
public transient OStorageConfiguration root;
public int id;
public String name;
public String location;
public boolean useWal = true;
public float recordOverflowGrowFactor = DEFAULT_GROW_FACTOR;
public float recordGrowFactor = DEFAULT_GROW_FACTOR;
public String compression = OGlobalConfiguration.STORAGE_COMPRESSION_METHOD
.getValueAsString();
public OStoragePaginatedClusterConfiguration(OStorageConfiguration root, int id, String name, String location, boolean useWal,
float recordOverflowGrowFactor, float recordGrowFactor, String compression) {
this.root = root;
this.id = id;
this.name = name;
this.location = location;
this.useWal = useWal;
this.recordOverflowGrowFactor = recordOverflowGrowFactor;
this.recordGrowFactor = recordGrowFactor;
this.compression = compression;
}
@Override
public int getId() {
return id;
}
@Override
public String getName() {
return name;
}
@Override
public String getLocation() {
return location;
}
@Override
public int getDataSegmentId() {
return -1;
}
}
| 0true
|
core_src_main_java_com_orientechnologies_orient_core_config_OStoragePaginatedClusterConfiguration.java
|
546 |
private static class TransactionalObjectKey {
private final String serviceName;
private final String name;
TransactionalObjectKey(String serviceName, String name) {
this.serviceName = serviceName;
this.name = name;
}
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof TransactionalObjectKey)) {
return false;
}
TransactionalObjectKey that = (TransactionalObjectKey) o;
if (!name.equals(that.name)) {
return false;
}
if (!serviceName.equals(that.serviceName)) {
return false;
}
return true;
}
public int hashCode() {
int result = serviceName.hashCode();
result = 31 * result + name.hashCode();
return result;
}
}
| 0true
|
hazelcast-client_src_main_java_com_hazelcast_client_txn_TransactionContextProxy.java
|
1,518 |
public class SideEffectMap {
public static final String CLASS = Tokens.makeNamespace(SideEffectMap.class) + ".class";
public static final String CLOSURE = Tokens.makeNamespace(SideEffectMap.class) + ".closure";
private static final ScriptEngine engine = new GremlinGroovyScriptEngine();
public enum Counters {
VERTICES_PROCESSED,
IN_EDGES_PROCESSED,
OUT_EDGES_PROCESSED
}
public static Configuration createConfiguration(final Class<? extends Element> klass, final String closure) {
final Configuration configuration = new EmptyConfiguration();
configuration.setClass(CLASS, klass, Element.class);
configuration.set(CLOSURE, closure);
return configuration;
}
public static class Map extends Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex> {
private Closure closure;
private boolean isVertex;
@Override
public void setup(final Mapper.Context context) throws IOException, InterruptedException {
this.isVertex = context.getConfiguration().getClass(CLASS, Element.class, Element.class).equals(Vertex.class);
try {
this.closure = (Closure) engine.eval(context.getConfiguration().get(CLOSURE));
} catch (final ScriptException e) {
throw new IOException(e.getMessage(), e);
}
}
@Override
public void map(final NullWritable key, final FaunusVertex value, final Mapper<NullWritable, FaunusVertex, NullWritable, FaunusVertex>.Context context) throws IOException, InterruptedException {
if (this.isVertex) {
if (value.hasPaths()) {
//for (int i = 0; i < value.pathCount(); i++) {
this.closure.call(value);
//}
DEFAULT_COMPAT.incrementContextCounter(context, Counters.VERTICES_PROCESSED, 1L);
}
} else {
long edgesProcessed = 0;
for (final Edge e : value.getEdges(Direction.IN)) {
final StandardFaunusEdge edge = (StandardFaunusEdge) e;
if (edge.hasPaths()) {
edgesProcessed++;
//for (int i = 0; i < edge.pathCount(); i++) {
this.closure.call(edge);
//}
}
}
DEFAULT_COMPAT.incrementContextCounter(context, Counters.IN_EDGES_PROCESSED, edgesProcessed);
edgesProcessed = 0;
for (final Edge e : value.getEdges(Direction.OUT)) {
final StandardFaunusEdge edge = (StandardFaunusEdge) e;
if (edge.hasPaths()) {
edgesProcessed++;
//for (int i = 0; i < edge.pathCount(); i++) {
this.closure.call(edge);
//}
}
}
DEFAULT_COMPAT.incrementContextCounter(context, Counters.OUT_EDGES_PROCESSED, edgesProcessed);
}
context.write(NullWritable.get(), value);
}
}
}
| 1no label
|
titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_mapreduce_sideeffect_SideEffectMap.java
|
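setup() above compiles a closure that was shipped to the mappers as a plain string in the job configuration. A standalone sketch of the same idea using the generic JSR-223 Groovy engine (assumes a Groovy jar on the classpath) instead of the Gremlin-specific one:

import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;

public class ClosureFromConfig {
    public static void main(String[] args) throws Exception {
        ScriptEngine engine = new ScriptEngineManager().getEngineByName("groovy");
        // evaluating a closure literal yields a first-class Closure object
        groovy.lang.Closure<?> closure =
                (groovy.lang.Closure<?>) engine.eval("{ it -> println('visited ' + it) }");
        closure.call("vertex-42"); // same call pattern as this.closure.call(value) above
    }
}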
129 |
public class TestApplyTransactions
{
@Test
public void testCommittedTransactionReceivedAreForcedToLog() throws Exception
{
/* GIVEN
* Create a tx on a db (as if the master), extract that, apply on dest (as if pullUpdate on slave).
* Let slave crash uncleanly.
*/
File baseStoreDir = new File( "base" );
File originStoreDir = new File( baseStoreDir, "origin" );
File destStoreDir = new File( baseStoreDir, "destination" );
GraphDatabaseAPI origin = (GraphDatabaseAPI) new TestGraphDatabaseFactory().setFileSystem( fs.get() )
.newImpermanentDatabase( originStoreDir.getPath() );
Transaction tx = origin.beginTx();
origin.createNode();
tx.success();
tx.finish();
XaDataSource originNeoDataSource = xaDs( origin );
int latestTxId = (int) originNeoDataSource.getLastCommittedTxId();
InMemoryLogBuffer theTx = new InMemoryLogBuffer();
originNeoDataSource.getLogExtractor( latestTxId, latestTxId ).extractNext( theTx );
final GraphDatabaseAPI dest = (GraphDatabaseAPI) new TestGraphDatabaseFactory().setFileSystem( fs.get() )
.newImpermanentDatabase( destStoreDir.getPath() );
XaDataSource destNeoDataSource = xaDs( dest );
destNeoDataSource.applyCommittedTransaction( latestTxId, theTx );
origin.shutdown();
EphemeralFileSystemAbstraction snapshot = fs.snapshot( shutdownDb( dest ) );
/*
* Open crashed db, try to extract the transaction it reports as latest. It should be there.
*/
GraphDatabaseAPI newDest = (GraphDatabaseAPI) new TestGraphDatabaseFactory().setFileSystem( snapshot )
.newImpermanentDatabase( destStoreDir.getPath() );
destNeoDataSource = newDest.getDependencyResolver().resolveDependency( XaDataSourceManager.class )
.getXaDataSource( NeoStoreXaDataSource.DEFAULT_DATA_SOURCE_NAME );
latestTxId = (int) destNeoDataSource.getLastCommittedTxId();
theTx = new InMemoryLogBuffer();
long extractedTxId = destNeoDataSource.getLogExtractor( latestTxId, latestTxId ).extractNext( theTx );
assertEquals( latestTxId, extractedTxId );
}
private XaDataSource xaDs( GraphDatabaseAPI origin )
{
return origin.getDependencyResolver().resolveDependency( XaDataSourceManager.class ).getXaDataSource(
NeoStoreXaDataSource.DEFAULT_DATA_SOURCE_NAME );
}
@Test
public void verifyThatRecoveredTransactionsHaveTheirDoneRecordsWrittenInOrder() throws IOException
{
XaDataSource ds;
File archivedLogFilename;
File originStoreDir = new File( new File( "base" ), "origin" );
String logicalLogFilename = "logicallog";
final GraphDatabaseAPI db1 = (GraphDatabaseAPI) new TestGraphDatabaseFactory()
.setFileSystem( fs.get() )
.newImpermanentDatabaseBuilder( originStoreDir.getPath() )
.setConfig( InternalAbstractGraphDatabase.Configuration.logical_log, logicalLogFilename )
.newGraphDatabase();
for ( int i = 0; i < 100; i++ )
{
Transaction tx = db1.beginTx();
db1.createNode();
tx.success();
tx.finish();
}
ds = xaDs( db1 );
archivedLogFilename = ds.getFileName( ds.getCurrentLogVersion() );
fs.snapshot( new Runnable()
{
@Override
public void run()
{
db1.shutdown();
}
} );
removeDoneEntriesFromLog( new File( archivedLogFilename.getParent(), logicalLogFilename + ".1" ) );
GraphDatabaseAPI db2 = (GraphDatabaseAPI) new TestGraphDatabaseFactory()
.setFileSystem( fs.get() )
.newImpermanentDatabaseBuilder( originStoreDir.getPath() )
.setConfig( InternalAbstractGraphDatabase.Configuration.logical_log, logicalLogFilename )
.newGraphDatabase();
ds = xaDs( db2 );
archivedLogFilename = ds.getFileName( ds.getCurrentLogVersion() );
db2.shutdown();
List<LogEntry> logEntries = filterDoneEntries( logEntries( fs.get(), archivedLogFilename ) );
String errorMessage = "DONE entries should be in order: " + logEntries;
int prev = 0;
for ( LogEntry entry : logEntries )
{
int current = entry.getIdentifier();
assertThat( errorMessage, current, greaterThan( prev ) );
prev = current;
}
}
private void removeDoneEntriesFromLog( File archivedLogFilename ) throws IOException
{
LogTestUtils.LogHook<LogEntry> doneEntryFilter = new LogTestUtils.LogHookAdapter<LogEntry>()
{
@Override
public boolean accept( LogEntry item )
{
return !(item instanceof LogEntry.Done);
}
};
EphemeralFileSystemAbstraction fsa = fs.get();
File tempFile = filterNeostoreLogicalLog( fsa, archivedLogFilename, doneEntryFilter );
fsa.deleteFile( archivedLogFilename );
fsa.renameFile( tempFile, archivedLogFilename );
}
private List<LogEntry> filterDoneEntries( List<LogEntry> logEntries )
{
Predicate<? super LogEntry> doneEntryPredicate = new Predicate<LogEntry>()
{
@Override
public boolean accept( LogEntry item )
{
return item instanceof LogEntry.Done;
}
};
return Iterables.toList( Iterables.filter( doneEntryPredicate, logEntries ) );
}
@Rule public EphemeralFileSystemRule fs = new EphemeralFileSystemRule();
}
| 0true
|
community_kernel_src_test_java_org_neo4j_kernel_impl_transaction_xaframework_TestApplyTransactions.java
|
134 |
@Test
public class UnsafeConverterTest extends AbstractConverterTest {
@BeforeClass
public void beforeClass() {
converter = new OUnsafeBinaryConverter();
}
@Override
public void testPutIntBigEndian() {
super.testPutIntBigEndian();
}
@Override
public void testPutIntLittleEndian() {
super.testPutIntLittleEndian();
}
@Override
public void testPutLongBigEndian() {
super.testPutLongBigEndian();
}
@Override
public void testPutLongLittleEndian() {
super.testPutLongLittleEndian();
}
@Override
public void testPutShortBigEndian() {
super.testPutShortBigEndian();
}
@Override
public void testPutShortLittleEndian() {
super.testPutShortLittleEndian();
}
@Override
public void testPutCharBigEndian() {
super.testPutCharBigEndian();
}
@Override
public void testPutCharLittleEndian() {
super.testPutCharLittleEndian();
}
}
| 0true
|
commons_src_test_java_com_orientechnologies_common_serialization_UnsafeConverterTest.java
|
1,717 |
@Service("blDynamicEntityRemoteService")
@Transactional(value="blTransactionManager", rollbackFor = ServiceException.class)
public class DynamicEntityRemoteService implements DynamicEntityService, DynamicEntityRemote, ApplicationContextAware {
public static final String DEFAULTPERSISTENCEMANAGERREF = "blPersistenceManager";
private static final Log LOG = LogFactory.getLog(DynamicEntityRemoteService.class);
protected static final Map<BatchPersistencePackage, BatchDynamicResultSet> METADATA_CACHE = MapUtils.synchronizedMap(new LRUMap<BatchPersistencePackage, BatchDynamicResultSet>(100, 1000));
protected String persistenceManagerRef = DEFAULTPERSISTENCEMANAGERREF;
private ApplicationContext applicationContext;
@Resource(name="blExploitProtectionService")
protected ExploitProtectionService exploitProtectionService;
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
this.applicationContext = applicationContext;
}
protected ServiceException recreateSpecificServiceException(ServiceException e, String message, Throwable cause) {
try {
ServiceException newException;
if (cause == null) {
Constructor constructor = e.getClass().getConstructor(String.class);
newException = (ServiceException) constructor.newInstance(message);
} else {
Constructor constructor = e.getClass().getConstructor(String.class, Throwable.class);
newException = (ServiceException) constructor.newInstance(message, cause);
}
return newException;
} catch (Exception e1) {
throw new RuntimeException(e1);
}
}
@Override
public DynamicResultSet inspect(PersistencePackage persistencePackage) throws ServiceException {
String ceilingEntityFullyQualifiedClassname = persistencePackage.getCeilingEntityFullyQualifiedClassname();
try {
PersistenceManager persistenceManager = (PersistenceManager) applicationContext.getBean(persistenceManagerRef);
persistenceManager.setTargetMode(TargetModeType.SANDBOX);
return persistenceManager.inspect(persistencePackage);
} catch (ServiceException e) {
String message = exploitProtectionService.cleanString(e.getMessage());
throw recreateSpecificServiceException(e, message, e.getCause());
} catch (Exception e) {
LOG.error("Problem inspecting results for " + ceilingEntityFullyQualifiedClassname, e);
throw new ServiceException(exploitProtectionService.cleanString("Unable to fetch results for " + ceilingEntityFullyQualifiedClassname), e);
}
}
@Override
public DynamicResultSet fetch(PersistencePackage persistencePackage, CriteriaTransferObject cto) throws ServiceException {
try {
PersistenceManager persistenceManager = (PersistenceManager) applicationContext.getBean(persistenceManagerRef);
persistenceManager.setTargetMode(TargetModeType.SANDBOX);
return persistenceManager.fetch(persistencePackage, cto);
} catch (ServiceException e) {
LOG.error("Problem fetching results for " + persistencePackage.getCeilingEntityFullyQualifiedClassname(), e);
String message = exploitProtectionService.cleanString(e.getMessage());
throw recreateSpecificServiceException(e, message, e.getCause());
}
}
protected void cleanEntity(Entity entity) throws ServiceException {
Property currentProperty = null;
try {
for (Property property : entity.getProperties()) {
currentProperty = property;
property.setRawValue(property.getValue());
property.setValue(exploitProtectionService.cleanStringWithResults(property.getValue()));
property.setUnHtmlEncodedValue(StringEscapeUtils.unescapeHtml(property.getValue()));
}
} catch (CleanStringException e) {
StringBuilder sb = new StringBuilder();
for (int j=0;j<e.getCleanResults().getNumberOfErrors();j++){
sb.append(j+1);
sb.append(") ");
sb.append((String) e.getCleanResults().getErrorMessages().get(j));
sb.append("\n");
}
sb.append("\nNote - ");
sb.append(exploitProtectionService.getAntiSamyPolicyFileLocation());
sb.append(" policy in effect. Set a new policy file to modify validation behavior/strictness.");
entity.addValidationError(currentProperty.getName(), sb.toString());
}
}
@Override
public Entity add(PersistencePackage persistencePackage) throws ServiceException {
cleanEntity(persistencePackage.getEntity());
if (persistencePackage.getEntity().isValidationFailure()) {
return persistencePackage.getEntity();
}
try {
PersistenceManager persistenceManager = (PersistenceManager) applicationContext.getBean(persistenceManagerRef);
persistenceManager.setTargetMode(TargetModeType.SANDBOX);
return persistenceManager.add(persistencePackage);
} catch (ServiceException e) {
//immediately throw validation exceptions without printing a stack trace
if (e instanceof ValidationException) {
throw e;
} else if (e.getCause() instanceof ValidationException) {
throw (ValidationException) e.getCause();
}
String message = exploitProtectionService.cleanString(e.getMessage());
LOG.error("Problem adding new " + persistencePackage.getCeilingEntityFullyQualifiedClassname(), e);
throw recreateSpecificServiceException(e, message, e.getCause());
}
}
@Override
public Entity update(PersistencePackage persistencePackage) throws ServiceException {
cleanEntity(persistencePackage.getEntity());
if (persistencePackage.getEntity().isValidationFailure()) {
return persistencePackage.getEntity();
}
try {
PersistenceManager persistenceManager = (PersistenceManager) applicationContext.getBean(persistenceManagerRef);
persistenceManager.setTargetMode(TargetModeType.SANDBOX);
return persistenceManager.update(persistencePackage);
} catch (ServiceException e) {
//immediately throw validation exceptions without printing a stack trace
if (e instanceof ValidationException) {
throw e;
} else if (e.getCause() instanceof ValidationException) {
throw (ValidationException) e.getCause();
}
LOG.error("Problem updating " + persistencePackage.getCeilingEntityFullyQualifiedClassname(), e);
String message = exploitProtectionService.cleanString(e.getMessage());
throw recreateSpecificServiceException(e, message, e.getCause());
}
}
@Override
public void remove(PersistencePackage persistencePackage) throws ServiceException {
try {
PersistenceManager persistenceManager = (PersistenceManager) applicationContext.getBean(persistenceManagerRef);
persistenceManager.setTargetMode(TargetModeType.SANDBOX);
persistenceManager.remove(persistencePackage);
} catch (ServiceException e) {
LOG.error("Problem removing " + persistencePackage.getCeilingEntityFullyQualifiedClassname(), e);
String message = exploitProtectionService.cleanString(e.getMessage());
throw recreateSpecificServiceException(e, message, e.getCause());
}
}
@Override
public String getPersistenceManagerRef() {
return persistenceManagerRef;
}
@Override
public void setPersistenceManagerRef(String persistenceManagerRef) {
this.persistenceManagerRef = persistenceManagerRef;
}
}
| 1no label
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_service_DynamicEntityRemoteService.java
|
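recreateSpecificServiceException above rebuilds an exception of the same runtime class via reflection, so callers still catch the specific subtype while the message payload is XSS-cleaned. A standalone sketch of that trick (names are mine):

import java.lang.reflect.Constructor;

public final class ExceptionRebuilder {
    static <E extends Exception> E withMessage(E original, String cleanMessage) throws Exception {
        // look up the (String, Throwable) constructor on the *runtime* class
        Constructor<? extends Exception> ctor =
                original.getClass().getConstructor(String.class, Throwable.class);
        @SuppressWarnings("unchecked")
        E rebuilt = (E) ctor.newInstance(cleanMessage, original.getCause());
        return rebuilt;
    }

    public static void main(String[] args) throws Exception {
        IllegalStateException dirty = new IllegalStateException("<script>alert(1)</script>");
        Exception clean = withMessage(dirty, "&lt;script&gt;alert(1)&lt;/script&gt;");
        System.out.println(clean.getClass().getSimpleName() + ": " + clean.getMessage());
    }
}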
1,271 |
@Deprecated
public class ShippingServiceType implements Serializable, BroadleafEnumerationType {
private static final long serialVersionUID = 1L;
private static final Map<String, ShippingServiceType> TYPES = new LinkedHashMap<String, ShippingServiceType>();
public static final ShippingServiceType BANDED_SHIPPING = new ShippingServiceType("BANDED_SHIPPING", "Banded Shipping");
public static final ShippingServiceType USPS = new ShippingServiceType("USPS", "United States Postal Service");
public static final ShippingServiceType FED_EX = new ShippingServiceType("FED_EX", "Federal Express");
public static final ShippingServiceType UPS = new ShippingServiceType("UPS", "United Parcel Service");
public static final ShippingServiceType DHL = new ShippingServiceType("DHL", "DHL");
public static ShippingServiceType getInstance(final String type) {
return TYPES.get(type);
}
private String type;
private String friendlyType;
public ShippingServiceType() {
//do nothing
}
public ShippingServiceType(final String type, final String friendlyType) {
this.friendlyType = friendlyType;
setType(type);
}
@Override
public String getType() {
return type;
}
@Override
public String getFriendlyType() {
return friendlyType;
}
private void setType(final String type) {
this.type = type;
if (!TYPES.containsKey(type)) {
TYPES.put(type, this);
}
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
ShippingServiceType other = (ShippingServiceType) obj;
if (type == null) {
if (other.type != null)
return false;
} else if (!type.equals(other.type))
return false;
return true;
}
}
| 1no label
|
core_broadleaf-framework_src_main_java_org_broadleafcommerce_core_pricing_service_workflow_type_ShippingServiceType.java
|
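A usage sketch of the extensible-enumeration pattern above: unlike a real enum, any module can register a new instance simply by constructing it, and getInstance() canonicalizes lookups back to the registered object. CARRIER_PIGEON below is hypothetical and assumes the same package as ShippingServiceType:

public class ShippingTypeDemo {
    // a hypothetical third-party carrier, registered as a side effect of construction
    static final ShippingServiceType CARRIER_PIGEON =
            new ShippingServiceType("CARRIER_PIGEON", "Carrier Pigeon Express");

    public static void main(String[] args) {
        ShippingServiceType resolved = ShippingServiceType.getInstance("CARRIER_PIGEON");
        System.out.println(resolved.getFriendlyType()); // "Carrier Pigeon Express"
        System.out.println(resolved == CARRIER_PIGEON); // true: the TYPES map canonicalizes
    }
}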
842 |
public abstract class AbstractAlterRequest extends PartitionClientRequest implements Portable, SecureRequest {
protected String name;
protected Data function;
public AbstractAlterRequest() {
}
public AbstractAlterRequest(String name, Data function) {
this.name = name;
this.function = function;
}
@Override
protected int getPartition() {
Data key = getClientEngine().getSerializationService().toData(name);
return getClientEngine().getPartitionService().getPartitionId(key);
}
@Override
public String getServiceName() {
return AtomicReferenceService.SERVICE_NAME;
}
@Override
public int getFactoryId() {
return AtomicReferencePortableHook.F_ID;
}
@Override
public void write(PortableWriter writer) throws IOException {
writer.writeUTF("n", name);
final ObjectDataOutput out = writer.getRawDataOutput();
IOUtil.writeNullableData(out, function);
}
@Override
public void read(PortableReader reader) throws IOException {
name = reader.readUTF("n");
ObjectDataInput in = reader.getRawDataInput();
function = IOUtil.readNullableData(in);
}
@Override
public Permission getRequiredPermission() {
return new AtomicReferencePermission(name, ActionConstants.ACTION_MODIFY);
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_concurrent_atomicreference_client_AbstractAlterRequest.java
|
1,188 |
public class CeylonBuilder extends IncrementalProjectBuilder {
public static final String CEYLON_CLASSES_FOLDER_NAME = ".exploded";
/**
* Extension ID of the Ceylon builder, which matches the ID in the
* corresponding extension definition in plugin.xml.
*/
public static final String BUILDER_ID = PLUGIN_ID + ".ceylonBuilder";
/**
* A marker ID that identifies problems
*/
public static final String PROBLEM_MARKER_ID = PLUGIN_ID + ".ceylonProblem";
/**
* A marker ID that identifies module dependency problems
*/
public static final String MODULE_DEPENDENCY_PROBLEM_MARKER_ID = PLUGIN_ID + ".ceylonModuleDependencyProblem";
/**
* A marker ID that identifies character encoding problems
*/
public static final String CHARSET_PROBLEM_MARKER_ID = PLUGIN_ID + ".ceylonCharsetProblem";
/**
* A marker ID that identifies Ceylon configuration files that are out of sync
*/
public static final String CEYLON_CONFIG_NOT_IN_SYNC_MARKER = PLUGIN_ID + ".ceylonConfigProblem";
/**
* A marker ID that identifies tasks
*/
public static final String TASK_MARKER_ID = PLUGIN_ID + ".ceylonTask";
public static final String SOURCE = "Ceylon";
static {
ProducedTypeCache.setEnabledByDefault(false);
}
public static <T> T doWithCeylonModelCaching(final Callable<T> action)
throws CoreException {
boolean was = ProducedTypeCache.setEnabled(true);
try {
return action.call();
} catch(CoreException ce) {
throw ce;
} catch(Exception e) {
if (e instanceof RuntimeException) {
throw (RuntimeException) e;
} else {
throw new RuntimeException(e);
}
} finally {
ProducedTypeCache.setEnabled(was);
}
}
private static final class BuildFileManager extends CeyloncFileManager {
private final IProject project;
final boolean explodeModules;
private Map<RegularFileObject, Set<String>> inputFilesToGenerate = null;
private BuildFileManager(com.sun.tools.javac.util.Context context,
boolean register, Charset charset, IProject project, Map<RegularFileObject, Set<String>> inputFilesToGenerate) {
super(context, register, charset);
this.project = project;
explodeModules = isExplodeModulesEnabled(project);
this.inputFilesToGenerate = inputFilesToGenerate;
}
public static RegularFileObject getSourceFile(FileObject fileObject) {
JavaFileObject sourceJavaFileObject;
if (fileObject instanceof JavaFileObject
&& ((JavaFileObject) fileObject).getKind() == javax.tools.JavaFileObject.Kind.SOURCE){
if (fileObject instanceof CeylonFileObject) {
sourceJavaFileObject = ((CeylonFileObject) fileObject).getFile();
} else {
sourceJavaFileObject = (JavaFileObject) fileObject;
}
if (sourceJavaFileObject instanceof RegularFileObject) {
return ((RegularFileObject) sourceJavaFileObject);
}
}
return null;
}
@Override
protected JavaFileObject getFileForOutput(Location location,
final RelativeFile fileName, FileObject sibling)
throws IOException {
RegularFileObject sourceFile = getSourceFile(sibling);
if (sourceFile != null) {
Set<String> expectedClasses = inputFilesToGenerate.get(sourceFile);
String shortname = fileName.basename();
if (shortname.endsWith(".class")) {
shortname = shortname.substring(0, shortname.length() - 6);
}
expectedClasses.remove(shortname);
if (expectedClasses.isEmpty()) {
inputFilesToGenerate.remove(sourceFile);
}
}
JavaFileObject javaFileObject = super.getFileForOutput(location, fileName, sibling);
if (explodeModules &&
javaFileObject instanceof JarEntryFileObject &&
sibling instanceof CeylonFileObject) {
final File ceylonOutputDirectory = getCeylonClassesOutputDirectory(project);
final File classFile = fileName.getFile(ceylonOutputDirectory);
classFile.getParentFile().mkdirs();
return new ExplodingJavaFileObject(classFile, fileName,
javaFileObject);
}
return javaFileObject;
}
@Override
protected String getCurrentWorkingDir() {
return project.getLocation().toFile().getAbsolutePath();
}
public void addUngeneratedErrors() {
if (inputFilesToGenerate.size() > 0) {
try {
String markerId = PROBLEM_MARKER_ID + ".backend";
String message = "Some classes are missing from the generated module archives, probably because of an unexpected error in the Java backend compiler.\n"
+ "The detail of missing classes is given in the Information markers.";
IMarker marker = project.createMarker(markerId);
marker.setAttribute(IMarker.MESSAGE, message);
marker.setAttribute(IMarker.SEVERITY, IMarker.SEVERITY_ERROR);
marker.setAttribute(IMarker.PRIORITY, IMarker.PRIORITY_HIGH);
}
catch (CoreException ce) {
ce.printStackTrace();
}
}
for (RegularFileObject sourceFileNotGenerated : inputFilesToGenerate.keySet()) {
IPath absolutePath = new Path(sourceFileNotGenerated.getName());
IFile file = null;
for (IFolder sourceDirectory : CeylonBuilder.getSourceFolders(project)) {
IPath sourceDirPath = sourceDirectory.getLocation();
if (sourceDirPath.isPrefixOf(absolutePath)) {
IResource r = sourceDirectory.findMember(absolutePath.makeRelativeTo(sourceDirPath));
if (r instanceof IFile) {
file = (IFile) r;
}
}
}
if (file == null) {
file = getWorkspace().getRoot()
.getFileForLocation(new Path(sourceFileNotGenerated.getName()));
}
if (file != null) {
try {
String markerId = PROBLEM_MARKER_ID + ".backend";
String message = "The following classes were not generated by the backend :";
Iterator<String> classes = inputFilesToGenerate.get(sourceFileNotGenerated).iterator();
String line = "";
if (classes.hasNext()) {
line += "\n " + classes.next();
}
while (classes.hasNext()) {
if (line.length() > 70) {
message += line;
line = "\n ";
} else {
line += ", ";
}
line += classes.next();
}
if (! line.trim().isEmpty()) {
message += line;
}
IMarker marker = file.createMarker(markerId);
marker.setAttribute(IMarker.MESSAGE, message);
marker.setAttribute(IMarker.SEVERITY, IMarker.SEVERITY_INFO);
}
catch (CoreException ce) {
ce.printStackTrace();
}
}
}
}
}
public static enum ModelState {
Missing,
Parsing,
Parsed,
TypeChecking,
TypeChecked,
Compiled
};
final static Map<IProject, ModelState> modelStates = new HashMap<IProject, ModelState>();
private final static Map<IProject, TypeChecker> typeCheckers = new HashMap<IProject, TypeChecker>();
private final static Map<IProject, List<IFile>> projectFiles = new HashMap<IProject, List<IFile>>();
private static Set<IProject> containersInitialized = new HashSet<IProject>();
private final static Map<IProject, RepositoryManager> projectRepositoryManagers = new HashMap<IProject, RepositoryManager>();
private final static Map<IProject, ModuleDependencies> projectModuleDependencies = new HashMap<IProject, ModuleDependencies>();
private final static Set<ICeylonModelListener> modelListeners = new LinkedHashSet<ICeylonModelListener>();
public static void addModelListener(ICeylonModelListener listener) {
modelListeners.add(listener);
}
public static void removeModelListener(ICeylonModelListener listener) {
modelListeners.remove(listener);
}
public static final String CEYLON_CONSOLE= "Ceylon Build";
//private long startTime;
public static ModelState getModelState(IProject project) {
ModelState modelState = modelStates.get(project);
if (modelState == null) {
return ModelState.Missing;
}
return modelState;
}
public static boolean isModelTypeChecked(IProject project) {
ModelState modelState = getModelState(project);
return modelState.ordinal() >= ModelState.TypeChecked.ordinal();
}
public static boolean isModelParsed(IProject project) {
ModelState modelState = getModelState(project);
return modelState.ordinal() >= ModelState.Parsed.ordinal();
}
public static List<PhasedUnit> getUnits(IProject project) {
if (! isModelParsed(project)) {
return Collections.emptyList();
}
List<PhasedUnit> result = new ArrayList<PhasedUnit>();
TypeChecker tc = typeCheckers.get(project);
if (tc!=null) {
for (PhasedUnit pu: tc.getPhasedUnits().getPhasedUnits()) {
result.add(pu);
}
}
return result;
}
public static List<PhasedUnit> getUnits() {
List<PhasedUnit> result = new ArrayList<PhasedUnit>();
for (IProject project : typeCheckers.keySet()) {
if (isModelParsed(project)) {
TypeChecker tc = typeCheckers.get(project);
for (PhasedUnit pu: tc.getPhasedUnits().getPhasedUnits()) {
result.add(pu);
}
}
}
return result;
}
public static List<PhasedUnit> getUnits(String[] projects) {
List<PhasedUnit> result = new ArrayList<PhasedUnit>();
if (projects!=null) {
for (Map.Entry<IProject, TypeChecker> me: typeCheckers.entrySet()) {
for (String pname: projects) {
if (me.getKey().getName().equals(pname)) {
IProject project = me.getKey();
if (isModelParsed(project)) {
result.addAll(me.getValue().getPhasedUnits().getPhasedUnits());
}
}
}
}
}
return result;
}
public String getBuilderID() {
return BUILDER_ID;
}
public static boolean isCeylon(IFile file) {
String ext = file.getFileExtension();
return ext!=null && ext.equals("ceylon");
}
public static boolean isJava(IFile file) {
return JavaCore.isJavaLikeFileName(file.getName());
}
public static boolean isJavascript(IFile file) {
String ext = file.getFileExtension();
return ext!=null && ext.equals("js");
}
/*
* A source file is compilable and located in a Ceylon source folder
*/
public static boolean isSourceFile(IFile file) {
// If the file is not in a ceylon source folder
// it's not considered as a source file
// even if it is compilable
return isCompilable(file) && isInSourceFolder(file);
}
public static boolean isCompilable(IFile file) {
if (isCeylon(file)) {
return true;
}
if (isJava(file) && compileToJava(file.getProject())) {
return true;
}
if (isJavascript(file) && compileToJs(file.getProject())) {
return true;
}
return false;
}
public static boolean isResourceFile(IFile file) {
RootFolderType rootFolderType = getRootFolderType(file);
return rootFolderType == RootFolderType.RESOURCE;
}
public static JDTModelLoader getModelLoader(TypeChecker tc) {
return (JDTModelLoader) ((JDTModuleManager) tc.getPhasedUnits()
.getModuleManager()).getModelLoader();
}
public static JDTModelLoader getProjectModelLoader(IProject project) {
TypeChecker typeChecker = getProjectTypeChecker(project);
if (typeChecker == null) {
return null;
}
return getModelLoader(typeChecker);
}
public static JDTModuleManager getProjectModuleManager(IProject project) {
JDTModelLoader modelLoader = getProjectModelLoader(project);
if (modelLoader == null) {
return null;
}
return modelLoader.getModuleManager();
}
public final static class BooleanHolder {
public boolean value;
}
public static class CeylonBuildHook {
protected void startBuild(int kind, @SuppressWarnings("rawtypes") Map args,
IProject javaProject, IBuildConfiguration config, IBuildContext context, IProgressMonitor monitor) throws CoreException {}
protected void deltasAnalyzed(List<IResourceDelta> currentDeltas,
BooleanHolder sourceModified, BooleanHolder mustDoFullBuild,
BooleanHolder mustResolveClasspathContainer, boolean mustContinueBuild) {}
protected void resolvingClasspathContainer(
List<IClasspathContainer> cpContainers) {}
protected void setAndRefreshClasspathContainer() {}
protected void doFullBuild() {}
protected void parseCeylonModel() {}
protected void doIncrementalBuild() {}
protected void fullTypeCheckDuringIncrementalBuild() {}
protected void incrementalBuildChangedSources(Set<IFile> changedSources) {}
protected void incrementalBuildSources(Set<IFile> changedSources,
List<IFile> filesToRemove, Collection<IFile> sourcesToCompile) {}
protected void incrementalBuildResult(List<PhasedUnit> builtPhasedUnits) {}
protected void beforeGeneratingBinaries() {}
protected void afterGeneratingBinaries() {}
protected void scheduleReentrantBuild() {}
protected void afterReentrantBuild() {}
protected void endBuild() {}
};
public static final CeylonBuildHook noOpHook = new CeylonBuildHook();
public static enum RootFolderType {
SOURCE,
RESOURCE
}
public static final QualifiedName RESOURCE_PROPERTY_PACKAGE_MODEL = new QualifiedName(CeylonPlugin.PLUGIN_ID, "resourceProperty_packageModel");
public static final QualifiedName RESOURCE_PROPERTY_ROOT_FOLDER = new QualifiedName(CeylonPlugin.PLUGIN_ID, "resourceProperty_rootFolder");
public static final QualifiedName RESOURCE_PROPERTY_ROOT_FOLDER_TYPE = new QualifiedName(CeylonPlugin.PLUGIN_ID, "resourceProperty_rootFolderType");
private static CeylonBuildHook buildHook = new CeylonBuildHook() {
List<CeylonBuildHook> contributedHooks = new LinkedList<>();
private synchronized void resetContributedHooks() {
contributedHooks.clear();
for (IConfigurationElement confElement : Platform.getExtensionRegistry().getConfigurationElementsFor(CeylonPlugin.PLUGIN_ID + ".ceylonBuildHook")) {
try {
Object extension = confElement.createExecutableExtension("class");
if (extension instanceof ICeylonBuildHookProvider) {
CeylonBuildHook hook = ((ICeylonBuildHookProvider) extension).getHook();
if (hook != null) {
contributedHooks.add(hook);
}
}
} catch (CoreException e) {
e.printStackTrace();
}
}
}
protected void startBuild(int kind, @SuppressWarnings("rawtypes") Map args,
IProject javaProject, IBuildConfiguration config, IBuildContext context, IProgressMonitor monitor) throws CoreException {
resetContributedHooks();
for (CeylonBuildHook hook : contributedHooks) {
hook.startBuild(kind, args, javaProject, config, context, monitor);
}
}
protected void deltasAnalyzed(List<IResourceDelta> currentDeltas,
BooleanHolder sourceModified, BooleanHolder mustDoFullBuild,
BooleanHolder mustResolveClasspathContainer, boolean mustContinueBuild) {
for (CeylonBuildHook hook : contributedHooks) {
hook.deltasAnalyzed(currentDeltas, sourceModified, mustDoFullBuild, mustResolveClasspathContainer, mustContinueBuild);
}
}
protected void resolvingClasspathContainer(
List<IClasspathContainer> cpContainers) {
for (CeylonBuildHook hook : contributedHooks) {
hook.resolvingClasspathContainer(cpContainers);
}
}
protected void setAndRefreshClasspathContainer() {
for (CeylonBuildHook hook : contributedHooks) {
hook.setAndRefreshClasspathContainer();
}
}
protected void doFullBuild() {
for (CeylonBuildHook hook : contributedHooks) {
hook.doFullBuild();
}
}
protected void parseCeylonModel() {
for (CeylonBuildHook hook : contributedHooks) {
hook.parseCeylonModel();
}
}
protected void doIncrementalBuild() {
for (CeylonBuildHook hook : contributedHooks) {
hook.doIncrementalBuild();
}
}
protected void fullTypeCheckDuringIncrementalBuild() {
for (CeylonBuildHook hook : contributedHooks) {
hook.fullTypeCheckDuringIncrementalBuild();
}
}
protected void incrementalBuildChangedSources(Set<IFile> changedSources) {
for (CeylonBuildHook hook : contributedHooks) {
hook.incrementalBuildChangedSources(changedSources);
}
}
protected void incrementalBuildSources(Set<IFile> changedSources,
List<IFile> filesToRemove, Collection<IFile> sourcesToCompile) {
for (CeylonBuildHook hook : contributedHooks) {
hook.incrementalBuildSources(changedSources, filesToRemove, sourcesToCompile);
}
}
protected void incrementalBuildResult(List<PhasedUnit> builtPhasedUnits) {
for (CeylonBuildHook hook : contributedHooks) {
hook.incrementalBuildResult(builtPhasedUnits);
}
}
protected void beforeGeneratingBinaries() {
for (CeylonBuildHook hook : contributedHooks) {
hook.beforeGeneratingBinaries();
}
}
protected void afterGeneratingBinaries() {
for (CeylonBuildHook hook : contributedHooks) {
hook.afterGeneratingBinaries();
}
}
protected void scheduleReentrantBuild() {
for (CeylonBuildHook hook : contributedHooks) {
hook.scheduleReentrantBuild();
}
}
protected void afterReentrantBuild() {
for (CeylonBuildHook hook : contributedHooks) {
hook.afterReentrantBuild();
}
}
protected void endBuild() {
for (CeylonBuildHook hook : contributedHooks) {
hook.endBuild();
}
}
};
public static CeylonBuildHook replaceHook(CeylonBuildHook hook){
CeylonBuildHook previousHook = buildHook;
buildHook = hook;
return previousHook;
}
private static WeakReference<Job> notificationJobReference = null;
private static synchronized Job getNotificationJob() {
Job job = null;
if (notificationJobReference != null) {
job = notificationJobReference.get();
}
if (job == null) {
for (Job j : Job.getJobManager().find(null)) {
if (NotificationManager.class.equals(j.getClass().getEnclosingClass())) {
job = j;
notificationJobReference = new WeakReference<Job>(job);
break;
}
}
}
return job;
}
public static void waitForUpToDateJavaModel(long timeout, IProject project, IProgressMonitor monitor) {
Job job = getNotificationJob();
if (job == null) {
return;
}
monitor.subTask("Taking in account the resource changes of the previous builds" + project != null ? project.getName() : "");
long timeLimit = System.currentTimeMillis() + timeout;
while (job.getState() != Job.NONE) {
boolean stopWaiting = false;
if (job.isBlocking()) {
stopWaiting = true;
}
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
stopWaiting = true;
}
if (System.currentTimeMillis() > timeLimit) {
stopWaiting = true;
}
if (stopWaiting) {
break;
}
}
}
@Override
protected IProject[] build(final int kind, @SuppressWarnings("rawtypes") Map args, IProgressMonitor mon)
throws CoreException {
final IProject project = getProject();
final IJavaProject javaProject = JavaCore.create(project);
final SubMonitor monitor = SubMonitor.convert(mon, "Ceylon build of project " + project.getName(), 100);
try {
buildHook.startBuild(kind, args, project, getBuildConfig(), getContext(), monitor);
} catch (CoreException e) {
if (e.getStatus().getSeverity() == IStatus.CANCEL) {
return project.getReferencedProjects();
}
}
try {
IMarker[] buildMarkers = project.findMarkers(IJavaModelMarker.BUILDPATH_PROBLEM_MARKER, true, DEPTH_ZERO);
for (IMarker m: buildMarkers) {
Object message = m.getAttribute(IMarker.MESSAGE);
Object sourceId = m.getAttribute(IMarker.SOURCE_ID);
if (message!=null && message.toString().endsWith("'.exploded'")) {
//ignore message from JDT about missing JDTClasses dir
m.delete();
}
if (sourceId!=null && sourceId.equals(PLUGIN_ID)) {
// Delete markers added by this builder since they will be added again just after.
m.delete();
}
else if (message!=null && message.toString().contains("is missing required Java project:")) {
return project.getReferencedProjects();
}
}
List<IClasspathContainer> cpContainers = getCeylonClasspathContainers(javaProject);
if (! preBuildChecks(project, javaProject, cpContainers)) {
return project.getReferencedProjects();
}
List<PhasedUnit> builtPhasedUnits = Collections.emptyList();
final BooleanHolder mustDoFullBuild = new BooleanHolder();
final BooleanHolder mustResolveClasspathContainer = new BooleanHolder();
final IResourceDelta currentDelta = getDelta(project);
List<IResourceDelta> projectDeltas = new ArrayList<IResourceDelta>();
projectDeltas.add(currentDelta);
for (IProject requiredProject : project.getReferencedProjects()) {
projectDeltas.add(getDelta(requiredProject));
}
boolean somethingToDo = chooseBuildTypeFromDeltas(kind, project,
projectDeltas, mustDoFullBuild, mustResolveClasspathContainer);
if (!somethingToDo && (args==null || !args.containsKey(BUILDER_ID + ".reentrant"))) {
return project.getReferencedProjects();
}
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
if (mustResolveClasspathContainer.value) {
if (cpContainers != null) {
buildHook.resolvingClasspathContainer(cpContainers);
for (IClasspathContainer container: cpContainers) {
if (container instanceof CeylonProjectModulesContainer) {
CeylonProjectModulesContainer applicationModulesContainer = (CeylonProjectModulesContainer) container;
boolean changed = applicationModulesContainer.resolveClasspath(monitor.newChild(19, PREPEND_MAIN_LABEL_TO_SUBTASK), true);
if(changed) {
buildHook.setAndRefreshClasspathContainer();
JavaCore.setClasspathContainer(applicationModulesContainer.getPath(),
new IJavaProject[]{javaProject},
new IClasspathContainer[]{null} , monitor);
applicationModulesContainer.refreshClasspathContainer(monitor);
}
}
}
}
}
boolean mustWarmupCompletionProcessor = false;
final TypeChecker typeChecker;
Collection<IFile> filesForBinaryGeneration = Collections.emptyList();
if (mustDoFullBuild.value) {
buildHook.doFullBuild();
monitor.setTaskName("Full Ceylon build of project " + project.getName());
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
cleanupModules(monitor, project);
cleanupJdtClasses(monitor, project);
monitor.subTask("Clearing existing markers of project " + project.getName());
clearProjectMarkers(project, true, false);
clearMarkersOn(project, true);
monitor.worked(1);
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
//if (! getModelState(project).equals(ModelState.Parsed)) {
if (!mustResolveClasspathContainer.value) {
monitor.subTask("Parsing source of project " + project.getName());
//if we already resolved the classpath, the
//model has already been freshly-parsed
buildHook.parseCeylonModel();
typeChecker = parseCeylonModel(project,
monitor.newChild(19, PREPEND_MAIN_LABEL_TO_SUBTASK));
}
else {
typeChecker = getProjectTypeChecker(project);
}
monitor.setWorkRemaining(80);
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
monitor.subTask("Typechecking all source files of project " + project.getName());
modelStates.put(project, ModelState.TypeChecking);
builtPhasedUnits = doWithCeylonModelCaching(new Callable<List<PhasedUnit>>() {
@Override
public List<PhasedUnit> call() throws Exception {
return fullTypeCheck(project, typeChecker,
monitor.newChild(30, PREPEND_MAIN_LABEL_TO_SUBTASK ));
}
});
modelStates.put(project, ModelState.TypeChecked);
filesForBinaryGeneration = getProjectFiles(project);
mustWarmupCompletionProcessor = true;
}
else {
buildHook.doIncrementalBuild();
typeChecker = typeCheckers.get(project);
PhasedUnits phasedUnits = typeChecker.getPhasedUnits();
List<IFile> filesToRemove = new ArrayList<IFile>();
Set<IFile> changedFiles = new HashSet<IFile>();
monitor.subTask("Scanning deltas of project " + project.getName());
scanChanges(currentDelta, projectDeltas, filesToRemove,
getProjectFiles(project), changedFiles, monitor);
monitor.worked(4);
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
monitor.subTask("Cleaning removed files for project " + project.getName());
cleanRemovedFilesFromCeylonModel(filesToRemove, phasedUnits, project);
cleanRemovedFilesFromOutputs(filesToRemove, project);
monitor.worked(1);
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
if (!isModelTypeChecked(project)) {
buildHook.fullTypeCheckDuringIncrementalBuild();
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
monitor.subTask("Clearing existing markers of project (except backend errors)" + project.getName());
clearProjectMarkers(project, true, false);
clearMarkersOn(project, false);
monitor.worked(1);
monitor.subTask("Initial typechecking all source files of project " + project.getName());
modelStates.put(project, ModelState.TypeChecking);
builtPhasedUnits = doWithCeylonModelCaching(new Callable<List<PhasedUnit>>() {
@Override
public List<PhasedUnit> call() throws Exception {
return fullTypeCheck(project, typeChecker,
monitor.newChild(22, PREPEND_MAIN_LABEL_TO_SUBTASK ));
}
});
modelStates.put(project, ModelState.TypeChecked);
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
monitor.subTask("Collecting dependencies of project " + project.getName());
// getConsoleStream().println(timedMessage("Collecting dependencies"));
collectDependencies(project, typeChecker, builtPhasedUnits);
monitor.worked(1);
monitor.subTask("Collecting problems for project "
+ project.getName());
addProblemAndTaskMarkers(builtPhasedUnits, project);
monitor.worked(1);
mustWarmupCompletionProcessor = true;
}
monitor.setWorkRemaining(70);
monitor.subTask("Incremental Ceylon build of project " + project.getName());
monitor.subTask("Scanning dependencies of deltas of project " + project.getName());
final Set<IFile> filesToCompile = new HashSet<>();
final Set<IFile> filesToTypecheck = new HashSet<>();
calculateDependencies(project, currentDelta,
changedFiles, typeChecker, phasedUnits, filesToTypecheck, filesToCompile, monitor);
monitor.worked(1);
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
buildHook.incrementalBuildSources(changedFiles, filesToRemove, filesToTypecheck);
clearProjectMarkers(project, true, false);
clearMarkersOn(filesToTypecheck, true);
clearMarkersOn(filesToCompile, true, true);
monitor.subTask("Compiling " + filesToTypecheck.size() + " source files in project " +
project.getName());
builtPhasedUnits = doWithCeylonModelCaching(new Callable<List<PhasedUnit>>() {
@Override
public List<PhasedUnit> call() throws Exception {
return incrementalBuild(project, filesToTypecheck,
monitor.newChild(19, PREPEND_MAIN_LABEL_TO_SUBTASK));
}
});
if (builtPhasedUnits.isEmpty() && filesToTypecheck.isEmpty() && filesToCompile.isEmpty()) {
if (mustWarmupCompletionProcessor) {
warmupCompletionProcessor(project);
}
return project.getReferencedProjects();
}
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
buildHook.incrementalBuildResult(builtPhasedUnits);
filesForBinaryGeneration = filesToCompile;
}
clearProjectMarkers(project, false, true);
monitor.setWorkRemaining(50);
monitor.subTask("Collecting problems for project "
+ project.getName());
addProblemAndTaskMarkers(builtPhasedUnits, project);
monitor.worked(1);
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
monitor.subTask("Collecting dependencies of project " + project.getName());
// getConsoleStream().println(timedMessage("Collecting dependencies"));
collectDependencies(project, typeChecker, builtPhasedUnits);
monitor.worked(4);
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
buildHook.beforeGeneratingBinaries();
monitor.subTask("Generating binaries for project " + project.getName());
final Collection<IFile> filesToProcess = filesForBinaryGeneration;
final Collection<PhasedUnit> unitsTypecheckedIncrementally = mustDoFullBuild.value ? Collections.<PhasedUnit>emptyList() : builtPhasedUnits;
cleanChangedFilesFromExplodedDirectory(filesToProcess, project);
doWithCeylonModelCaching(new Callable<Boolean>() {
@Override
public Boolean call() throws CoreException {
return generateBinaries(project, javaProject, unitsTypecheckedIncrementally,
filesToProcess, typeChecker,
monitor.newChild(45, PREPEND_MAIN_LABEL_TO_SUBTASK));
}
});
buildHook.afterGeneratingBinaries();
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
if (isExplodeModulesEnabled(project)) {
monitor.subTask("Rebuilding using exploded modules directory of " + project.getName());
scheduleIncrementalRebuild(args, project, monitor);
}
if (mustWarmupCompletionProcessor) {
warmupCompletionProcessor(project);
}
return project.getReferencedProjects();
}
finally {
monitor.done();
buildHook.endBuild();
}
}
/*
* Checks for global build errors and adds the relevant markers.
*
* Returns true if the build can continue, and false if it should stop.
*/
private boolean preBuildChecks(final IProject project,
final IJavaProject javaProject,
List<IClasspathContainer> cpContainers) throws CoreException,
JavaModelException {
boolean languageModuleContainerFound = false;
boolean applicationModulesContainerFound = false;
boolean buildCanContinue = true;
for (IClasspathContainer container : cpContainers) {
if (container instanceof CeylonLanguageModuleContainer) {
languageModuleContainerFound = true;
}
if (container instanceof CeylonProjectModulesContainer) {
applicationModulesContainerFound = true;
}
}
if (! languageModuleContainerFound) {
//if the ClassPathContainer is missing, add an error
IMarker marker = project.createMarker(IJavaModelMarker.BUILDPATH_PROBLEM_MARKER);
marker.setAttribute(IMarker.MESSAGE, "The Ceylon classpath container for the language module is not set on the project " +
" (try running Enable Ceylon Builder on the project)");
marker.setAttribute(IMarker.PRIORITY, IMarker.PRIORITY_HIGH);
marker.setAttribute(IMarker.SEVERITY, IMarker.SEVERITY_ERROR);
marker.setAttribute(IMarker.LOCATION, project.getName());
marker.setAttribute(IMarker.SOURCE_ID, PLUGIN_ID);
buildCanContinue = false;
}
if (! applicationModulesContainerFound) {
//if the ClassPathContainer is missing, add an error
IMarker marker = project.createMarker(IJavaModelMarker.BUILDPATH_PROBLEM_MARKER);
marker.setAttribute(IMarker.MESSAGE, "The Ceylon classpath container for application modules is not set on the project " +
" (try running Enable Ceylon Builder on the project)");
marker.setAttribute(IMarker.PRIORITY, IMarker.PRIORITY_HIGH);
marker.setAttribute(IMarker.SEVERITY, IMarker.SEVERITY_ERROR);
marker.setAttribute(IMarker.LOCATION, project.getName());
marker.setAttribute(IMarker.SOURCE_ID, PLUGIN_ID);
buildCanContinue = false;
}
/* Begin issue #471 */
ICommand[] builders = project.getDescription().getBuildSpec();
int javaOrder=0, ceylonOrder = 0;
for (int n=0; n<builders.length; n++) {
if (builders[n].getBuilderName().equals(JavaCore.BUILDER_ID)) {
javaOrder = n;
}
else if (builders[n].getBuilderName().equals(CeylonBuilder.BUILDER_ID)) {
ceylonOrder = n;
}
}
if (ceylonOrder < javaOrder) {
//if the build order is not correct, add an error and return
IMarker marker = project.createMarker(IJavaModelMarker.BUILDPATH_PROBLEM_MARKER);
marker.setAttribute(IMarker.MESSAGE, "The Ceylon Builder should run after the Java Builder. Change the order of builders in the project properties");
marker.setAttribute(IMarker.PRIORITY, IMarker.PRIORITY_HIGH);
marker.setAttribute(IMarker.SEVERITY, IMarker.SEVERITY_ERROR);
marker.setAttribute(IMarker.LOCATION, "Project " + project.getName());
marker.setAttribute(IMarker.SOURCE_ID, PLUGIN_ID);
buildCanContinue = false;
}
/* End issue #471 */
boolean sourceDirectoryInProjectFolder = false;
boolean outputDirectoryInProjectFolder = javaProject.getOutputLocation().equals(javaProject.getPath());
for (IPackageFragmentRoot root : javaProject.getAllPackageFragmentRoots()) {
if (root.getRawClasspathEntry().getEntryKind() == IClasspathEntry.CPE_SOURCE
&& root.getResource().getLocation().equals(project.getLocation())) {
sourceDirectoryInProjectFolder = true;
break;
}
}
if (sourceDirectoryInProjectFolder || outputDirectoryInProjectFolder) {
if (sourceDirectoryInProjectFolder) {
IMarker marker = project.createMarker(IJavaModelMarker.BUILDPATH_PROBLEM_MARKER);
marker.setAttribute(IMarker.MESSAGE, "One source directory is the root folder of the project, which is not supported for Ceylon projects." +
" Change it in the project properties");
marker.setAttribute(IMarker.PRIORITY, IMarker.PRIORITY_HIGH);
marker.setAttribute(IMarker.SEVERITY, IMarker.SEVERITY_ERROR);
marker.setAttribute(IMarker.LOCATION, "Project " + project.getName());
marker.setAttribute(IMarker.SOURCE_ID, PLUGIN_ID);
}
if (outputDirectoryInProjectFolder) {
IMarker marker = project.createMarker(IJavaModelMarker.BUILDPATH_PROBLEM_MARKER);
marker.setAttribute(IMarker.MESSAGE, "The project Java class directory is the root folder of the project, which is not supported for Ceylon projects." +
" Change it in the project properties");
marker.setAttribute(IMarker.PRIORITY, IMarker.PRIORITY_HIGH);
marker.setAttribute(IMarker.SEVERITY, IMarker.SEVERITY_ERROR);
marker.setAttribute(IMarker.LOCATION, "Project " + project.getName());
marker.setAttribute(IMarker.SOURCE_ID, PLUGIN_ID);
}
buildCanContinue = false;
}
IPath modulesOutputFolderPath = getCeylonModulesOutputFolder(project).getRawLocation();
IPath jdtOutputFolderPath = javaProject.getOutputLocation();
IFolder jdtOutputFolder = project.getWorkspace().getRoot().getFolder(jdtOutputFolderPath);
if (jdtOutputFolder.exists()) {
jdtOutputFolderPath = jdtOutputFolder.getRawLocation();
}
if (modulesOutputFolderPath.isPrefixOf(jdtOutputFolderPath) || jdtOutputFolderPath.isPrefixOf(modulesOutputFolderPath)) {
//if the output directories overlap, add an error and return
IMarker marker = project.createMarker(IJavaModelMarker.BUILDPATH_PROBLEM_MARKER);
marker.setAttribute(IMarker.MESSAGE, "The Ceylon modules output directory and the Java class directory shouldn't collide." +
" Change one of them in the project properties");
marker.setAttribute(IMarker.PRIORITY, IMarker.PRIORITY_HIGH);
marker.setAttribute(IMarker.SEVERITY, IMarker.SEVERITY_ERROR);
marker.setAttribute(IMarker.LOCATION, "Project " + project.getName());
marker.setAttribute(IMarker.SOURCE_ID, PLUGIN_ID);
buildCanContinue = false;
}
if (! isInSyncWithCeylonConfig(project)) {
//if the build paths are not in sync with the ceylon config file, add an error and return
IMarker marker = project.createMarker(CEYLON_CONFIG_NOT_IN_SYNC_MARKER);
marker.setAttribute(IMarker.MESSAGE, "The Ceylon Build Paths are not in sync with those in the ceylon configuration file ("
+ "./ceylon/config)\n"
+ "Either modify this file or change the build paths accordingly in the project properties");
marker.setAttribute(IMarker.PRIORITY, IMarker.PRIORITY_HIGH);
marker.setAttribute(IMarker.SEVERITY, IMarker.SEVERITY_ERROR);
marker.setAttribute(IMarker.LOCATION, project.getName());
marker.setAttribute(IMarker.SOURCE_ID, PLUGIN_ID);
buildCanContinue = false;
}
if (project.findMarkers(CHARSET_PROBLEM_MARKER_ID, false, IResource.DEPTH_ZERO).length > 0) {
buildCanContinue = false;
}
return buildCanContinue;
}
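/*
* Compares the source and resource folder sets declared in the Ceylon
* configuration file (.ceylon/config) with those of the Eclipse project,
* falling back to the default directories when the project declares none.
*/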
public static boolean isInSyncWithCeylonConfig(final IProject project) {
Set<String> sourceFoldersFromCeylonConfig = new TreeSet<String>();
Set<String> sourceFoldersFromEclipseProject = new TreeSet<String>();
Set<String> resourceFoldersFromCeylonConfig = new TreeSet<String>();
Set<String> resourceFoldersFromEclipseProject = new TreeSet<String>();
CeylonProjectConfig ceylonConfig = CeylonProjectConfig.get(project);
for (String path : ceylonConfig.getProjectSourceDirectories()) {
sourceFoldersFromCeylonConfig.add(Path.fromOSString(path).toString());
}
for (String path : ceylonConfig.getProjectResourceDirectories()) {
resourceFoldersFromCeylonConfig.add(Path.fromOSString(path).toString());
}
for (IFolder folder : getSourceFolders(project)) {
IPath path = folder.isLinked() ? folder.getLocation() : folder.getProjectRelativePath();
sourceFoldersFromEclipseProject.add(path.toString());
}
for (IFolder folder : getResourceFolders(project)) {
IPath path = folder.isLinked() ? folder.getLocation() : folder.getProjectRelativePath();
resourceFoldersFromEclipseProject.add(path.toString());
}
if (sourceFoldersFromEclipseProject.isEmpty()) {
sourceFoldersFromEclipseProject.add(Constants.DEFAULT_SOURCE_DIR);
}
if (resourceFoldersFromEclipseProject.isEmpty()) {
resourceFoldersFromEclipseProject.add(Constants.DEFAULT_RESOURCE_DIR);
}
return sourceFoldersFromCeylonConfig.equals(sourceFoldersFromEclipseProject) &&
resourceFoldersFromCeylonConfig.equals(resourceFoldersFromEclipseProject);
}
private void warmupCompletionProcessor(final IProject project) {
Job job = new WarmupJob(project);
job.setPriority(Job.BUILD);
//job.setSystem(true);
job.setRule(project.getWorkspace().getRoot());
job.schedule();
}
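/*
* Refreshes the exploded classes folder and, unless this build is already
* a reentrant one, schedules a follow-up Java + Ceylon build that can see
* the classes generated by the build that just finished.
*/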
private void scheduleIncrementalRebuild(@SuppressWarnings("rawtypes") Map args, final IProject project,
IProgressMonitor monitor) {
try {
getCeylonClassesOutputFolder(project).refreshLocal(DEPTH_INFINITE, monitor);
}
catch (CoreException e) {
e.printStackTrace();
}
if (args==null || !args.containsKey(BUILDER_ID + ".reentrant")) {
buildHook.scheduleReentrantBuild();
final CeylonBuildHook currentBuildHook = buildHook;
Job job = new Job("Rebuild with Ceylon classes") {
@Override
protected IStatus run(IProgressMonitor monitor) {
try {
//we have already done a build of both the Java and Ceylon classes
//so now go back and try to build both the Java and Ceylon
//classes again, using the classes we previously generated - this
//is to allow references from Java to Ceylon
project.build(INCREMENTAL_BUILD, JavaCore.BUILDER_ID, null, monitor);
Map<String,String> map = new HashMap<String,String>();
map.put(BUILDER_ID + ".reentrant", "true");
project.build(INCREMENTAL_BUILD, BUILDER_ID, map, monitor);
currentBuildHook.afterReentrantBuild();
}
catch (CoreException e) {
e.printStackTrace();
}
return Status.OK_STATUS;
}
};
job.setRule(project.getWorkspace().getRoot());
job.schedule();
}
}
private void collectDependencies(IProject project, TypeChecker typeChecker,
List<PhasedUnit> builtPhasedUnits) throws CoreException {
for (PhasedUnit pu : builtPhasedUnits) {
new UnitDependencyVisitor(pu).visit(pu.getCompilationUnit());
}
}
private void cleanRemovedFilesFromCeylonModel(Collection<IFile> filesToRemove,
PhasedUnits phasedUnits, IProject project) {
for (IFile fileToRemove: filesToRemove) {
if(isCeylon(fileToRemove)) {
// Remove the ceylon phasedUnit (which will also remove the unit from the package)
PhasedUnit phasedUnitToDelete = phasedUnits.getPhasedUnit(createResourceVirtualFile(fileToRemove));
if (phasedUnitToDelete != null) {
assert(phasedUnitToDelete instanceof ProjectPhasedUnit);
((ProjectPhasedUnit) phasedUnitToDelete).remove();
}
}
else if (isJava(fileToRemove)) {
// Remove the external unit from the package
Package pkg = getPackage(fileToRemove);
if (pkg != null) {
for (Unit unitToTest: pkg.getUnits()) {
if (unitToTest.getFilename().equals(fileToRemove.getName())) {
assert(unitToTest instanceof JavaUnit);
JavaUnit javaUnit = (JavaUnit) unitToTest;
javaUnit.remove();
break;
}
}
}
}
}
}
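/*
* Computes the set of files that must be re-typechecked and the (possibly
* smaller, when AST-aware incremental builds are enabled) set of files that
* must be recompiled, starting from the changed files and following the
* recorded dependencies. Files with unresolved references, duplicate
* declarations, or previous backend errors are always included.
*/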
private void calculateDependencies(IProject project,
IResourceDelta currentDelta,
Collection<IFile> changedFiles, TypeChecker typeChecker,
PhasedUnits phasedUnits, Set<IFile> filesToTypeCheck, Set<IFile> filesToCompile, IProgressMonitor monitor) {
Set<IFile> filesToAddInTypecheck = new HashSet<IFile>();
Set<IFile> filesToAddInCompile = new HashSet<IFile>();
if (!changedFiles.isEmpty()) {
Set<IFile> allTransitivelyDependingFiles = searchForDependantFiles(
project, changedFiles, typeChecker, monitor,
false);
Set<IFile> dependingFilesAccordingToStructureDelta;
boolean astAwareIncrementalBuild = areAstAwareIncrementalBuildsEnabled(project);
if (astAwareIncrementalBuild) {
dependingFilesAccordingToStructureDelta = searchForDependantFiles(
project, changedFiles, typeChecker, monitor,
true);
} else {
dependingFilesAccordingToStructureDelta = allTransitivelyDependingFiles;
}
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
for (PhasedUnit phasedUnit : phasedUnits.getPhasedUnits()) {
Unit unit = phasedUnit.getUnit();
if (!unit.getUnresolvedReferences().isEmpty()) {
IFile fileToAdd = ((IFileVirtualFile)(phasedUnit.getUnitFile())).getFile();
if (fileToAdd.exists()) {
filesToAddInTypecheck.add(fileToAdd);
filesToAddInCompile.add(fileToAdd);
}
}
Set<Declaration> duplicateDeclarations = unit.getDuplicateDeclarations();
if (!duplicateDeclarations.isEmpty()) {
IFile fileToAdd = ((IFileVirtualFile)(phasedUnit.getUnitFile())).getFile();
if (fileToAdd.exists()) {
filesToAddInTypecheck.add(fileToAdd);
filesToAddInCompile.add(fileToAdd);
}
for (Declaration duplicateDeclaration : duplicateDeclarations) {
Unit duplicateUnit = duplicateDeclaration.getUnit();
if ((duplicateUnit instanceof SourceFile) &&
(duplicateUnit instanceof IResourceAware)) {
IFile duplicateDeclFile = ((IResourceAware) duplicateUnit).getFileResource();
if (duplicateDeclFile != null && duplicateDeclFile.exists()) {
filesToAddInTypecheck.add(duplicateDeclFile);
filesToAddInCompile.add(duplicateDeclFile);
}
}
}
}
}
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
for (IFile f: allTransitivelyDependingFiles) {
if (f.getProject() == project) {
if (isSourceFile(f) || isResourceFile(f)) {
if (f.exists()) {
filesToAddInTypecheck.add(f);
if (!astAwareIncrementalBuild || dependingFilesAccordingToStructureDelta.contains(f)) {
filesToAddInCompile.add(f);
}
}
else {
// If the file is moved : add a dependency on the new file
if (currentDelta != null) {
IResourceDelta removedFile = currentDelta.findMember(f.getProjectRelativePath());
if (removedFile != null &&
(removedFile.getFlags() & IResourceDelta.MOVED_TO) != 0 &&
removedFile.getMovedToPath() != null) {
IFile movedFile = project.getFile(removedFile.getMovedToPath().removeFirstSegments(1));
filesToAddInTypecheck.add(movedFile);
if (!astAwareIncrementalBuild || dependingFilesAccordingToStructureDelta.contains(movedFile)) {
filesToAddInCompile.add(movedFile);
}
}
}
}
}
}
}
}
for (IFile file : getProjectFiles(project)) {
try {
if (file.findMarkers(PROBLEM_MARKER_ID + ".backend", false, IResource.DEPTH_ZERO).length > 0) {
filesToAddInCompile.add(file);
}
} catch (CoreException e) {
e.printStackTrace();
filesToAddInCompile.add(file);
}
}
filesToTypeCheck.addAll(filesToAddInTypecheck);
filesToCompile.addAll(filesToAddInCompile);
}
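/*
* Walks the dependency graph to a fixpoint, collecting every file that
* transitively depends on one of the changed files. When
* filterAccordingToStructureDelta is true, files whose structural model is
* unchanged are skipped and only a single pass is performed.
*/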
private Set<IFile> searchForDependantFiles(IProject project,
Collection<IFile> changedFiles, TypeChecker typeChecker,
IProgressMonitor monitor, boolean filterAccordingToStructureDelta) {
Set<IFile> changeDependents= new HashSet<IFile>();
Set<IFile> analyzedFiles= new HashSet<IFile>();
changeDependents.addAll(changedFiles);
boolean changed = false;
do {
Collection<IFile> additions= new HashSet<IFile>();
for (Iterator<IFile> iter=changeDependents.iterator(); iter.hasNext();) {
final IFile srcFile= iter.next();
if (analyzedFiles.contains(srcFile)) {
continue;
}
analyzedFiles.add(srcFile);
IProject currentFileProject = srcFile.getProject();
TypeChecker currentFileTypeChecker = null;
if (currentFileProject == project) {
currentFileTypeChecker = typeChecker;
}
else {
currentFileTypeChecker = getProjectTypeChecker(currentFileProject);
}
if (! CeylonBuilder.isInSourceFolder(srcFile)) {
// Don't search dependencies inside resource folders.
continue;
}
if (filterAccordingToStructureDelta) {
IResourceAware unit = getUnit(srcFile);
if (unit instanceof ProjectSourceFile) {
ProjectSourceFile projectSourceFile = (ProjectSourceFile) unit;
if (projectSourceFile.getDependentsOf().size() > 0) {
CompilationUnitDelta delta = projectSourceFile.buildDeltaAgainstModel();
if (delta != null
&& delta.getChanges().getSize() == 0
&& delta.getChildrenDeltas().getSize() == 0) {
continue;
}
}
}
}
Set<String> filesDependingOn = getDependentsOf(srcFile,
currentFileTypeChecker, currentFileProject);
for (String dependingFile: filesDependingOn) {
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
//TODO: note that the following is slightly
// fragile - it depends on the format
// of the path that we use to track
// dependents!
IPath pathRelativeToProject = new Path(dependingFile);
//.makeRelativeTo(project.getLocation());
IFile depFile= (IFile) project.findMember(pathRelativeToProject);
if (depFile == null) {
depFile= (IFile) currentFileProject.findMember(dependingFile);
}
if (depFile != null) {
additions.add(depFile);
}
else {
System.err.println("could not resolve dependent unit: " +
dependingFile);
}
}
}
changed = changeDependents.addAll(additions);
} while (changed && !filterAccordingToStructureDelta);
return changeDependents;
}
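/*
* Scans the resource deltas of this project and its referenced projects,
* recording changed sources and resources and files to remove, and keeping
* the list of current project sources up to date. Newly added folders are
* attached to their package in the Ceylon model.
*/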
private void scanChanges(final IResourceDelta currentDelta,
List<IResourceDelta> projectDeltas, final List<IFile> filesToRemove,
final List<IFile> currentProjectSources, final Set<IFile> changedSources, IProgressMonitor monitor)
throws CoreException {
for (final IResourceDelta projectDelta: projectDeltas) {
if (projectDelta != null) {
final IProject project = (IProject) projectDelta.getResource();
for (IResourceDelta projectAffectedChild: projectDelta.getAffectedChildren()) {
if (! (projectAffectedChild.getResource() instanceof IFolder)) {
continue;
}
final IFolder rootFolder = (IFolder) projectAffectedChild.getResource();
RootFolderType rootFolderType = getRootFolderType(rootFolder);
final boolean inSourceDirectory = rootFolderType == RootFolderType.SOURCE;
final boolean inResourceDirectory = rootFolderType == RootFolderType.RESOURCE;
if (inResourceDirectory || inSourceDirectory) {
// a real Ceylon source or resource folder so scan for changes
projectAffectedChild.accept(new IResourceDeltaVisitor() {
public boolean visit(IResourceDelta delta) throws CoreException {
IResource resource = delta.getResource();
if (resource instanceof IFile) {
IFile file= (IFile) resource;
if (inResourceDirectory || (isCompilable(file) && inSourceDirectory) ) {
changedSources.add(file);
if (projectDelta == currentDelta) {
if (delta.getKind() == IResourceDelta.REMOVED) {
filesToRemove.add(file);
currentProjectSources.remove(file);
}
if (delta.getKind() == IResourceDelta.ADDED) {
IFile addedFile = (IFile) resource;
int index = currentProjectSources.indexOf(addedFile);
if ((index >= 0)) {
currentProjectSources.remove(index);
}
currentProjectSources.add(addedFile);
}
}
}
return false;
}
if (resource instanceof IFolder) {
IFolder folder= (IFolder) resource;
if (projectDelta == currentDelta) {
if (folder.exists() && delta.getKind() != IResourceDelta.REMOVED) {
if (getPackage(folder) == null || getRootFolder(folder) == null) {
IContainer parent = folder.getParent();
if (parent instanceof IFolder) {
Package parentPkg = getPackage((IFolder)parent);
if (parentPkg != null) {
Package pkg = getProjectModelLoader(project).findOrCreatePackage(parentPkg.getModule(), parentPkg.getNameAsString() + "." + folder.getName());
resource.setSessionProperty(CeylonBuilder.RESOURCE_PROPERTY_PACKAGE_MODEL, new WeakReference<Package>(pkg));
resource.setSessionProperty(CeylonBuilder.RESOURCE_PROPERTY_ROOT_FOLDER, rootFolder);
}
}
}
}
}
}
return true;
}
});
}
}
}
}
}
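/*
* Decides, from the build kind and the resource deltas, whether a full
* build or a classpath container resolution is required, and whether there
* is anything to build at all.
*/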
public boolean chooseBuildTypeFromDeltas(final int kind, final IProject project,
final List<IResourceDelta> currentDeltas,
final BooleanHolder mustDoFullBuild,
final BooleanHolder mustResolveClasspathContainer) {
mustDoFullBuild.value = kind == FULL_BUILD || kind == CLEAN_BUILD ||
!isModelParsed(project);
mustResolveClasspathContainer.value = kind==FULL_BUILD; //false;
final BooleanHolder somethingToBuild = new BooleanHolder();
if (JavaProjectStateMirror.hasClasspathChanged(project)) {
mustDoFullBuild.value = true;
}
if (!mustDoFullBuild.value || !mustResolveClasspathContainer.value) {
for (IResourceDelta currentDelta: currentDeltas) {
if (currentDelta != null) {
try {
currentDelta.accept(new DeltaScanner(mustDoFullBuild, project,
somethingToBuild, mustResolveClasspathContainer));
}
catch (CoreException e) {
e.printStackTrace();
mustDoFullBuild.value = true;
mustResolveClasspathContainer.value = true;
}
}
else {
mustDoFullBuild.value = true;
mustResolveClasspathContainer.value = true;
}
}
}
class DecisionMaker {
public boolean mustContinueBuild() {
return mustDoFullBuild.value ||
mustResolveClasspathContainer.value ||
somethingToBuild.value ||
! isModelTypeChecked(project);
}
}
DecisionMaker decisionMaker = new DecisionMaker();
buildHook.deltasAnalyzed(currentDeltas, somethingToBuild, mustDoFullBuild, mustResolveClasspathContainer, decisionMaker.mustContinueBuild());
return decisionMaker.mustContinueBuild();
}
// private static String successMessage(boolean binariesGenerationOK) {
// return " " + (binariesGenerationOK ?
// "...binary generation succeeded" : "...binary generation FAILED");
// }
private Set<String> getDependentsOf(IFile srcFile,
TypeChecker currentFileTypeChecker,
IProject currentFileProject) {
if (isCeylon(srcFile)) {
PhasedUnit phasedUnit = currentFileTypeChecker.getPhasedUnits()
.getPhasedUnit(ResourceVirtualFile.createResourceVirtualFile(srcFile));
if (phasedUnit != null && phasedUnit.getUnit() != null) {
return phasedUnit.getUnit().getDependentsOf();
}
}
else {
Unit unit = getJavaUnit(getProject(), srcFile);
if (unit instanceof JavaCompilationUnit) {
return unit.getDependentsOf();
}
}
return Collections.emptySet();
}
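/*
* Parses a single source file into a ProjectPhasedUnit, using the
* project's default charset.
*/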
static ProjectPhasedUnit parseFileToPhasedUnit(final ModuleManager moduleManager, final TypeChecker typeChecker,
final ResourceVirtualFile file, final ResourceVirtualFile srcDir,
final Package pkg) {
return new CeylonSourceParser<ProjectPhasedUnit>() {
@Override
protected String getCharset() {
try {
return file.getResource().getProject().getDefaultCharset();
}
catch (Exception e) {
throw new RuntimeException(e);
}
}
@SuppressWarnings("unchecked")
@Override
protected ProjectPhasedUnit createPhasedUnit(CompilationUnit cu, Package pkg, CommonTokenStream tokenStream) {
return new ProjectPhasedUnit(file, srcDir, cu, pkg,
moduleManager, typeChecker, tokenStream.getTokens());
}
}.parseFileToPhasedUnit(moduleManager, typeChecker, file, srcDir, pkg);
}
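/*
* Re-typechecks the given sources: first refreshes and re-typechecks the
* dependencies coming from referenced projects, then re-parses the changed
* Ceylon files into new PhasedUnits and runs the typechecking phases on
* them in order (module phases, declaration and type scanning, refinement
* validation, type and flow analysis).
*/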
private List<PhasedUnit> incrementalBuild(IProject project, Collection<IFile> sourceToCompile,
IProgressMonitor mon) {
SubMonitor monitor = SubMonitor.convert(mon,
"Typechecking " + sourceToCompile.size() + " source files in project " +
project.getName(), sourceToCompile.size()*6);
TypeChecker typeChecker = typeCheckers.get(project);
PhasedUnits pus = typeChecker.getPhasedUnits();
JDTModuleManager moduleManager = (JDTModuleManager) pus.getModuleManager();
JDTModelLoader modelLoader = getModelLoader(typeChecker);
// First refresh the modules that are cross-project references to source modules
// in referenced projects. This will:
// - clean the binary declarations and reload the class-to-source mapping file for binary-based modules,
// - remove old PhasedUnits and parse new or updated PhasedUnits from the source archive for source-based modules
for (Module m : typeChecker.getContext().getModules().getListOfModules()) {
if (m instanceof JDTModule) {
JDTModule module = (JDTModule) m;
if (module.isCeylonArchive()) {
module.refresh();
}
}
}
// Secondly, typecheck again the changed PhasedUnits in changed external source modules
// (those which come from referenced projects)
List<PhasedUnits> phasedUnitsOfDependencies = typeChecker.getPhasedUnitsOfDependencies();
List<PhasedUnit> dependencies = new ArrayList<PhasedUnit>();
for (PhasedUnits phasedUnits: phasedUnitsOfDependencies) {
for (PhasedUnit phasedUnit: phasedUnits.getPhasedUnits()) {
dependencies.add(phasedUnit);
}
}
for (PhasedUnit pu: dependencies) {
monitor.subTask("- scanning declarations " + pu.getUnit().getFilename());
pu.scanDeclarations();
monitor.worked(1);
}
for (PhasedUnit pu: dependencies) {
monitor.subTask("- scanning type declarations " + pu.getUnit().getFilename());
pu.scanTypeDeclarations();
monitor.worked(2);
}
for (PhasedUnit pu: dependencies) {
pu.validateRefinement(); //TODO: only needed for type hierarchy view in IDE!
}
// Then typecheck the changed source of this project
Set<String> cleanedPackages = new HashSet<String>();
List<PhasedUnit> phasedUnitsToUpdate = new ArrayList<PhasedUnit>();
for (IFile fileToUpdate : sourceToCompile) {
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
// skip non-ceylon files
if(!isCeylon(fileToUpdate)) {
if (isJava(fileToUpdate)) {
Unit toRemove = getJavaUnit(project, fileToUpdate);
if(toRemove instanceof JavaUnit) {
((JavaUnit) toRemove).remove();
}
else {
String packageName = getPackageName(fileToUpdate);
if (! cleanedPackages.contains(packageName)) {
modelLoader.clearCachesOnPackage(packageName);
cleanedPackages.add(packageName);
}
}
}
continue;
}
ResourceVirtualFile file = ResourceVirtualFile.createResourceVirtualFile(fileToUpdate);
IFolder srcFolder = getRootFolder(fileToUpdate);
ProjectPhasedUnit alreadyBuiltPhasedUnit = (ProjectPhasedUnit) pus.getPhasedUnit(file);
Package pkg = null;
if (alreadyBuiltPhasedUnit!=null) {
// Editing an already built file
pkg = alreadyBuiltPhasedUnit.getPackage();
}
else {
IFolder packageFolder = (IFolder) file.getResource().getParent();
pkg = getPackage(packageFolder);
}
if (srcFolder == null || pkg == null) {
continue;
}
ResourceVirtualFile srcDir = new IFolderVirtualFile(project, srcFolder.getProjectRelativePath());
PhasedUnit newPhasedUnit = parseFileToPhasedUnit(moduleManager, typeChecker, file, srcDir, pkg);
phasedUnitsToUpdate.add(newPhasedUnit);
}
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
if (phasedUnitsToUpdate.size() == 0) {
return phasedUnitsToUpdate;
}
for (PhasedUnit phasedUnit : phasedUnitsToUpdate) {
assert(phasedUnit instanceof ProjectPhasedUnit);
((ProjectPhasedUnit)phasedUnit).install();
}
modelLoader.setupSourceFileObjects(phasedUnitsToUpdate);
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
for (PhasedUnit phasedUnit : phasedUnitsToUpdate) {
if (! phasedUnit.isDeclarationsScanned()) {
phasedUnit.validateTree();
}
}
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
for (PhasedUnit phasedUnit : phasedUnitsToUpdate) {
phasedUnit.visitSrcModulePhase();
}
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
for (PhasedUnit phasedUnit : phasedUnitsToUpdate) {
phasedUnit.visitRemainingModulePhase();
}
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
for (PhasedUnit phasedUnit : phasedUnitsToUpdate) {
if (! phasedUnit.isDeclarationsScanned()) {
monitor.subTask("- scanning declarations " + phasedUnit.getUnit().getFilename());
phasedUnit.scanDeclarations();
}
monitor.worked(1);
}
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
for (PhasedUnit phasedUnit : phasedUnitsToUpdate) {
if (! phasedUnit.isTypeDeclarationsScanned()) {
monitor.subTask("- scanning type declarations " + phasedUnit.getUnit().getFilename());
phasedUnit.scanTypeDeclarations();
}
monitor.worked(2);
}
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
for (PhasedUnit phasedUnit : phasedUnitsToUpdate) {
if (! phasedUnit.isRefinementValidated()) {
phasedUnit.validateRefinement();
}
}
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
for (PhasedUnit phasedUnit : phasedUnitsToUpdate) {
if (! phasedUnit.isFullyTyped()) {
monitor.subTask("- typechecking " + phasedUnit.getUnit().getFilename());
phasedUnit.analyseTypes();
if (showWarnings(project)) {
phasedUnit.analyseUsage();
}
monitor.worked(3);
}
}
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
for (PhasedUnit phasedUnit : phasedUnitsToUpdate) {
phasedUnit.analyseFlow();
}
UnknownTypeCollector utc = new UnknownTypeCollector();
for (PhasedUnit pu : phasedUnitsToUpdate) {
pu.getCompilationUnit().visit(utc);
}
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
monitor.done();
return phasedUnitsToUpdate;
}
private Unit getJavaUnit(IProject project, IFile fileToUpdate) {
IJavaElement javaElement = (IJavaElement) fileToUpdate.getAdapter(IJavaElement.class);
if (javaElement instanceof ICompilationUnit) {
ICompilationUnit compilationUnit = (ICompilationUnit) javaElement;
IJavaElement packageFragment = compilationUnit.getParent();
JDTModelLoader projectModelLoader = getProjectModelLoader(project);
// TODO : Why not use the Model Loader cache to get the declaration
// instead of iterating through all the packages ?
if (projectModelLoader != null) {
Package pkg = projectModelLoader.findPackage(packageFragment.getElementName());
if (pkg != null) {
for (Declaration decl : pkg.getMembers()) {
Unit unit = decl.getUnit();
if (unit.getFilename().equals(fileToUpdate.getName())) {
return unit;
}
}
}
}
}
return null;
}
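/*
* Typechecks all the PhasedUnits of the project from scratch: dependencies
* from source archives first, then every source file of the project, going
* through the same ordered phases as the incremental build.
*/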
private List<PhasedUnit> fullTypeCheck(IProject project,
TypeChecker typeChecker, IProgressMonitor mon)
throws CoreException {
List<PhasedUnits> phasedUnitsOfDependencies = typeChecker.getPhasedUnitsOfDependencies();
List<PhasedUnit> dependencies = new ArrayList<PhasedUnit>();
for (PhasedUnits phasedUnits: phasedUnitsOfDependencies) {
for (PhasedUnit phasedUnit: phasedUnits.getPhasedUnits()) {
dependencies.add(phasedUnit);
}
}
final List<PhasedUnit> listOfUnits = typeChecker.getPhasedUnits().getPhasedUnits();
SubMonitor monitor = SubMonitor.convert(mon,
"Typechecking " + listOfUnits.size() + " source files of project " +
project.getName(), dependencies.size()*5+listOfUnits.size()*6);
monitor.subTask("- typechecking source archives for project "
+ project.getName());
JDTModelLoader loader = getModelLoader(typeChecker);
// loader.reset();
for (PhasedUnit pu: dependencies) {
monitor.subTask("- scanning declarations " + pu.getUnit().getFilename());
pu.scanDeclarations();
monitor.worked(1);
}
for (PhasedUnit pu: dependencies) {
monitor.subTask("- scanning type declarations " + pu.getUnit().getFilename());
pu.scanTypeDeclarations();
monitor.worked(2);
}
for (PhasedUnit pu: dependencies) {
pu.validateRefinement(); //TODO: only needed for type hierarchy view in IDE!
}
Module languageModule = loader.getLanguageModule();
loader.loadPackage(languageModule, "com.redhat.ceylon.compiler.java.metadata", true);
loader.loadPackage(languageModule, LANGUAGE_MODULE_NAME, true);
loader.loadPackage(languageModule, "ceylon.language.descriptor", true);
loader.loadPackageDescriptors();
monitor.subTask("(typechecking source files for project "
+ project.getName() +")");
for (PhasedUnit pu : listOfUnits) {
if (! pu.isDeclarationsScanned()) {
monitor.subTask("- scanning declarations " + pu.getUnit().getFilename());
pu.validateTree();
pu.scanDeclarations();
monitor.worked(1);
}
}
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
for (PhasedUnit pu : listOfUnits) {
if (! pu.isTypeDeclarationsScanned()) {
monitor.subTask("- scanning types " + pu.getUnit().getFilename());
pu.scanTypeDeclarations();
monitor.worked(2);
}
}
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
for (PhasedUnit pu: listOfUnits) {
if (! pu.isRefinementValidated()) {
pu.validateRefinement();
}
}
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
for (PhasedUnit pu : listOfUnits) {
if (! pu.isFullyTyped()) {
monitor.subTask("- typechecking " + pu.getUnit().getFilename());
pu.analyseTypes();
if (showWarnings(project)) {
pu.analyseUsage();
}
monitor.worked(3);
}
}
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
for (PhasedUnit pu: listOfUnits) {
pu.analyseFlow();
}
UnknownTypeCollector utc = new UnknownTypeCollector();
for (PhasedUnit pu : listOfUnits) {
pu.getCompilationUnit().visit(utc);
}
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
projectModuleDependencies.get(project).addModulesWithDependencies(typeChecker.getContext().getModules().getListOfModules());
monitor.done();
return typeChecker.getPhasedUnits().getPhasedUnits();
}
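/*
* Rebuilds the Ceylon model of the project from scratch: creates a fresh
* typechecker, parses every source and resource file, resolves the module
* dependency tree (reporting download progress through the artifact
* callback), and registers the resulting typechecker and source archives.
*/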
public static TypeChecker parseCeylonModel(final IProject project,
final IProgressMonitor mon) throws CoreException {
return doWithCeylonModelCaching(new Callable<TypeChecker>() {
@Override
public TypeChecker call() throws CoreException {
SubMonitor monitor = SubMonitor.convert(mon,
"Setting up typechecker for project " + project.getName(), 113);
modelStates.put(project, ModelState.Parsing);
typeCheckers.remove(project);
projectRepositoryManagers.remove(project);
projectFiles.remove(project);
if (projectModuleDependencies.containsKey(project)) {
projectModuleDependencies.get(project).reset();
} else {
projectModuleDependencies.put(project, new ModuleDependencies());
}
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
final IJavaProject javaProject = JavaCore.create(project);
TypeChecker typeChecker = buildTypeChecker(project, javaProject);
PhasedUnits phasedUnits = typeChecker.getPhasedUnits();
JDTModuleManager moduleManager = (JDTModuleManager) phasedUnits.getModuleManager();
moduleManager.setTypeChecker(typeChecker);
Context context = typeChecker.getContext();
JDTModelLoader modelLoader = (JDTModelLoader) moduleManager.getModelLoader();
Module defaultModule = context.getModules().getDefaultModule();
monitor.worked(1);
monitor.subTask("- parsing source files for project "
+ project.getName());
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
phasedUnits.getModuleManager().prepareForTypeChecking();
List<IFile> scannedFiles = scanFiles(project, javaProject,
typeChecker, phasedUnits, moduleManager, modelLoader,
defaultModule, monitor.newChild(10));
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
modelLoader.setupSourceFileObjects(typeChecker.getPhasedUnits().getPhasedUnits());
monitor.worked(1);
// Parsing of ALL units in the source folder should have been done
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
monitor.subTask("- determining module dependencies for "
+ project.getName());
phasedUnits.visitModules();
//By now the language module version should be known (as local)
//or we should use the default one.
Module languageModule = context.getModules().getLanguageModule();
if (languageModule.getVersion() == null) {
languageModule.setVersion(TypeChecker.LANGUAGE_MODULE_VERSION);
}
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
final ModuleValidator moduleValidator = new ModuleValidator(context, phasedUnits) {
@Override
protected void executeExternalModulePhases() {}
@Override
protected Exception catchIfPossible(Exception e) {
if (e instanceof OperationCanceledException) {
throw (OperationCanceledException)e;
}
return e;
}
};
final int maxModuleValidatorWork = 100000;
final SubMonitor validatorProgress = SubMonitor.convert(monitor.newChild(100), maxModuleValidatorWork);
moduleValidator.setListener(new ModuleValidator.ProgressListener() {
@Override
public void retrievingModuleArtifact(final Module module,
final ArtifactContext artifactContext) {
final long numberOfModulesNotAlreadySearched = moduleValidator.numberOfModulesNotAlreadySearched();
final long totalNumberOfModules = numberOfModulesNotAlreadySearched + moduleValidator.numberOfModulesAlreadySearched();
final long oneModuleWork = maxModuleValidatorWork / totalNumberOfModules;
final int workRemaining = (int) ((double)numberOfModulesNotAlreadySearched * oneModuleWork);
validatorProgress.setWorkRemaining(workRemaining);
artifactContext.setCallback(new ArtifactCallback() {
SubMonitor artifactProgress = null;
long size;
long alreadyDownloaded = 0;
StringBuilder messageBuilder = new StringBuilder("- downloading module ")
.append(module.getSignature())
.append(' ');
@Override
public void start(String nodeFullPath, long size, String contentStore) {
this.size = size;
int ticks = size > 0 ? (int) size : 100000;
artifactProgress = SubMonitor.convert(validatorProgress.newChild((int)oneModuleWork), ticks);
if (! contentStore.isEmpty()) {
messageBuilder.append("from ").append(contentStore);
}
artifactProgress.subTask(messageBuilder.toString());
if (artifactProgress.isCanceled()) {
throw new OperationCanceledException("Interrupted the download of module : " + module.getSignature());
}
}
@Override
public void read(byte[] bytes, int length) {
if (artifactProgress.isCanceled()) {
throw new OperationCanceledException("Interrupted the download of module : " + module.getSignature());
}
if (size < 0) {
artifactProgress.setWorkRemaining(length*100);
} else {
artifactProgress.subTask(new StringBuilder(messageBuilder)
.append(" ( ")
.append(alreadyDownloaded * 100 / size)
.append("% )").toString());
}
alreadyDownloaded += length;
artifactProgress.worked(length);
}
@Override
public void error(File localFile, Throwable t) {
localFile.delete();
artifactProgress.setWorkRemaining(0);
}
@Override
public void done(File arg0) {
artifactProgress.setWorkRemaining(0);
}
});
}
@Override
public void resolvingModuleArtifact(Module module,
ArtifactResult artifactResult) {
long numberOfModulesNotAlreadySearched = moduleValidator.numberOfModulesNotAlreadySearched();
validatorProgress.setWorkRemaining((int) (numberOfModulesNotAlreadySearched * 100
/ (numberOfModulesNotAlreadySearched + moduleValidator.numberOfModulesAlreadySearched())));
validatorProgress.subTask(new StringBuilder("- resolving module ")
.append(module.getSignature())
.toString());
}
});
moduleValidator.verifyModuleDependencyTree();
validatorProgress.setWorkRemaining(0);
typeChecker.setPhasedUnitsOfDependencies(moduleValidator.getPhasedUnitsOfDependencies());
for (PhasedUnits dependencyPhasedUnits: typeChecker.getPhasedUnitsOfDependencies()) {
modelLoader.addSourceArchivePhasedUnits(dependencyPhasedUnits.getPhasedUnits());
}
modelLoader.setModuleAndPackageUnits();
if (compileToJs(project)) {
for (Module module : typeChecker.getContext().getModules().getListOfModules()) {
if (module instanceof JDTModule) {
JDTModule jdtModule = (JDTModule) module;
if (jdtModule.isCeylonArchive()) {
File artifact = getProjectRepositoryManager(project).getArtifact(
new ArtifactContext(
jdtModule.getNameAsString(),
jdtModule.getVersion(),
ArtifactContext.JS));
if (artifact == null) {
moduleManager.attachErrorToOriginalModuleImport(jdtModule,
"module not available for JavaScript platform: '" +
module.getNameAsString() + "' \"" +
module.getVersion() + "\"");
}
}
}
}
}
monitor.worked(1);
typeCheckers.put(project, typeChecker);
projectFiles.put(project, scannedFiles);
modelStates.put(project, ModelState.Parsed);
ExternalSourceArchiveManager externalArchiveManager = getExternalSourceArchiveManager();
if (allClasspathContainersInitialized()) {
externalArchiveManager.cleanUp(monitor);
}
for (IPath sourceArchivePath : getExternalSourceArchives(getProjectExternalModules(project))) {
if (externalArchiveManager.getSourceArchive(sourceArchivePath) == null) {
externalArchiveManager.addSourceArchive(sourceArchivePath, true);
}
}
externalArchiveManager.createPendingSourceArchives(monitor);
for (ICeylonModelListener listener : modelListeners) {
listener.modelParsed(project);
}
monitor.done();
return typeChecker;
}
});
}
private static TypeChecker buildTypeChecker(IProject project,
final IJavaProject javaProject) throws CoreException {
TypeCheckerBuilder typeCheckerBuilder = new TypeCheckerBuilder()
.verbose(false)
.moduleManagerFactory(new ModuleManagerFactory(){
@Override
public ModuleManager createModuleManager(Context context) {
return new JDTModuleManager(context, javaProject);
}
});
RepositoryManager repositoryManager = getProjectRepositoryManager(project);
typeCheckerBuilder.setRepositoryManager(repositoryManager);
TypeChecker typeChecker = typeCheckerBuilder.getTypeChecker();
return typeChecker;
}
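/*
* Scans the source and resource folders of the project: a first pass over
* the source folders discovers the non-default modules, then every source
* and resource file is parsed and attached to its module and package.
*/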
private static List<IFile> scanFiles(IProject project, IJavaProject javaProject,
final TypeChecker typeChecker, final PhasedUnits phasedUnits,
final JDTModuleManager moduleManager, final JDTModelLoader modelLoader,
final Module defaultModule, IProgressMonitor mon) throws CoreException {
SubMonitor monitor = SubMonitor.convert(mon, 10000);
final List<IFile> projectFiles = new ArrayList<IFile>();
final Collection<IFolder> sourceFolders = new LinkedList<>();
for (IFolder sourceFolder : getSourceFolders(project)) {
if (sourceFolder.exists()) {
sourceFolders.add(sourceFolder);
}
}
final Collection<IFolder> resourceFolders = new LinkedList<>();
for (IFolder resourceFolder : getResourceFolders(project)) {
if (resourceFolder.exists()) {
resourceFolders.add(resourceFolder);
}
}
// First scan all non-default source modules and attach the contained packages
for (IFolder srcFolder : sourceFolders) {
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
final ResourceVirtualFile srcDir = ResourceVirtualFile.createResourceVirtualFile(srcFolder);
srcFolder.accept(new ModulesScanner(defaultModule, modelLoader, moduleManager,
srcDir, typeChecker, monitor));
}
// Then scan all source files
for (final IFolder sourceFolder : sourceFolders) {
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
final IFolderVirtualFile srcDir = ResourceVirtualFile.createResourceVirtualFile(sourceFolder);
sourceFolder.accept(new RootFolderScanner(RootFolderType.SOURCE, defaultModule, modelLoader, moduleManager,
srcDir, typeChecker, projectFiles,
phasedUnits, monitor));
}
// Then scan all resource files
for (final IFolder resourceFolder : resourceFolders) {
if (monitor.isCanceled()) {
throw new OperationCanceledException();
}
final IFolderVirtualFile srcDir = ResourceVirtualFile.createResourceVirtualFile(resourceFolder);
resourceFolder.accept(new RootFolderScanner(RootFolderType.RESOURCE, defaultModule, modelLoader, moduleManager,
srcDir, typeChecker, projectFiles,
phasedUnits, monitor));
}
return projectFiles;
}
private static void addProblemAndTaskMarkers(final List<PhasedUnit> units,
final IProject project) {
for (PhasedUnit phasedUnit: units) {
IFile file = getFile(phasedUnit);
phasedUnit.getCompilationUnit().visit(new MarkerCreator(file));
addTaskMarkers(file, phasedUnit.getTokens());
}
}
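/*
* Drives binary generation for both backends: builds the compiler options
* from the project configuration, splits the files between the Java and
* JavaScript backends, and, when neither backend is enabled, still
* produces the source archives for the project modules.
*/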
private boolean generateBinaries(IProject project, IJavaProject javaProject,
Collection<PhasedUnit> unitsTypecheckedIncrementally,
Collection<IFile> filesToCompile, TypeChecker typeChecker,
IProgressMonitor monitor) throws CoreException {
List<String> options = new ArrayList<String>();
List<File> js_srcdir = new ArrayList<File>();
List<File> js_rsrcdir = new ArrayList<File>();
List<String> js_repos = new ArrayList<String>();
boolean js_verbose = false;
String js_outRepo = null;
String srcPath = "";
for (IFolder sourceFolder : getSourceFolders(project)) {
File sourcePathElement = sourceFolder.getRawLocation().toFile();
if (!srcPath.isEmpty()) {
srcPath += File.pathSeparator;
}
srcPath += sourcePathElement.getAbsolutePath();
js_srcdir.add(sourcePathElement);
}
options.add("-src");
options.add(srcPath);
String resPath = "";
for (IFolder resourceFolder : getResourceFolders(project)) {
File resourcePathElement = resourceFolder.getRawLocation().toFile();
if (!resPath.isEmpty()) {
resPath += File.pathSeparator;
}
resPath += resourcePathElement.getAbsolutePath();
js_rsrcdir.add(resourcePathElement);
}
options.add("-res");
options.add(resPath);
options.add("-encoding");
options.add(project.getDefaultCharset());
for (String repository : getUserRepositories(project)) {
options.add("-rep");
options.add(repository);
js_repos.add(repository);
}
String verbose = System.getProperty("ceylon.verbose");
if (verbose!=null && "true".equals(verbose)) {
options.add("-verbose");
js_verbose = true;
}
options.add("-g:lines,vars,source");
String systemRepo = getInterpolatedCeylonSystemRepo(project);
if(systemRepo != null && !systemRepo.isEmpty()){
options.add("-sysrep");
options.add(systemRepo);
}
final File modulesOutputDir = getCeylonModulesOutputDirectory(project);
if (modulesOutputDir!=null) {
options.add("-out");
options.add(modulesOutputDir.getAbsolutePath());
js_outRepo = modulesOutputDir.getAbsolutePath();
}
List<File> forJavaBackend = new ArrayList<File>();
List<File> forJavascriptBackend = new ArrayList<File>();
List<File> resources = new ArrayList<File>();
for (IFile file : filesToCompile) {
if (isInSourceFolder(file)) {
if(isCeylon(file)) {
forJavaBackend.add(file.getLocation().toFile());
}
if(isJava(file)) {
forJavaBackend.add(file.getLocation().toFile());
}
}
if (isResourceFile(file)) {
resources.add(file.getLocation().toFile());
}
}
// For the moment the JSCompiler doesn't support partial compilation of a module
// so we add all the files to the source files list.
// TODO : When JS partial module compilation is supported, re-integrate these lines
// in the loop above
if (compileToJs(project)) {
for (IFile file : getProjectFiles(project)) {
if (isInSourceFolder(file)) {
if(isCeylon(file) || isJavascript(file)) {
forJavascriptBackend.add(file.getLocation().toFile());
}
}
}
}
PrintWriter printWriter = new PrintWriter(System.out);//(getConsoleErrorStream(), true);
boolean success = true;
//Compile JS first
if ((forJavascriptBackend.size() + resources.size() > 0) && compileToJs(project)) {
success = compileJs(project, typeChecker, js_srcdir, js_rsrcdir, js_repos,
js_verbose, js_outRepo, printWriter, ! compileToJava(project),
forJavascriptBackend, resources);
}
if ((forJavaBackend.size() + resources.size() > 0) && compileToJava(project)) {
// For Java don't stop compiling when encountering errors
options.add("-continue");
// always add the java files, otherwise ceylon code won't see them
// and they won't end up in the archives (src/car)
success = success & compile(project, javaProject, options,
unitsTypecheckedIncrementally,
forJavaBackend, resources, typeChecker, printWriter, monitor);
}
if (! compileToJs(project) &&
! compileToJava(project) &&
modulesOutputDir != null) {
EclipseLogger logger = new EclipseLogger();
RepositoryManager outRepo = repoManager()
.offline(CeylonProjectConfig.get(project).isOffline())
.cwd(project.getLocation().toFile())
.outRepo(js_outRepo)
.logger(logger)
.buildOutputManager();
for (Module m : getProjectDeclaredSourceModules(project)) {
if (m instanceof JDTModule) {
ArtifactCreator sac;
try {
sac = CeylonUtils.makeSourceArtifactCreator(outRepo, js_srcdir,
m.getNameAsString(), m.getVersion(), js_verbose, logger);
List<String> moduleFiles = new ArrayList<>();
for (IFile file : filesToCompile) {
IContainer container = file.getParent();
if (container instanceof IFolder) {
if (isSourceFile(file)) {
Module fileModule = getModule(((IFolder)container));
if (m.equals(fileModule)) {
moduleFiles.add(file.getLocation().toFile().getPath());
}
}
}
}
sac.copy(moduleFiles);
} catch (IOException e) {
e.printStackTrace();
success = false;
}
}
}
}
return success;
}
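/*
* Runs the JavaScript backend on the given sources and resources,
* reporting unexpected backend errors as problem markers.
*/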
private boolean compileJs(IProject project, TypeChecker typeChecker,
List<File> js_srcdir, List<File> js_rsrcdir, List<String> js_repos,
boolean js_verbose, String js_outRepo, PrintWriter printWriter,
boolean generateSourceArchive, List<File> sources, List<File> resources)
throws CoreException {
Options jsopts = new Options()
.repos(js_repos)
.sourceDirs(js_srcdir)
.resourceDirs(js_rsrcdir)
.systemRepo(getInterpolatedCeylonSystemRepo(project))
.outRepo(js_outRepo)
.optimize(true)
.verbose(js_verbose ? "all" : null)
.generateSourceArchive(generateSourceArchive)
.encoding(project.getDefaultCharset())
.offline(CeylonProjectConfig.get(project).isOffline());
JsCompiler jsc = new JsCompiler(typeChecker, jsopts) {
@Override
protected boolean nonCeylonUnit(Unit u) {
if (! super.nonCeylonUnit(u)) {
return false;
}
if (u instanceof CeylonBinaryUnit) {
CeylonBinaryUnit ceylonBinaryUnit = (CeylonBinaryUnit) u;
Module module = u.getPackage().getModule();
if (module != null) {
if (module.equals(module.getLanguageModule())) {
return false;
}
}
if (ceylonBinaryUnit.getCeylonSourceRelativePath() != null) {
return false;
}
}
return true;
}
public File getFullPath(PhasedUnit pu) {
VirtualFile virtualFile = pu.getUnitFile();
if (virtualFile instanceof ResourceVirtualFile) {
return ((IFileVirtualFile) virtualFile).getFile().getLocation().toFile();
} else {
return new File(virtualFile.getPath());
}
}
}.stopOnErrors(false);
try {
jsc.setSourceFiles(sources);
jsc.setResourceFiles(resources);
if (!jsc.generate()) {
CompileErrorReporter errorReporter = null;
//Report backend errors
for (Message e : jsc.getErrors()) {
if (e instanceof UnexpectedError) {
if (errorReporter == null) {
errorReporter = new CompileErrorReporter(project);
}
errorReporter.report(new CeylonCompilationError(project, (UnexpectedError)e));
}
}
if (errorReporter != null) {
//System.out.println("Ceylon-JS compiler failed for " + project.getName());
errorReporter.failed();
}
return false;
}
else {
//System.out.println("compile ok to js");
return true;
}
}
catch (IOException ex) {
ex.printStackTrace(printWriter);
return false;
}
}
@SuppressWarnings("deprecation")
private boolean compile(final IProject project, IJavaProject javaProject,
List<String> options, Collection<PhasedUnit> unitsTypecheckedIncrementally,
List<File> sources, List<File> resources,
final TypeChecker typeChecker, PrintWriter printWriter,
IProgressMonitor mon)
throws VerifyError {
int numberOfJavaFiles = 0;
int numberOfCeylonFiles = 0;
for (File file : sources) {
if (JavaCore.isJavaLikeFileName(file.getName())) {
numberOfJavaFiles ++;
} else if (file.getName().endsWith(".ceylon")){
numberOfCeylonFiles ++;
}
}
int numberOfSourceFiles = numberOfCeylonFiles + numberOfJavaFiles;
final SubMonitor monitor = SubMonitor.convert(mon,
"Generating binaries for " + numberOfSourceFiles +
" source files in project " + project.getName(),
numberOfSourceFiles * 2);
com.redhat.ceylon.compiler.java.tools.CeyloncTool compiler;
try {
compiler = new com.redhat.ceylon.compiler.java.tools.CeyloncTool();
} catch (VerifyError e) {
System.err.println("ERROR: Cannot run tests! Did you maybe forget to configure the -Xbootclasspath/p: parameter?");
throw e;
}
CompileErrorReporter errorReporter = new CompileErrorReporter(project);
final com.sun.tools.javac.util.Context context = new com.sun.tools.javac.util.Context();
context.put(com.sun.tools.javac.util.Log.outKey, printWriter);
context.put(DiagnosticListener.class, errorReporter);
CeylonLog.preRegister(context);
final Map<RegularFileObject, Set<String>> inputFilesToGenerate = new HashMap<RegularFileObject, Set<String>>();
BuildFileManager fileManager = new BuildFileManager(context, true, null, project, inputFilesToGenerate);
computeCompilerClasspath(project, javaProject, options);
List<File> allFiles = new ArrayList<>(sources.size()+ resources.size());
allFiles.addAll(sources);
allFiles.addAll(resources);
Iterable<? extends JavaFileObject> unitsToCompile =
fileManager.getJavaFileObjectsFromFiles(allFiles);
if (reuseEclipseModelInCompilation(project)) {
setupJDTModelLoader(project, typeChecker, context, unitsTypecheckedIncrementally);
}
CeyloncTaskImpl task = (CeyloncTaskImpl) compiler.getTask(printWriter,
fileManager, errorReporter, options, null,
unitsToCompile);
task.setTaskListener(new TaskListener() {
@Override
public void started(TaskEvent ta) {
if (! ta.getKind().equals(Kind.PARSE) && ! ta.getKind().equals(Kind.ANALYZE)) {
return;
}
String name = ta.getSourceFile().getName();
name = name.substring(name.lastIndexOf("/")+1);
if (ta.getKind().equals(Kind.PARSE)) {
CompilationUnitTree cut = ta.getCompilationUnit();
if (cut != null && cut instanceof CeylonCompilationUnit) {
monitor.subTask("- transforming " + name);
} else {
monitor.subTask("- parsing " + name);
}
}
if (ta.getKind().equals(Kind.ANALYZE)) {
monitor.subTask("- generating bytecode for " + name);
}
}
@Override
public void finished(TaskEvent ta) {
if (! ta.getKind().equals(Kind.PARSE) && ! ta.getKind().equals(Kind.ANALYZE)) {
return;
}
if (ta.getKind().equals(Kind.PARSE)) {
RegularFileObject sourceFile = BuildFileManager.getSourceFile(ta.getSourceFile());
Set<String> expectedClasses = inputFilesToGenerate.get(sourceFile);
if (expectedClasses == null) {
expectedClasses = new HashSet<String>();
inputFilesToGenerate.put(sourceFile, expectedClasses);
}
if (ta.getCompilationUnit() instanceof JCCompilationUnit) {
JCCompilationUnit cu = (JCCompilationUnit) ta.getCompilationUnit();
for (JCTree def : cu.defs) {
if (def instanceof JCClassDecl) {
expectedClasses.add(((JCClassDecl) def).name.toString());
}
}
}
if (expectedClasses.isEmpty()) {
inputFilesToGenerate.remove(sourceFile);
}
}
monitor.worked(1);
}
});
boolean success=false;
try {
success = task.call();
}
catch (Exception e) {
e.printStackTrace(printWriter);
}
if (!success) {
errorReporter.failed(task.getExitState());
}
fileManager.addUngeneratedErrors();
monitor.done();
return success;
}
private void computeCompilerClasspath(IProject project,
IJavaProject javaProject, List<String> options) {
List<String> classpathElements = new ArrayList<String>();
// Modules projectModules = getProjectModules(project);
// ArtifactContext ctx;
// if (projectModules != null) {
// Module languageModule = projectModules.getLanguageModule();
// ctx = new ArtifactContext(languageModule.getNameAsString(),
// languageModule.getVersion());
// }
// else {
// ctx = new ArtifactContext(LANGUAGE_MODULE_NAME,
// TypeChecker.LANGUAGE_MODULE_VERSION);
// }
//
// ctx.setSuffix(ArtifactContext.CAR);
// RepositoryManager repositoryManager = getProjectRepositoryManager(project);
// if (repositoryManager!=null) {
// //try {
// File languageModuleArchive = repositoryManager.getArtifact(ctx);
// classpathElements.add(languageModuleArchive.getAbsolutePath());
// /*}
// catch (Exception e) {
// e.printStackTrace();
// }*/
// }
addProjectClasspathElements(classpathElements,
javaProject);
try {
for (IProject p: project.getReferencedProjects()) {
if(p.isAccessible()){
addProjectClasspathElements(classpathElements,
JavaCore.create(p));
}
}
}
catch (CoreException ce) {
ce.printStackTrace();
}
options.add("-classpath");
// add the compile-time required jars (those used implicitly by the language module)
classpathElements.addAll(CeylonPlugin.getCompiletimeRequiredJars());
String classpath = "";
for (String cpElement : classpathElements) {
if (! classpath.isEmpty()) {
classpath += File.pathSeparator;
}
classpath += cpElement;
}
options.add(classpath);
}
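// Side note (illustrative alternative, not part of the original source): the
// manual join above could, on Java 8+, be written with the standard
// java.lang.String API:
//     String classpath = String.join(File.pathSeparator, classpathElements);
// Both forms produce the same platform-separated classpath string.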
private void setupJDTModelLoader(final IProject project,
final TypeChecker typeChecker,
final com.sun.tools.javac.util.Context context,
final Collection<PhasedUnit> unitsTypecheckedIncrementally) {
final JDTModelLoader modelLoader = getModelLoader(typeChecker);
context.put(LanguageCompiler.ceylonContextKey, typeChecker.getContext());
context.put(TypeFactory.class, modelLoader.getTypeFactory());
context.put(LanguageCompiler.compilerDelegateKey,
new JdtCompilerDelegate(modelLoader, project, typeChecker, context, unitsTypecheckedIncrementally));
context.put(TypeFactory.class, modelLoader.getTypeFactory());
context.put(ModelLoaderFactory.class, new ModelLoaderFactory() {
@Override
public AbstractModelLoader createModelLoader(
com.sun.tools.javac.util.Context context) {
return modelLoader;
}
});
}
private void addProjectClasspathElements(List<String> classpathElements, IJavaProject javaProj) {
try {
List<IClasspathContainer> containers = getCeylonClasspathContainers(javaProj);
for (IClasspathContainer container : containers) {
for (IClasspathEntry cpEntry : container.getClasspathEntries()) {
if (!isInCeylonClassesOutputFolder(cpEntry.getPath())) {
classpathElements.add(cpEntry.getPath().toOSString());
}
}
}
File outputDir = toFile(javaProj.getProject(), javaProj.getOutputLocation()
.makeRelativeTo(javaProj.getProject().getFullPath()));
classpathElements.add(outputDir.getAbsolutePath());
for (IClasspathEntry cpEntry : javaProj.getResolvedClasspath(true)) {
if (isInCeylonClassesOutputFolder(cpEntry.getPath())) {
classpathElements.add(javaProj.getProject().getLocation().append(cpEntry.getPath().lastSegment()).toOSString());
}
}
}
catch (JavaModelException e1) {
e1.printStackTrace();
}
}
public static boolean isExplodeModulesEnabled(IProject project) {
Map<String,String> args = getBuilderArgs(project);
return args.get("explodeModules")!=null ||
args.get("enableJdtClasses")!=null;
}
public static boolean areAstAwareIncrementalBuildsEnabled(IProject project) {
return CeylonNature.isEnabled(project) && getBuilderArgs(project).get("astAwareIncrementalBuilds")==null;
}
public static boolean compileWithJDTModel = true;
public static boolean reuseEclipseModelInCompilation(IProject project) {
return loadDependenciesFromModelLoaderFirst(project) && compileWithJDTModel;
}
// Keep it false on master until we fix the associated cross-reference and search issues
// by correctly managing source to binary links and indexes
public static boolean loadBinariesFirst = "true".equals(System.getProperty("ceylon.loadBinariesFirst", "true"));
public static boolean loadDependenciesFromModelLoaderFirst(IProject project) {
return compileToJava(project) && loadBinariesFirst;
}
public static boolean showWarnings(IProject project) {
return getBuilderArgs(project).get("hideWarnings")==null;
}
public static boolean compileToJs(IProject project) {
return getBuilderArgs(project).get("compileJs")!=null;
}
public static boolean compileToJava(IProject project) {
return CeylonNature.isEnabled(project) && getBuilderArgs(project).get("compileJava")==null;
}
public static String fileName(ClassMirror c) {
if (c instanceof JavacClass) {
return ((JavacClass) c).classSymbol.classfile.getName();
}
else if (c instanceof JDTClass) {
return ((JDTClass) c).getFileName();
}
else if (c instanceof SourceClass) {
return ((SourceClass) c).getModelDeclaration().getUnit().getFilename();
}
else {
return "another file";
}
}
public static List<String> getUserRepositories(IProject project) throws CoreException {
List<String> userRepos = getCeylonRepositories(project);
userRepos.addAll(getReferencedProjectsOutputRepositories(project));
return userRepos;
}
public static List<String> getAllRepositories(IProject project) throws CoreException {
List<String> allRepos = getUserRepositories(project);
allRepos.add(CeylonProjectConfig.get(project).getMergedRepositories().getCacheRepository().getUrl());
return allRepos;
}
public static List<String> getReferencedProjectsOutputRepositories(IProject project) throws CoreException {
List<String> repos = new ArrayList<String>();
if (project != null) {
for (IProject referencedProject: project.getReferencedProjects()) {
if (referencedProject.isOpen() && CeylonNature.isEnabled(referencedProject)) {
repos.add(getCeylonModulesOutputDirectory(referencedProject).getAbsolutePath());
}
}
}
return repos;
}
private static Map<String,String> getBuilderArgs(IProject project) {
if (project!=null) {
try {
for (ICommand c: project.getDescription().getBuildSpec()) {
if (c.getBuilderName().equals(BUILDER_ID)) {
return c.getArguments();
}
}
}
catch (CoreException e) {
e.printStackTrace();
}
}
return Collections.emptyMap();
}
public static List<String> getCeylonRepositories(IProject project) {
CeylonProjectConfig projectConfig = CeylonProjectConfig.get(project);
List<String> projectLookupRepos = projectConfig.getProjectLocalRepos();
List<String> globalLookupRepos = projectConfig.getGlobalLookupRepos();
List<String> projectRemoteRepos = projectConfig.getProjectRemoteRepos();
List<String> otherRemoteRepos = projectConfig.getOtherRemoteRepos();
List<String> repos = new ArrayList<String>();
repos.addAll(projectLookupRepos);
repos.addAll(globalLookupRepos);
repos.addAll(projectRemoteRepos);
repos.addAll(otherRemoteRepos);
return repos;
}
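// Illustrative note (the ordering shown is what the code builds; the
// precedence interpretation is an assumption): a typical resulting list might
// look like
//     ["./modules", "~/.ceylon/repo", "https://modules.ceylon-lang.org", ...]
// with project-local repositories listed before global and remote ones.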
private static File toFile(IProject project, IPath path) {
return project.getFolder(path).getRawLocation().toFile();
}
private static void clearMarkersOn(IResource resource, boolean alsoDeleteBackendErrors) {
clearMarkersOn(resource, alsoDeleteBackendErrors, false);
}
private static void clearMarkersOn(IResource resource, boolean alsoDeleteBackendErrors, boolean onlyBackendErrors) {
try {
if (!onlyBackendErrors) {
resource.deleteMarkers(TASK_MARKER_ID, false, DEPTH_INFINITE);
resource.deleteMarkers(PROBLEM_MARKER_ID, true, DEPTH_INFINITE);
}
if (alsoDeleteBackendErrors) {
resource.deleteMarkers(PROBLEM_MARKER_ID + ".backend", true, DEPTH_INFINITE);
for (IMarker javaMarker : resource.findMarkers(IJavaModelMarker.JAVA_MODEL_PROBLEM_MARKER, false, IResource.DEPTH_INFINITE)) {
if (CeylonPlugin.PLUGIN_ID.equals(javaMarker.getAttribute(IMarker.SOURCE_ID))) {
javaMarker.delete();
}
}
}
if (!onlyBackendErrors) {
//these are actually errors from the Ceylon compiler, but
//we did not bother creating a separate annotation type!
resource.deleteMarkers(IJavaModelMarker.BUILDPATH_PROBLEM_MARKER, true, DEPTH_INFINITE);
}
}
catch (CoreException e) {
e.printStackTrace();
}
}
private static void clearProjectMarkers(IProject project, boolean nonBackendMarkers, boolean backendMarkers) {
//project.deleteMarkers(IJavaModelMarker.BUILDPATH_PROBLEM_MARKER, true, DEPTH_ZERO);
if (nonBackendMarkers) {
try {
project.deleteMarkers(PROBLEM_MARKER_ID, true, DEPTH_ZERO);
} catch (CoreException e) {
e.printStackTrace();
}
}
if (backendMarkers) {
try {
project.deleteMarkers(PROBLEM_MARKER_ID + ".backend", true, DEPTH_ZERO);
} catch (CoreException e) {
e.printStackTrace();
}
}
}
private static void clearMarkersOn(Collection<IFile> files, boolean alsoDeleteBackendErrors, boolean onlyBackendErrors) {
for(IFile file: files) {
clearMarkersOn(file, alsoDeleteBackendErrors, onlyBackendErrors);
}
}
private static void clearMarkersOn(Collection<IFile> files, boolean alsoDeleteBackendErrors) {
clearMarkersOn(files, alsoDeleteBackendErrors, false);
}
/*private void dumpSourceList(Collection<IFile> sourcesToCompile) {
MessageConsoleStream consoleStream= getConsoleStream();
for(Iterator<IFile> iter= sourcesToCompile.iterator(); iter.hasNext(); ) {
IFile srcFile= iter.next();
consoleStream.println(" " + srcFile.getFullPath());
}
}*/
// protected static MessageConsoleStream getConsoleStream() {
// return findConsole().newMessageStream();
// }
//
// protected static MessageConsoleStream getConsoleErrorStream() {
// final MessageConsoleStream stream = findConsole().newMessageStream();
// //TODO: all this, just to get the color red? can that be right??
// /*try {
// getWorkbench().getProgressService().runInUI(getWorkbench().getWorkbenchWindows()[0],
// new IRunnableWithProgress() {
//
// @Override
// public void run(IProgressMonitor monitor) throws InvocationTargetException,
// InterruptedException {
// stream.setColor(getWorkbench().getDisplay().getSystemColor(SWT.COLOR_RED));
// }
// }, null);
// }
// catch (Exception e) {
// e.printStackTrace();
// }*/
// return stream;
// }
//
// private String timedMessage(String message) {
// long elapsedTimeMs = (System.nanoTime() - startTime) / 1000000;
// return String.format("[%1$10d] %2$s", elapsedTimeMs, message);
// }
// /**
// * Find or create the console with the given name
// * @param consoleName
// */
// protected static MessageConsole findConsole() {
// String consoleName = CEYLON_CONSOLE;
// MessageConsole myConsole= null;
// final IConsoleManager consoleManager= ConsolePlugin.getDefault().getConsoleManager();
// IConsole[] consoles= consoleManager.getConsoles();
// for(int i= 0; i < consoles.length; i++) {
// IConsole console= consoles[i];
// if (console.getName().equals(consoleName))
// myConsole= (MessageConsole) console;
// }
// if (myConsole == null) {
// myConsole= new MessageConsole(consoleName,
// CeylonPlugin.getInstance().getImageRegistry()
// .getDescriptor(CeylonResources.BUILDER));
// consoleManager.addConsoles(new IConsole[] { myConsole });
// }
//// consoleManager.showConsoleView(myConsole);
// return myConsole;
// }
private static void addTaskMarkers(IFile file, List<CommonToken> tokens) {
// clearTaskMarkersOn(file);
for (CommonToken token : tokens) {
if (token.getType() == CeylonLexer.LINE_COMMENT || token.getType() == CeylonLexer.MULTI_COMMENT) {
CeylonTaskUtil.addTaskMarkers(token, file);
}
}
}
@Override
protected void clean(IProgressMonitor monitor) throws CoreException {
super.clean(monitor);
IProject project = getProject();
// startTime = System.nanoTime();
// getConsoleStream().println("\n===================================");
// getConsoleStream().println(timedMessage("Starting Ceylon clean on project: " + project.getName()));
// getConsoleStream().println("-----------------------------------");
cleanupModules(monitor, project);
cleanupJdtClasses(monitor, project);
monitor.subTask("Clearing project and source markers for project " + project.getName());
clearProjectMarkers(project, true, true);
clearMarkersOn(project, true);
// getConsoleStream().println("-----------------------------------");
// getConsoleStream().println(timedMessage("End Ceylon clean on project: " + project.getName()));
// getConsoleStream().println("===================================");
}
private void cleanupJdtClasses(IProgressMonitor monitor, IProject project) {
if (isExplodeModulesEnabled(project)) {
monitor.subTask("Cleaning exploded modules directory of project " + project.getName());
final File ceylonOutputDirectory = getCeylonClassesOutputDirectory(project);
new RepositoryLister(Arrays.asList(".*")).list(ceylonOutputDirectory,
new RepositoryLister.Actions() {
@Override
public void doWithFile(File path) {
path.delete();
}
public void exitDirectory(File path) {
if (path.list().length == 0 &&
!path.equals(ceylonOutputDirectory)) {
path.delete();
}
}
});
}
}
private void cleanupModules(IProgressMonitor monitor, IProject project) {
final File modulesOutputDirectory = getCeylonModulesOutputDirectory(project);
if (modulesOutputDirectory != null) {
monitor.subTask("Cleaning existing artifacts of project " + project.getName());
List<String> extensionsToDelete = Arrays.asList(".jar", ".js", ".car", ".src", ".sha1");
new RepositoryLister(extensionsToDelete).list(modulesOutputDirectory,
new RepositoryLister.Actions() {
@Override
public void doWithFile(File path) {
path.delete();
}
public void exitDirectory(File path) {
if (path.list().length == 0 &&
!path.equals(modulesOutputDirectory)) {
path.delete();
}
}
});
}
}
public static IFile getFile(PhasedUnit phasedUnit) {
return ((IFileVirtualFile) phasedUnit.getUnitFile()).getFile();
}
// TODO think: doRefresh(file.getParent()); // N.B.: Assumes all
// generated files go into parent folder
private static List<IFile> getProjectFiles(IProject project) {
return projectFiles.get(project);
}
public static TypeChecker getProjectTypeChecker(IProject project) {
return typeCheckers.get(project);
}
public static Modules getProjectModules(IProject project) {
TypeChecker typeChecker = getProjectTypeChecker(project);
if (typeChecker == null) {
return null;
}
return typeChecker.getContext().getModules();
}
public static Collection<JDTModule> getProjectExternalModules(IProject project) {
TypeChecker typeChecker = getProjectTypeChecker(project);
if (typeChecker == null) {
return Collections.emptyList();
}
List<JDTModule> modules = new ArrayList<>();
for (Module m : typeChecker.getContext().getModules().getListOfModules()) {
if (m instanceof JDTModule) {
JDTModule module = (JDTModule) m;
if (! module.isProjectModule()) {
modules.add(module);
}
}
}
return modules;
}
public static Collection<Module> getProjectSourceModules(IProject project) {
List<Module> moduleList = new ArrayList<Module>();
moduleList.addAll(getProjectDeclaredSourceModules(project));
Modules projectModules = getProjectModules(project);
if (projectModules != null) {
moduleList.add(projectModules.getDefaultModule());
}
return moduleList;
}
public static Collection<Module> getProjectDeclaredSourceModules(IProject project) {
TypeChecker typeChecker = getProjectTypeChecker(project);
if (typeChecker == null) {
return Collections.emptyList();
}
List<Module> modules = new ArrayList<>();
for (Module m : typeChecker.getPhasedUnits().getModuleManager().getCompiledModules()) {
if (m instanceof JDTModule) {
JDTModule module = (JDTModule) m;
if (module.isProjectModule()) {
modules.add(module);
}
}
}
return modules;
}
public static RepositoryManager getProjectRepositoryManager(IProject project) {
RepositoryManager repoManager = projectRepositoryManagers.get(project);
if (repoManager == null) {
try {
repoManager = resetProjectRepositoryManager(project);
} catch(CoreException e) {
e.printStackTrace();
}
}
return repoManager;
}
public static RepositoryManager resetProjectRepositoryManager(IProject project) throws CoreException {
RepositoryManager repositoryManager = repoManager()
.offline(CeylonProjectConfig.get(project).isOffline())
.cwd(project.getLocation().toFile())
.systemRepo(getInterpolatedCeylonSystemRepo(project))
.extraUserRepos(getReferencedProjectsOutputRepositories(project))
.logger(new EclipseLogger())
.isJDKIncluded(true)
.buildManager();
projectRepositoryManagers.put(project, repositoryManager);
return repositoryManager;
}
public static Collection<IProject> getProjects() {
return typeCheckers.keySet();
}
public static Collection<TypeChecker> getTypeCheckers() {
return typeCheckers.values();
}
public static void removeProject(IProject project) {
typeCheckers.remove(project);
projectFiles.remove(project);
modelStates.remove(project);
containersInitialized.remove(project);
projectRepositoryManagers.remove(project);
CeylonProjectConfig.remove(project);
JavaProjectStateMirror.cleanup(project);
projectModuleDependencies.remove(project);
}
public static List<IFolder> getSourceFolders(IProject project) {
//TODO: is the call to JavaCore.create() very expensive??
List<IPath> folderPaths = getSourceFolders(JavaCore.create(project));
List<IFolder> sourceFolders = new ArrayList<>(folderPaths.size());
for (IPath path : folderPaths) {
IResource r = project.findMember(path.makeRelativeTo(project.getFullPath()));
if (r instanceof IFolder) {
sourceFolders.add((IFolder) r);
}
}
return sourceFolders;
}
/**
* Read the IJavaProject classpath configuration and populate the ISourceProject's
* build path accordingly.
*/
public static List<IPath> getSourceFolders(IJavaProject javaProject) {
if (javaProject.exists()) {
try {
List<IPath> projectSourceFolders = new ArrayList<IPath>();
for (IClasspathEntry entry: javaProject.getRawClasspath()) {
IPath path = entry.getPath();
if (isCeylonSourceEntry(entry)) {
projectSourceFolders.add(path);
}
}
return projectSourceFolders;
}
catch (JavaModelException e) {
e.printStackTrace();
}
}
return Collections.emptyList();
}
public static List<IFolder> getResourceFolders(IProject project) {
LinkedList<IFolder> resourceFolders = new LinkedList<>();
if (project.exists()) {
for (String resourceInConfig : CeylonProjectConfig.get(project).getResourceDirectories()) {
class FolderHolder {
IFolder resourceFolder;
}
final FolderHolder folderHolder = new FolderHolder();
final IPath path = Path.fromOSString(resourceInConfig);
if (! path.isAbsolute()) {
folderHolder.resourceFolder = project.getFolder(path);
} else {
try {
project.accept(new IResourceVisitor() {
@Override
public boolean visit(IResource resource)
throws CoreException {
if (resource instanceof IFolder &&
resource.isLinked() &&
resource.getLocation() != null &&
resource.getLocation().equals(path)) {
folderHolder.resourceFolder = (IFolder) resource;
return false;
}
return resource instanceof IFolder ||
resource instanceof IProject;
}
});
}
catch (CoreException e) {
e.printStackTrace();
}
}
if (folderHolder.resourceFolder != null &&
folderHolder.resourceFolder.exists()) {
resourceFolders.add(folderHolder.resourceFolder);
}
}
}
return resourceFolders;
}
public static List<IFolder> getRootFolders(IProject project) {
LinkedList<IFolder> rootFolders = new LinkedList<>();
rootFolders.addAll(getSourceFolders(project));
rootFolders.addAll(getResourceFolders(project));
return rootFolders;
}
public static boolean isCeylonSourceEntry(IClasspathEntry entry) {
if (entry.getEntryKind()!=IClasspathEntry.CPE_SOURCE) {
return false;
}
for (IPath exclusionPattern : entry.getExclusionPatterns()) {
if (exclusionPattern.toString().endsWith(".ceylon")) {
return false;
}
}
return true;
}
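// Minimal usage sketch (the entry below is hypothetical, built with the real
// JDT API JavaCore.newSourceEntry(IPath, IPath[])): a source folder that
// excludes all Ceylon files is not treated as a Ceylon source entry.
//     IClasspathEntry entry = JavaCore.newSourceEntry(
//             new Path("/proj/src"),
//             new IPath[] { new Path("**/*.ceylon") });
//     isCeylonSourceEntry(entry); // -> false, exclusion ends with ".ceylon"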
public static IFolder getRootFolder(IFolder folder) {
if (folder.isLinked(IResource.CHECK_ANCESTORS)) {
return null;
}
if (! folder.exists()) {
for (IFolder sourceFolder: getSourceFolders(folder.getProject())) {
if (sourceFolder.getFullPath().isPrefixOf(folder.getFullPath())) {
return sourceFolder;
}
}
for (IFolder resourceFolder: getResourceFolders(folder.getProject())) {
if (resourceFolder.getFullPath().isPrefixOf(folder.getFullPath())) {
return resourceFolder;
}
}
return null;
}
try {
Object property = folder.getSessionProperty(RESOURCE_PROPERTY_ROOT_FOLDER);
if (property instanceof IFolder) {
return (IFolder) property;
}
} catch (CoreException e) {
CeylonPlugin.getInstance().getLog().log(new Status(Status.WARNING, CeylonPlugin.PLUGIN_ID, "Unexpected exception", e));
}
return null;
}
public static RootFolderType getRootFolderType(IPackageFragmentRoot pfr) {
IResource resource = null;
try {
resource = pfr.getCorrespondingResource();
} catch (JavaModelException e) {
}
if (resource instanceof IFolder) {
return getRootFolderType((IFolder) resource);
}
return null;
}
public static boolean isSourceFolder(IPackageFragmentRoot pfr) {
return RootFolderType.SOURCE.equals(getRootFolderType(pfr));
}
public static boolean isResourceFolder(IPackageFragmentRoot pfr) {
return RootFolderType.RESOURCE.equals(getRootFolderType(pfr));
}
public static boolean isInSourceFolder(IPackageFragment pf) {
return RootFolderType.SOURCE.equals(getRootFolderType(pf));
}
public static boolean isInResourceFolder(IPackageFragment pf) {
return RootFolderType.RESOURCE.equals(getRootFolderType(pf));
}
public static RootFolderType getRootFolderType(IPackageFragment pf) {
IResource resource = null;
try {
resource = pf.getCorrespondingResource();
} catch (JavaModelException e) {
}
if (resource instanceof IFolder) {
return getRootFolderType((IFolder) resource);
}
return null;
}
public static IFolder getRootFolder(IFile file) {
if (file.getParent() instanceof IFolder) {
return getRootFolder((IFolder) file.getParent());
}
return null;
}
public static RootFolderType getRootFolderType(IFolder folder) {
IFolder rootFolder = getRootFolder(folder);
if (rootFolder == null) {
return null;
}
try {
Object property = rootFolder.getSessionProperty(RESOURCE_PROPERTY_ROOT_FOLDER_TYPE);
if (property instanceof RootFolderType) {
return (RootFolderType) property;
}
} catch (CoreException e) {
CeylonPlugin.getInstance().getLog().log(new Status(Status.WARNING, CeylonPlugin.PLUGIN_ID, "Unexpected exception", e));
}
return null;
}
public static RootFolderType getRootFolderType(IFile file) {
IFolder rootFolder = getRootFolder(file);
if (rootFolder == null) {
return null;
}
return getRootFolderType(rootFolder);
}
public static boolean isInSourceFolder(IFile file) {
return getRootFolderType(file) == RootFolderType.SOURCE;
}
public static String getPackageName(IResource resource) {
if (resource instanceof IFolder) {
return getPackage((IFolder) resource).getQualifiedNameString();
}
if (resource instanceof IFile) {
return getPackage((IFile) resource).getQualifiedNameString();
}
return null;
}
public static Package getPackage(IFolder resource) {
if (resource.isLinked(IResource.CHECK_ANCESTORS)) {
return null;
}
Object property = null;
if (! resource.exists()) {
IFolder rootFolder = getRootFolder(resource);
if (rootFolder != null) {
IPath rootRelativePath = resource.getFullPath().makeRelativeTo(rootFolder.getFullPath());
JDTModelLoader modelLoader = getProjectModelLoader(resource.getProject());
if (modelLoader != null) {
return modelLoader.findPackage(Util.formatPath(Arrays.asList(rootRelativePath.segments()), '.'));
}
}
return null;
}
try {
property = resource.getSessionProperty(RESOURCE_PROPERTY_PACKAGE_MODEL);
} catch (CoreException e) {
CeylonPlugin.getInstance().getLog().log(new Status(Status.WARNING, CeylonPlugin.PLUGIN_ID, "Unexpected exception", e));
}
if (property instanceof WeakReference<?>) {
Object pkg = ((WeakReference<?>) property).get();
if (pkg instanceof Package) {
return (Package) pkg;
}
}
return null;
}
public static Package getPackage(IFile file) {
if (file.getParent() instanceof IFolder) {
return getPackage((IFolder) file.getParent());
}
return null;
}
public static Package getPackage(VirtualFile virtualFile) {
if (virtualFile instanceof IFileVirtualFile) {
return getPackage(((IFileVirtualFile)virtualFile).getFile());
}
if (virtualFile instanceof IFolderVirtualFile) {
return getPackage(((IFolderVirtualFile)virtualFile).getFolder());
}
String virtualPath = virtualFile.getPath();
if (virtualPath.contains("!/")) { // TODO : this test could be replaced by an instanceof if the ZipEntryVirtualFile was public
CeylonUnit ceylonUnit = getUnit(virtualFile);
if (ceylonUnit != null) {
return ceylonUnit.getPackage();
}
}
return null;
}
public static SourceFile getUnit(VirtualFile virtualFile) {
if (virtualFile instanceof IFileVirtualFile) {
IFile file = ((IFileVirtualFile)virtualFile).getFile();
Package p = getPackage(file);
if (p != null) {
for (Unit u : p.getUnits()) {
if (u instanceof SourceFile && u.getFilename().equals(file.getName())) {
return (SourceFile) u;
}
}
}
return null;
}
String virtualPath = virtualFile.getPath();
if (virtualPath.contains("!/")) { // TODO : this test could be replaced by an instanceof if the ZipEntryVirtualFile was public
for (IProject p : getProjects()) {
JDTModuleManager moduleManager = getProjectModuleManager(p);
if (moduleManager != null) {
JDTModule archiveModule = moduleManager.getArchiveModuleFromSourcePath(virtualPath);
if (archiveModule != null) {
ExternalPhasedUnit pu = archiveModule.getPhasedUnit(virtualFile);
if (pu != null) {
return pu.getUnit();
}
}
}
}
}
return null;
}
public static IResourceAware getUnit(IFile file) {
Package p = getPackage(file);
if (p != null) {
for (Unit u: p.getUnits()) {
if (u instanceof IResourceAware) {
if (u.getFilename().equals(file.getName())) {
return (IResourceAware) u;
}
}
}
}
return null;
}
public static Package getPackage(IPackageFragment packageFragment) {
PackageFragment pkg = (PackageFragment) packageFragment;
try {
IFolder srcPkgFolder = (IFolder) pkg.getCorrespondingResource();
if (srcPkgFolder != null) {
return getPackage(srcPkgFolder);
}
} catch (JavaModelException e) {
}
IPackageFragmentRoot root = pkg.getPackageFragmentRoot();
Modules projectModules = getProjectModules(packageFragment.getJavaProject().getProject());
if (projectModules == null) {
return null;
}
for (Module m : projectModules.getListOfModules()) {
if (m instanceof JDTModule && ! m.getNameAsString().equals(Module.DEFAULT_MODULE_NAME)) {
JDTModule module = (JDTModule) m;
for (IPackageFragmentRoot moduleRoot : module.getPackageFragmentRoots()) {
if (root.getPath().equals(moduleRoot.getPath())) {
Package result = module.getDirectPackage(packageFragment.getElementName());
if (result != null) {
return result;
}
}
}
}
}
JDTModule defaultModule = (JDTModule) projectModules.getDefaultModule();
for (IPackageFragmentRoot moduleRoot : defaultModule.getPackageFragmentRoots()) {
if (root.getPath().equals(moduleRoot.getPath())) {
Package result = defaultModule.getDirectPackage(packageFragment.getElementName());
if (result != null) {
return result;
}
}
}
return null;
}
public static JDTModule asSourceModule(IFolder moduleFolder) {
Package p = getPackage(moduleFolder);
if (p != null) {
Module m = p.getModule();
if (m instanceof JDTModule && m.getNameAsString().equals(p.getNameAsString())) {
return (JDTModule) m;
}
}
return null;
}
public static JDTModule asSourceModule(IPackageFragment sourceModuleFragment) {
IFolder moduleFolder;
try {
moduleFolder = (IFolder) sourceModuleFragment.getCorrespondingResource();
if (moduleFolder != null) {
return asSourceModule(moduleFolder);
}
} catch (JavaModelException e) {
}
return null;
}
public static JDTModule getModule(IFolder moduleFolder) {
Package p = getPackage(moduleFolder);
if (p != null) {
Module m = p.getModule();
if (m instanceof JDTModule) {
return (JDTModule) m;
}
}
return null;
}
public static JDTModule getModule(IPackageFragment packageFragment) {
Package p = getPackage(packageFragment);
if (p != null) {
Module m = p.getModule();
if (m instanceof JDTModule) {
return (JDTModule) m;
}
}
return null;
}
public static IJavaModelAware getUnit(IJavaElement javaElement) {
IOpenable openable = javaElement.getOpenable();
if (openable instanceof ITypeRoot) {
Package p = getPackage((IPackageFragment)((ITypeRoot)openable).getParent());
if (p != null) {
String className = ((ITypeRoot)openable).getElementName();
if (className.equals(Naming.PACKAGE_DESCRIPTOR_CLASS_NAME+".class") ||
className.equals(Naming.PACKAGE_DESCRIPTOR_CLASS_NAME.substring(1)+".class")) {
Unit packageUnit = p.getUnit();
if (packageUnit instanceof IJavaModelAware && ((IJavaModelAware) packageUnit).getTypeRoot().equals(openable)) {
return (IJavaModelAware) packageUnit;
}
}
if (className.equals(Naming.MODULE_DESCRIPTOR_CLASS_NAME+".class") ||
className.equals(Naming.OLD_MODULE_DESCRIPTOR_CLASS_NAME+".class")) {
Unit moduleUnit = p.getModule().getUnit();
if (moduleUnit instanceof IJavaModelAware && ((IJavaModelAware) moduleUnit).getTypeRoot().equals(openable)) {
return (IJavaModelAware) moduleUnit;
}
}
for (Declaration d : p.getMembers()) {
Unit u = d.getUnit();
if (u instanceof IJavaModelAware) {
if (u.getFilename().equals(((ITypeRoot) openable).getElementName())) {
return (IJavaModelAware) u;
}
}
}
}
}
return null;
}
private void cleanRemovedFilesFromOutputs(Collection<IFile> filesToRemove,
IProject project) {
if (filesToRemove.size() == 0) {
return;
}
Set<File> moduleJars = new HashSet<File>();
for (IFile file : filesToRemove) {
IFolder rootFolder = getRootFolder(file);
if (rootFolder == null) {
return;
}
String relativeFilePath = file.getProjectRelativePath().makeRelativeTo(rootFolder.getProjectRelativePath()).toString();
Package pkg = getPackage((IFolder)file.getParent());
if (pkg == null) {
return;
}
Module module = pkg.getModule();
TypeChecker typeChecker = typeCheckers.get(project);
if (typeChecker == null) {
return;
}
final File modulesOutputDirectory = getCeylonModulesOutputDirectory(project);
boolean explodeModules = isExplodeModulesEnabled(project);
final File ceylonOutputDirectory = explodeModules ?
getCeylonClassesOutputDirectory(project) : null;
File moduleDir = getModulePath(modulesOutputDirectory, module);
boolean fileIsResource = isResourceFile(file);
//Remove the classes belonging to the source file from the
//module archive and from the JDTClasses directory
File moduleJar = new File(moduleDir, getModuleArchiveName(module));
if(moduleJar.exists()){
moduleJars.add(moduleJar);
try {
List<String> entriesToDelete = new ArrayList<String>();
ZipFile zipFile = new ZipFile(moduleJar);
Properties mapping = CarUtils.retrieveMappingFile(zipFile);
if (fileIsResource) {
entriesToDelete.add(relativeFilePath);
} else {
for (String className : mapping.stringPropertyNames()) {
String sourceFile = mapping.getProperty(className);
if (relativeFilePath.equals(sourceFile)) {
entriesToDelete.add(className);
}
}
}
for (String entryToDelete : entriesToDelete) {
zipFile.removeFile(entryToDelete);
if (explodeModules) {
new File(ceylonOutputDirectory,
entryToDelete.replace('/', File.separatorChar))
.delete();
}
}
} catch (ZipException e) {
e.printStackTrace();
}
}
if (!fileIsResource) {
//Remove the source file from the source archive
File moduleSrc = new File(moduleDir, getSourceArchiveName(module));
if(moduleSrc.exists()){
moduleJars.add(moduleSrc);
try {
ZipFile zipFile = new ZipFile(moduleSrc);
FileHeader fileHeader = zipFile.getFileHeader(relativeFilePath);
if(fileHeader != null){
zipFile.removeFile(fileHeader);
}
} catch (ZipException e) {
e.printStackTrace();
}
}
}
}
// final com.sun.tools.javac.util.Context dummyContext = new com.sun.tools.javac.util.Context();
class ConsoleLog implements Logger {
PrintWriter writer;
ConsoleLog() {
writer = new PrintWriter(System.out); //new PrintWriter(getConsoleStream()));
}
@Override
public void error(String str) {
writer.println("Error: " + str);
}
@Override
public void warning(String str) {
writer.println("Warning: " + str);
}
@Override
public void info(String str) {
}
@Override
public void debug(String str) {
}
}
ConsoleLog log = new ConsoleLog();
for (File moduleJar: moduleJars) {
ShaSigner.sign(moduleJar, log, false);
}
}
private void cleanChangedFilesFromExplodedDirectory(Collection<IFile> changedFiles,
IProject project) {
if (changedFiles.size() == 0) {
return;
}
if (! isExplodeModulesEnabled(project)) {
return;
}
for (IFile file : changedFiles) {
IFolder rootFolder = getRootFolder(file);
if (rootFolder == null) {
return;
}
if (isResourceFile(file)) {
return;
}
String relativeFilePath = file.getProjectRelativePath().makeRelativeTo(rootFolder.getProjectRelativePath()).toString();
Package pkg = getPackage((IFolder)file.getParent());
if (pkg == null) {
return;
}
Module module = pkg.getModule();
TypeChecker typeChecker = typeCheckers.get(project);
if (typeChecker == null) {
return;
}
final File modulesOutputDirectory = getCeylonModulesOutputDirectory(project);
final File ceylonOutputDirectory = getCeylonClassesOutputDirectory(project);
File moduleDir = getModulePath(modulesOutputDirectory, module);
//Remove the classes belonging to the source file
//from the .exploded directory
File moduleJar = new File(moduleDir, getModuleArchiveName(module));
if(moduleJar.exists()){
try {
List<String> entriesToDelete = new ArrayList<String>();
ZipFile zipFile = new ZipFile(moduleJar);
Properties mapping = CarUtils.retrieveMappingFile(zipFile);
for (String className : mapping.stringPropertyNames()) {
String sourceFile = mapping.getProperty(className);
if (relativeFilePath.equals(sourceFile)) {
entriesToDelete.add(className);
}
}
for (String entryToDelete : entriesToDelete) {
new File(ceylonOutputDirectory,
entryToDelete.replace('/', File.separatorChar))
.delete();
}
} catch (ZipException e) {
e.printStackTrace();
}
}
}
}
private static File getCeylonClassesOutputDirectory(IProject project) {
return getCeylonClassesOutputFolder(project)
.getRawLocation().toFile();
}
public static IFolder getCeylonClassesOutputFolder(IProject project) {
return project.getFolder(CEYLON_CLASSES_FOLDER_NAME);
}
public static boolean isInCeylonClassesOutputFolder(IPath path) {
//TODO: this is crap!
return path.lastSegment().equals(CEYLON_CLASSES_FOLDER_NAME);
}
public static File getCeylonModulesOutputDirectory(IProject project) {
return getCeylonModulesOutputFolder(project).getRawLocation().toFile();
}
public static IFolder getCeylonModulesOutputFolder(IProject project) {
IPath path = CeylonProjectConfig.get(project).getOutputRepoPath();
return project.getFolder(path.removeFirstSegments(1));
}
public static String getCeylonSystemRepo(IProject project) {
String systemRepo = (String) getBuilderArgs(project).get("systemRepo");
if (systemRepo == null || systemRepo.isEmpty()) {
systemRepo = "${ceylon.repo}";
}
return systemRepo;
}
public static String getInterpolatedCeylonSystemRepo(IProject project) {
return interpolateVariablesInRepositoryPath(getCeylonSystemRepo(project));
}
public static String[] getDefaultUserRepositories() {
return new String[]{
"${ceylon.repo}",
"${user.home}/.ceylon/repo",
Constants.REPO_URL_CEYLON
};
}
public static String interpolateVariablesInRepositoryPath(String repoPath) {
String userHomePath = System.getProperty("user.home");
String pluginRepoPath = CeylonPlugin.getInstance().getCeylonRepository().getAbsolutePath();
return repoPath.replace("${user.home}", userHomePath).
replace("${ceylon.repo}", pluginRepoPath);
}
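// Worked example (paths are hypothetical): with user.home=/home/alice and the
// plugin repository at /opt/ceylon/repo,
//     interpolateVariablesInRepositoryPath("${user.home}/.ceylon/repo")
// returns "/home/alice/.ceylon/repo", and "${ceylon.repo}" alone becomes
// "/opt/ceylon/repo".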
/**
* String representation for debugging purposes
*/
public String toString() {
return this.getProject() == null ?
"CeylonBuilder for unknown project" :
"CeylonBuilder for " + getProject().getName();
}
public static void setContainerInitialized(IProject project) {
containersInitialized.add(project);
}
public static boolean isContainerInitialized(IProject project) {
return containersInitialized.contains(project);
}
public static boolean allClasspathContainersInitialized() {
for (IProject project : ResourcesPlugin.getWorkspace().getRoot().getProjects()) {
if (project.isAccessible() && CeylonNature.isEnabled(project)
&& ! containersInitialized.contains(project)) {
return false;
}
}
return true;
}
public static ModuleDependencies getModuleDependenciesForProject(
IProject project) {
return projectModuleDependencies.get(project);
}
}
| 1no label
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_core_builder_CeylonBuilder.java
|
259 |
@TestMethodProviders({
LuceneJUnit3MethodProvider.class,
JUnit4MethodProvider.class
})
@Listeners({
ReproduceInfoPrinter.class
})
@RunWith(value = com.carrotsearch.randomizedtesting.RandomizedRunner.class)
@SuppressCodecs(value = "Lucene3x")
// NOTE: this class is in o.a.lucene.util since it uses some test-framework
// classes that are package-private and didn't make sense to copy
public abstract class AbstractRandomizedTest extends RandomizedTest {
/**
* Annotation for integration tests
*/
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@TestGroup(enabled = true, sysProperty = SYSPROP_INTEGRATION)
public @interface IntegrationTests {
}
// --------------------------------------------------------------------
// Test groups, system properties and other annotations modifying tests
// --------------------------------------------------------------------
/**
* @see #ignoreAfterMaxFailures
*/
public static final String SYSPROP_MAXFAILURES = "tests.maxfailures";
/**
* @see #ignoreAfterMaxFailures
*/
public static final String SYSPROP_FAILFAST = "tests.failfast";
public static final String SYSPROP_INTEGRATION = "tests.integration";
// -----------------------------------------------------------------
// Truly immutable fields and constants, initialized once and valid
// for all suites ever since.
// -----------------------------------------------------------------
/**
* Use this constant when creating Analyzers and any other version-dependent stuff.
* <p><b>NOTE:</b> Change this when development starts for new Lucene version:
*/
public static final Version TEST_VERSION_CURRENT = Lucene.VERSION;
/**
* True if and only if tests are run in verbose mode. If this flag is false
* tests are not expected to print any messages.
*/
public static final boolean VERBOSE = systemPropertyAsBoolean("tests.verbose", false);
/**
* A random multiplier which you should use when writing random tests:
* multiply it by the number of iterations to scale your tests (for nightly builds).
*/
public static final int RANDOM_MULTIPLIER = systemPropertyAsInt("tests.multiplier", 1);
/**
* TODO: javadoc?
*/
public static final String DEFAULT_LINE_DOCS_FILE = "europarl.lines.txt.gz";
/**
* the line file used by LineFileDocs
*/
public static final String TEST_LINE_DOCS_FILE = System.getProperty("tests.linedocsfile", DEFAULT_LINE_DOCS_FILE);
/**
* Create indexes in this directory, optimally use a subdir, named after the test
*/
public static final File TEMP_DIR;
static {
String s = System.getProperty("tempDir", System.getProperty("java.io.tmpdir"));
if (s == null)
throw new RuntimeException("To run tests, you need to define system property 'tempDir' or 'java.io.tmpdir'.");
TEMP_DIR = new File(s);
TEMP_DIR.mkdirs();
}
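// Usage note (flag value hypothetical): the directory is resolved once per
// JVM, so running with -DtempDir=/var/tmp/lucene places all test indexes
// there; without the property, java.io.tmpdir is used as a fallback.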
/**
* These property keys will be ignored in verification of altered properties.
*
* @see SystemPropertiesInvariantRule
* @see #ruleChain
* @see #classRules
*/
private static final String[] IGNORED_INVARIANT_PROPERTIES = {
"user.timezone", "java.rmi.server.randomIDs", "sun.nio.ch.bugLevel"
};
// -----------------------------------------------------------------
// Fields initialized in class or instance rules.
// -----------------------------------------------------------------
// -----------------------------------------------------------------
// Class level (suite) rules.
// -----------------------------------------------------------------
/**
* Stores the class currently under test.
*/
private static final TestRuleStoreClassName classNameRule;
/**
* Class environment setup rule.
*/
static final TestRuleSetupAndRestoreClassEnv classEnvRule;
/**
* Suite failure marker (any error in the test or suite scope).
*/
public final static TestRuleMarkFailure suiteFailureMarker =
new TestRuleMarkFailure();
/**
* Ignore tests after hitting a designated number of initial failures. This
* is truly a "static" global singleton since it needs to span the lifetime of all
* test classes running inside this JVM (it cannot be part of a class rule).
* <p/>
* <p>This poses some problems for the test framework's tests because these sometimes
* trigger intentional failures which add up to the global count. This field contains
* a (possibly) changing reference to {@link TestRuleIgnoreAfterMaxFailures} and we
* dispatch to its current value from the {@link #classRules} chain using {@link TestRuleDelegate}.
*/
private static final AtomicReference<TestRuleIgnoreAfterMaxFailures> ignoreAfterMaxFailuresDelegate;
private static final TestRule ignoreAfterMaxFailures;
static {
int maxFailures = systemPropertyAsInt(SYSPROP_MAXFAILURES, Integer.MAX_VALUE);
boolean failFast = systemPropertyAsBoolean(SYSPROP_FAILFAST, false);
if (failFast) {
if (maxFailures == Integer.MAX_VALUE) {
maxFailures = 1;
} else {
Logger.getLogger(LuceneTestCase.class.getSimpleName()).warning(
"Property '" + SYSPROP_MAXFAILURES + "'=" + maxFailures + ", 'failfast' is" +
" ignored.");
}
}
ignoreAfterMaxFailuresDelegate =
new AtomicReference<TestRuleIgnoreAfterMaxFailures>(
new TestRuleIgnoreAfterMaxFailures(maxFailures));
ignoreAfterMaxFailures = TestRuleDelegate.of(ignoreAfterMaxFailuresDelegate);
}
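// Usage sketch (command-line flags hypothetical) showing how the two
// properties interact in the block above:
//     -Dtests.failfast=true                       -> maxFailures becomes 1
//     -Dtests.maxfailures=3                       -> stop after 3 failures
//     -Dtests.failfast=true -Dtests.maxfailures=3 -> failfast ignored (warning)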
/**
* Temporarily substitute the global {@link TestRuleIgnoreAfterMaxFailures}. See
* {@link #ignoreAfterMaxFailuresDelegate} for some explanation why this method
* is needed.
*/
public static TestRuleIgnoreAfterMaxFailures replaceMaxFailureRule(TestRuleIgnoreAfterMaxFailures newValue) {
return ignoreAfterMaxFailuresDelegate.getAndSet(newValue);
}
/**
* Max 10mb of static data stored in a test suite class after the suite is complete.
* Prevents static data structures leaking and causing OOMs in subsequent tests.
*/
private final static long STATIC_LEAK_THRESHOLD = 10 * 1024 * 1024;
/**
* By-name list of ignored types like loggers etc.
*/
private final static Set<String> STATIC_LEAK_IGNORED_TYPES =
Collections.unmodifiableSet(new HashSet<String>(Arrays.asList(
EnumSet.class.getName())));
private final static Set<Class<?>> TOP_LEVEL_CLASSES =
Collections.unmodifiableSet(new HashSet<Class<?>>(Arrays.asList(
AbstractRandomizedTest.class, LuceneTestCase.class,
ElasticsearchIntegrationTest.class, ElasticsearchTestCase.class)));
/**
* This controls how suite-level rules are nested. It is important that _all_ rules declared
* in {@link LuceneTestCase} are executed in proper order if they depend on each
* other.
*/
@ClassRule
public static TestRule classRules = RuleChain
.outerRule(new TestRuleIgnoreTestSuites())
.around(ignoreAfterMaxFailures)
.around(suiteFailureMarker)
.around(new TestRuleAssertionsRequired())
.around(new StaticFieldsInvariantRule(STATIC_LEAK_THRESHOLD, true) {
@Override
protected boolean accept(java.lang.reflect.Field field) {
// Don't count known classes that consume memory once.
if (STATIC_LEAK_IGNORED_TYPES.contains(field.getType().getName())) {
return false;
}
// Don't count references from ourselves, we're top-level.
if (TOP_LEVEL_CLASSES.contains(field.getDeclaringClass())) {
return false;
}
return super.accept(field);
}
})
.around(new NoClassHooksShadowingRule())
.around(new NoInstanceHooksOverridesRule() {
@Override
protected boolean verify(Method key) {
String name = key.getName();
return !(name.equals("setUp") || name.equals("tearDown"));
}
})
.around(new SystemPropertiesInvariantRule(IGNORED_INVARIANT_PROPERTIES))
.around(classNameRule = new TestRuleStoreClassName())
.around(classEnvRule = new TestRuleSetupAndRestoreClassEnv());
// -----------------------------------------------------------------
// Test level rules.
// -----------------------------------------------------------------
/**
* Enforces {@link #setUp()} and {@link #tearDown()} calls are chained.
*/
private TestRuleSetupTeardownChained parentChainCallRule = new TestRuleSetupTeardownChained();
/**
* Save test thread and name.
*/
private TestRuleThreadAndTestName threadAndTestNameRule = new TestRuleThreadAndTestName();
/**
* Taint suite result with individual test failures.
*/
private TestRuleMarkFailure testFailureMarker = new TestRuleMarkFailure(suiteFailureMarker);
/**
* This controls how individual test rules are nested. It is important that
* _all_ rules declared in {@link LuceneTestCase} are executed in proper order
* if they depend on each other.
*/
@Rule
public final TestRule ruleChain = RuleChain
.outerRule(testFailureMarker)
.around(ignoreAfterMaxFailures)
.around(threadAndTestNameRule)
.around(new SystemPropertiesInvariantRule(IGNORED_INVARIANT_PROPERTIES))
.around(new TestRuleSetupAndRestoreInstanceEnv())
.around(new TestRuleFieldCacheSanity())
.around(parentChainCallRule);
// -----------------------------------------------------------------
// Suite and test case setup / cleanup.
// -----------------------------------------------------------------
/**
* For subclasses to override. Overrides must call {@code super.setUp()}.
*/
@Before
public void setUp() throws Exception {
parentChainCallRule.setupCalled = true;
}
/**
* For subclasses to override. Overrides must call {@code super.tearDown()}.
*/
@After
public void tearDown() throws Exception {
parentChainCallRule.teardownCalled = true;
}
// -----------------------------------------------------------------
// Test facilities and facades for subclasses.
// -----------------------------------------------------------------
/**
* Registers a {@link Closeable} resource that should be closed after the test
* completes.
*
* @return <code>resource</code> (for call chaining).
*/
public <T extends Closeable> T closeAfterTest(T resource) {
return RandomizedContext.current().closeAtEnd(resource, LifecycleScope.TEST);
}
/**
* Registers a {@link Closeable} resource that should be closed after the suite
* completes.
*
* @return <code>resource</code> (for call chaining).
*/
public static <T extends Closeable> T closeAfterSuite(T resource) {
return RandomizedContext.current().closeAtEnd(resource, LifecycleScope.SUITE);
}
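// Hedged usage sketch (the resource below is hypothetical; only java.io is
// used): typical call chaining inside a test method —
//     InputStream in = closeAfterTest(new FileInputStream(tempFile));
//     // 'in' is closed automatically when the test completes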
/**
* Return the current class being tested.
*/
public static Class<?> getTestClass() {
return classNameRule.getTestClass();
}
/**
* Return the name of the currently executing test case.
*/
public String getTestName() {
return threadAndTestNameRule.testMethodName;
}
}
| 0true
|
src_test_java_org_apache_lucene_util_AbstractRandomizedTest.java
|
1,398 |
CollectionUtil.timSort(templates, new Comparator<IndexTemplateMetaData>() {
@Override
public int compare(IndexTemplateMetaData o1, IndexTemplateMetaData o2) {
return o2.order() - o1.order();
}
});
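// Side note (assumption about intent, not a change to the snippet): the
// subtraction-based comparator above can overflow for extreme order() values;
// an overflow-safe equivalent on Java 7+ would be
//     public int compare(IndexTemplateMetaData o1, IndexTemplateMetaData o2) {
//         return Integer.compare(o2.order(), o1.order());
//     }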
| 0true
|
src_main_java_org_elasticsearch_cluster_metadata_MetaDataCreateIndexService.java
|
2,355 |
public class JobSupervisor {
private final ConcurrentMap<Object, Reducer> reducers = new ConcurrentHashMap<Object, Reducer>();
private final ConcurrentMap<Integer, Set<Address>> remoteReducers = new ConcurrentHashMap<Integer, Set<Address>>();
private final AtomicReference<DefaultContext> context = new AtomicReference<DefaultContext>();
private final ConcurrentMap<Object, Address> keyAssignments = new ConcurrentHashMap<Object, Address>();
private final Address jobOwner;
private final boolean ownerNode;
private final AbstractJobTracker jobTracker;
private final JobTaskConfiguration configuration;
private final MapReduceService mapReduceService;
private final ExecutorService executorService;
private final JobProcessInformationImpl jobProcessInformation;
public JobSupervisor(JobTaskConfiguration configuration, AbstractJobTracker jobTracker, boolean ownerNode,
MapReduceService mapReduceService) {
this.jobTracker = jobTracker;
this.ownerNode = ownerNode;
this.configuration = configuration;
this.mapReduceService = mapReduceService;
this.jobOwner = configuration.getJobOwner();
this.executorService = mapReduceService.getExecutorService(configuration.getName());
// Calculate partition count
this.jobProcessInformation = createJobProcessInformation(configuration, this);
// Preregister reducer task to handle immediate reducing events
String name = configuration.getName();
String jobId = configuration.getJobId();
jobTracker.registerReducerTask(new ReducerTask(name, jobId, this));
}
public MapReduceService getMapReduceService() {
return mapReduceService;
}
public JobTracker getJobTracker() {
return jobTracker;
}
public void startTasks(MappingPhase mappingPhase) {
// Start map-combiner tasks
jobTracker.registerMapCombineTask(new MapCombineTask(configuration, this, mappingPhase));
}
public void onNotification(MapReduceNotification notification) {
if (notification instanceof IntermediateChunkNotification) {
IntermediateChunkNotification icn = (IntermediateChunkNotification) notification;
ReducerTask reducerTask = jobTracker.getReducerTask(icn.getJobId());
reducerTask.processChunk(icn.getChunk());
} else if (notification instanceof LastChunkNotification) {
LastChunkNotification lcn = (LastChunkNotification) notification;
ReducerTask reducerTask = jobTracker.getReducerTask(lcn.getJobId());
reducerTask.processChunk(lcn.getPartitionId(), lcn.getSender(), lcn.getChunk());
} else if (notification instanceof ReducingFinishedNotification) {
ReducingFinishedNotification rfn = (ReducingFinishedNotification) notification;
processReducerFinished(rfn);
}
}
public void notifyRemoteException(Address remoteAddress, Throwable throwable) {
// Cancel all partition states
jobProcessInformation.cancelPartitionState();
// Notify all other nodes about cancellation
Set<Address> addresses = collectRemoteAddresses();
// Now notify all involved members to cancel the job
cancelRemoteOperations(addresses);
// Cancel local job
TrackableJobFuture future = cancel();
if (future != null) {
// Might already have been cancelled by another member's exception
ExceptionUtil.fixRemoteStackTrace(throwable, Thread.currentThread().getStackTrace(),
"Operation failed on node: " + remoteAddress);
future.setResult(throwable);
}
}
public boolean cancelAndNotify(Exception exception) {
// Cancel all partition states
jobProcessInformation.cancelPartitionState();
// Notify all other nodes about cancellation
Set<Address> addresses = collectRemoteAddresses();
// Now notify all involved members to cancel the job
cancelRemoteOperations(addresses);
// Cancel local job
TrackableJobFuture future = cancel();
if (future != null) {
// Might already have been cancelled by another member's exception
future.setResult(exception);
}
return true;
}
// TODO Not yet fully supported
public boolean cancelNotifyAndRestart() {
// Cancel all partition states
jobProcessInformation.cancelPartitionState();
// Notify all other nodes about cancellation
Set<Address> addresses = collectRemoteAddresses();
// Now notify all involved members to cancel the job
cancelRemoteOperations(addresses);
// Kill local tasks
String jobId = getConfiguration().getJobId();
MapCombineTask mapCombineTask = jobTracker.unregisterMapCombineTask(jobId);
if (mapCombineTask != null) {
mapCombineTask.cancel();
}
ReducerTask reducerTask = jobTracker.unregisterReducerTask(jobId);
if (reducerTask != null) {
reducerTask.cancel();
}
// Reset local data
jobProcessInformation.resetPartitionState();
reducers.clear();
remoteReducers.clear();
context.set(null);
keyAssignments.clear();
// Restart
// TODO restart with a new KeyValueJob
return true;
}
public TrackableJobFuture cancel() {
String jobId = getConfiguration().getJobId();
TrackableJobFuture future = jobTracker.unregisterTrackableJob(jobId);
MapCombineTask mapCombineTask = jobTracker.unregisterMapCombineTask(jobId);
if (mapCombineTask != null) {
mapCombineTask.cancel();
}
ReducerTask reducerTask = jobTracker.unregisterReducerTask(jobId);
if (reducerTask != null) {
reducerTask.cancel();
}
mapReduceService.destroyJobSupervisor(this);
return future;
}
public Map<Object, Object> getJobResults() {
Map<Object, Object> result;
if (configuration.getReducerFactory() != null) {
int mapsize = MapReduceUtil.mapSize(reducers.size());
result = new HashMap<Object, Object>(mapsize);
for (Map.Entry<Object, Reducer> entry : reducers.entrySet()) {
result.put(entry.getKey(), entry.getValue().finalizeReduce());
}
} else {
DefaultContext currentContext = context.get();
result = currentContext.finish();
}
return result;
}
public <KeyIn, ValueIn, ValueOut> Reducer<KeyIn, ValueIn, ValueOut> getReducerByKey(Object key) {
Reducer reducer = reducers.get(key);
if (reducer == null && configuration.getReducerFactory() != null) {
reducer = configuration.getReducerFactory().newReducer(key);
Reducer oldReducer = reducers.putIfAbsent(key, reducer);
if (oldReducer != null) {
reducer = oldReducer;
} else {
reducer.beginReduce(key);
}
}
return reducer;
}
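// Illustrative note: getReducerByKey relies on the standard putIfAbsent idiom
// to guarantee a single Reducer per key under concurrency. The same pattern in
// generic form (names hypothetical):
//     V value = map.get(key);
//     if (value == null) {
//         value = create(key);
//         V prior = map.putIfAbsent(key, value);
//         if (prior != null) value = prior;  // lost the race; reuse the winner
//     }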
public Address getReducerAddressByKey(Object key) {
Address address = keyAssignments.get(key);
if (address != null) {
return address;
}
return null;
}
public Address assignKeyReducerAddress(Object key) {
// Assign new key to a known member
Address address = keyAssignments.get(key);
if (address == null) {
address = mapReduceService.getKeyMember(key);
Address oldAddress = keyAssignments.putIfAbsent(key, address);
if (oldAddress != null) {
address = oldAddress;
}
}
return address;
}
public boolean checkAssignedMembersAvailable() {
return mapReduceService.checkAssignedMembersAvailable(keyAssignments.values());
}
public boolean assignKeyReducerAddress(Object key, Address address) {
Address oldAssignment = keyAssignments.putIfAbsent(key, address);
return oldAssignment == null || oldAssignment.equals(address);
}
public void checkFullyProcessed(JobProcessInformation processInformation) {
if (isOwnerNode()) {
JobPartitionState[] partitionStates = processInformation.getPartitionStates();
for (JobPartitionState partitionState : partitionStates) {
if (partitionState == null || partitionState.getState() != JobPartitionState.State.PROCESSED) {
return;
}
}
String name = configuration.getName();
String jobId = configuration.getJobId();
NodeEngine nodeEngine = configuration.getNodeEngine();
GetResultOperationFactory operationFactory = new GetResultOperationFactory(name, jobId);
List<Map> results = MapReduceUtil.executeOperation(operationFactory, mapReduceService, nodeEngine, true);
boolean reducedResult = configuration.getReducerFactory() != null;
if (results != null) {
Map<Object, Object> mergedResults = new HashMap<Object, Object>();
for (Map<?, ?> map : results) {
for (Map.Entry entry : map.entrySet()) {
collectResults(reducedResult, mergedResults, entry);
}
}
// Get the initial future object to eventually set the result and cleanup
TrackableJobFuture future = jobTracker.unregisterTrackableJob(jobId);
jobTracker.unregisterMapCombineTask(jobId);
jobTracker.unregisterReducerTask(jobId);
mapReduceService.destroyJobSupervisor(this);
future.setResult(mergedResults);
}
}
}
public <K, V> DefaultContext<K, V> getOrCreateContext(MapCombineTask mapCombineTask) {
DefaultContext<K, V> newContext = new DefaultContext<K, V>(configuration.getCombinerFactory(), mapCombineTask);
if (context.compareAndSet(null, newContext)) {
return newContext;
}
return context.get();
}
public void registerReducerEventInterests(int partitionId, Set<Address> remoteReducers) {
Set<Address> addresses = this.remoteReducers.get(partitionId);
if (addresses == null) {
addresses = new CopyOnWriteArraySet<Address>();
Set<Address> oldSet = this.remoteReducers.putIfAbsent(partitionId, addresses);
if (oldSet != null) {
addresses = oldSet;
}
}
addresses.addAll(remoteReducers);
}
public Collection<Address> getReducerEventInterests(int partitionId) {
return this.remoteReducers.get(partitionId);
}
public JobProcessInformationImpl getJobProcessInformation() {
return jobProcessInformation;
}
public Address getJobOwner() {
return jobOwner;
}
public boolean isOwnerNode() {
return ownerNode;
}
public JobTaskConfiguration getConfiguration() {
return configuration;
}
private void collectResults(boolean reducedResult, Map<Object, Object> mergedResults, Map.Entry entry) {
if (reducedResult) {
mergedResults.put(entry.getKey(), entry.getValue());
} else {
List<Object> list = (List) mergedResults.get(entry.getKey());
if (list == null) {
list = new ArrayList<Object>();
mergedResults.put(entry.getKey(), list);
}
list.addAll((List) entry.getValue());
}
}
private Set<Address> collectRemoteAddresses() {
Set<Address> addresses = new HashSet<Address>();
for (Set<Address> remoteReducerAddresses : remoteReducers.values()) {
addAllFilterJobOwner(addresses, remoteReducerAddresses);
}
for (JobPartitionState partitionState : jobProcessInformation.getPartitionStates()) {
if (partitionState != null && partitionState.getOwner() != null) {
if (!partitionState.getOwner().equals(jobOwner)) {
addresses.add(partitionState.getOwner());
}
}
}
return addresses;
}
private void cancelRemoteOperations(Set<Address> addresses) {
String name = getConfiguration().getName();
String jobId = getConfiguration().getJobId();
for (Address address : addresses) {
try {
CancelJobSupervisorOperation operation = new CancelJobSupervisorOperation(name, jobId);
mapReduceService.processRequest(address, operation, name);
} catch (Exception ignore) {
// We can ignore this exception since we just want to cancel the job
// and the member may be crashed or unreachable in some way
ILogger logger = mapReduceService.getNodeEngine().getLogger(JobSupervisor.class);
logger.finest("Remote node may already be down", ignore);
}
}
}
private void processReducerFinished(final ReducingFinishedNotification notification) {
// Just offload it to free the event queue
executorService.submit(new Runnable() {
@Override
public void run() {
processReducerFinished0(notification);
}
});
}
private void addAllFilterJobOwner(Set<Address> target, Set<Address> source) {
for (Address address : source) {
if (jobOwner.equals(address)) {
continue;
}
target.add(address);
}
}
private void processReducerFinished0(ReducingFinishedNotification notification) {
String name = configuration.getName();
String jobId = configuration.getJobId();
int partitionId = notification.getPartitionId();
Address reducerAddress = notification.getAddress();
if (checkPartitionReductionCompleted(partitionId, reducerAddress)) {
try {
RequestPartitionResult result = mapReduceService
.processRequest(jobOwner, new RequestPartitionProcessed(name, jobId, partitionId, REDUCING), name);
if (result.getResultState() != SUCCESSFUL) {
throw new RuntimeException("Could not finalize processing for partitionId " + partitionId);
}
} catch (Throwable t) {
MapReduceUtil.notifyRemoteException(this, t);
if (t instanceof Error) {
ExceptionUtil.sneakyThrow(t);
}
}
}
}
private boolean checkPartitionReductionCompleted(int partitionId, Address reducerAddress) {
Set<Address> remoteAddresses = remoteReducers.get(partitionId);
if (remoteAddresses == null) {
throw new RuntimeException("Reducer for partition " + partitionId + " not registered");
}
remoteAddresses.remove(reducerAddress);
// only the caller whose removal empties the set and wins the race to remove
// the partition entry reports completion
return remoteAddresses.isEmpty() && remoteReducers.remove(partitionId) != null;
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_mapreduce_impl_task_JobSupervisor.java
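Editorial aside: the last-one-out countdown in checkPartitionReductionCompleted above is a reusable pattern — drain a per-partition member set and let exactly one caller, the one that removes the map entry, report completion. A minimal sketch, assuming String stands in for Hazelcast's Address; the CompletionTracker name and its methods are illustrative, not part of JobSupervisor:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArraySet;

public class CompletionTracker {
    private final ConcurrentMap<Integer, Set<String>> pending = new ConcurrentHashMap<Integer, Set<String>>();

    public void register(int partitionId, Set<String> members) {
        Set<String> set = pending.get(partitionId);
        if (set == null) {
            set = new CopyOnWriteArraySet<String>();
            Set<String> old = pending.putIfAbsent(partitionId, set);
            if (old != null) {
                set = old;
            }
        }
        set.addAll(members);
    }

    // Returns true for exactly one caller: the one whose removal empties the set
    // and wins the race to remove the partition entry.
    public boolean complete(int partitionId, String member) {
        Set<String> set = pending.get(partitionId);
        if (set == null) {
            throw new IllegalStateException("partition " + partitionId + " not registered");
        }
        set.remove(member);
        return set.isEmpty() && pending.remove(partitionId) != null;
    }

    public static void main(String[] args) {
        CompletionTracker tracker = new CompletionTracker();
        tracker.register(0, new HashSet<String>(Arrays.asList("memberA", "memberB")));
        System.out.println(tracker.complete(0, "memberA")); // false
        System.out.println(tracker.complete(0, "memberB")); // true
    }
}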
|
897 |
public abstract class TransportSearchTypeAction extends TransportAction<SearchRequest, SearchResponse> {
protected final ClusterService clusterService;
protected final SearchServiceTransportAction searchService;
protected final SearchPhaseController searchPhaseController;
public TransportSearchTypeAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
SearchServiceTransportAction searchService, SearchPhaseController searchPhaseController) {
super(settings, threadPool);
this.clusterService = clusterService;
this.searchService = searchService;
this.searchPhaseController = searchPhaseController;
}
protected abstract class BaseAsyncAction<FirstResult extends SearchPhaseResult> {
protected final ActionListener<SearchResponse> listener;
protected final GroupShardsIterator shardsIts;
protected final SearchRequest request;
protected final ClusterState clusterState;
protected final DiscoveryNodes nodes;
protected final int expectedSuccessfulOps;
private final int expectedTotalOps;
protected final AtomicInteger successfulOps = new AtomicInteger();
private final AtomicInteger totalOps = new AtomicInteger();
protected final AtomicArray<FirstResult> firstResults;
private volatile AtomicArray<ShardSearchFailure> shardFailures;
private final Object shardFailuresMutex = new Object();
protected volatile ScoreDoc[] sortedShardList;
protected final long startTime = System.currentTimeMillis();
protected BaseAsyncAction(SearchRequest request, ActionListener<SearchResponse> listener) {
this.request = request;
this.listener = listener;
this.clusterState = clusterService.state();
nodes = clusterState.nodes();
clusterState.blocks().globalBlockedRaiseException(ClusterBlockLevel.READ);
String[] concreteIndices = clusterState.metaData().concreteIndices(request.indices(), request.indicesOptions());
for (String index : concreteIndices) {
clusterState.blocks().indexBlockedRaiseException(ClusterBlockLevel.READ, index);
}
Map<String, Set<String>> routingMap = clusterState.metaData().resolveSearchRouting(request.routing(), request.indices());
shardsIts = clusterService.operationRouting().searchShards(clusterState, request.indices(), concreteIndices, routingMap, request.preference());
expectedSuccessfulOps = shardsIts.size();
// we need to add 1 for each non-active partition, since we count it in the total!
expectedTotalOps = shardsIts.totalSizeWith1ForEmpty();
firstResults = new AtomicArray<FirstResult>(shardsIts.size());
}
public void start() {
if (expectedSuccessfulOps == 0) {
// no search shards to search on, bail with empty response (it happens with search across _all with no indices around and consistent with broadcast operations)
listener.onResponse(new SearchResponse(InternalSearchResponse.EMPTY, null, 0, 0, System.currentTimeMillis() - startTime, ShardSearchFailure.EMPTY_ARRAY));
return;
}
request.beforeStart();
// count the local operations, and perform the non local ones
int localOperations = 0;
int shardIndex = -1;
for (final ShardIterator shardIt : shardsIts) {
shardIndex++;
final ShardRouting shard = shardIt.firstOrNull();
if (shard != null) {
if (shard.currentNodeId().equals(nodes.localNodeId())) {
localOperations++;
} else {
// do the remote operation here, the localAsync flag is not relevant
performFirstPhase(shardIndex, shardIt);
}
} else {
// really, no shards active in this group
onFirstPhaseResult(shardIndex, null, null, shardIt, new NoShardAvailableActionException(shardIt.shardId()));
}
}
// we have local operations, perform them now
if (localOperations > 0) {
if (request.operationThreading() == SearchOperationThreading.SINGLE_THREAD) {
request.beforeLocalFork();
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
int shardIndex = -1;
for (final ShardIterator shardIt : shardsIts) {
shardIndex++;
final ShardRouting shard = shardIt.firstOrNull();
if (shard != null) {
if (shard.currentNodeId().equals(nodes.localNodeId())) {
performFirstPhase(shardIndex, shardIt);
}
}
}
}
});
} else {
boolean localAsync = request.operationThreading() == SearchOperationThreading.THREAD_PER_SHARD;
if (localAsync) {
request.beforeLocalFork();
}
shardIndex = -1;
for (final ShardIterator shardIt : shardsIts) {
shardIndex++;
final int fShardIndex = shardIndex;
final ShardRouting shard = shardIt.firstOrNull();
if (shard != null) {
if (shard.currentNodeId().equals(nodes.localNodeId())) {
if (localAsync) {
try {
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
performFirstPhase(fShardIndex, shardIt);
}
});
} catch (Throwable t) {
onFirstPhaseResult(shardIndex, shard, shard.currentNodeId(), shardIt, t);
}
} else {
performFirstPhase(fShardIndex, shardIt);
}
}
}
}
}
}
}
void performFirstPhase(final int shardIndex, final ShardIterator shardIt) {
performFirstPhase(shardIndex, shardIt, shardIt.nextOrNull());
}
void performFirstPhase(final int shardIndex, final ShardIterator shardIt, final ShardRouting shard) {
if (shard == null) {
// no more active shards... (we should not really get here, but just for safety)
onFirstPhaseResult(shardIndex, null, null, shardIt, new NoShardAvailableActionException(shardIt.shardId()));
} else {
final DiscoveryNode node = nodes.get(shard.currentNodeId());
if (node == null) {
onFirstPhaseResult(shardIndex, shard, null, shardIt, new NoShardAvailableActionException(shardIt.shardId()));
} else {
String[] filteringAliases = clusterState.metaData().filteringAliases(shard.index(), request.indices());
sendExecuteFirstPhase(node, internalSearchRequest(shard, shardsIts.size(), request, filteringAliases, startTime), new SearchServiceListener<FirstResult>() {
@Override
public void onResult(FirstResult result) {
onFirstPhaseResult(shardIndex, shard, result, shardIt);
}
@Override
public void onFailure(Throwable t) {
onFirstPhaseResult(shardIndex, shard, node.id(), shardIt, t);
}
});
}
}
}
void onFirstPhaseResult(int shardIndex, ShardRouting shard, FirstResult result, ShardIterator shardIt) {
result.shardTarget(new SearchShardTarget(shard.currentNodeId(), shard.index(), shard.id()));
processFirstPhaseResult(shardIndex, shard, result);
// increment all the "future" shards to update the total ops, since some may work and some may not...
// and when that happens, we break on total ops, so we must maintain them
int xTotalOps = totalOps.addAndGet(shardIt.remaining() + 1);
successfulOps.incrementAndGet();
if (xTotalOps == expectedTotalOps) {
try {
innerMoveToSecondPhase();
} catch (Throwable e) {
if (logger.isDebugEnabled()) {
logger.debug(shardIt.shardId() + ": Failed to execute [" + request + "] while moving to second phase", e);
}
listener.onFailure(new ReduceSearchPhaseException(firstPhaseName(), "", e, buildShardFailures()));
}
}
}
void onFirstPhaseResult(final int shardIndex, @Nullable ShardRouting shard, @Nullable String nodeId, final ShardIterator shardIt, Throwable t) {
// we always add the shard failure for a specific shard instance
// we do make sure to clean it on a successful response from a shard
SearchShardTarget shardTarget = new SearchShardTarget(nodeId, shardIt.shardId().getIndex(), shardIt.shardId().getId());
addShardFailure(shardIndex, shardTarget, t);
if (totalOps.incrementAndGet() == expectedTotalOps) {
if (logger.isDebugEnabled()) {
if (t != null && !TransportActions.isShardNotAvailableException(t)) {
if (shard != null) {
logger.debug(shard.shortSummary() + ": Failed to execute [" + request + "]", t);
} else {
logger.debug(shardIt.shardId() + ": Failed to execute [" + request + "]", t);
}
}
}
if (successfulOps.get() == 0) {
if (logger.isDebugEnabled()) {
logger.debug("All shards failed for phase: [{}]", firstPhaseName(), t);
}
// no successful ops, raise an exception
listener.onFailure(new SearchPhaseExecutionException(firstPhaseName(), "all shards failed", buildShardFailures()));
} else {
try {
innerMoveToSecondPhase();
} catch (Throwable e) {
listener.onFailure(new ReduceSearchPhaseException(firstPhaseName(), "", e, buildShardFailures()));
}
}
} else {
final ShardRouting nextShard = shardIt.nextOrNull();
final boolean lastShard = nextShard == null;
// trace log this exception
if (logger.isTraceEnabled() && t != null) {
logger.trace(executionFailureMsg(shard, shardIt, request, lastShard), t);
}
if (!lastShard) {
try {
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
performFirstPhase(shardIndex, shardIt, nextShard);
}
});
} catch (Throwable t1) {
onFirstPhaseResult(shardIndex, shard, shard.currentNodeId(), shardIt, t1);
}
} else {
// no more shards active, add a failure
if (logger.isDebugEnabled() && !logger.isTraceEnabled()) { // do not double log this exception
if (t != null && !TransportActions.isShardNotAvailableException(t)) {
logger.debug(executionFailureMsg(shard, shardIt, request, lastShard), t);
}
}
}
}
}
private String executionFailureMsg(@Nullable ShardRouting shard, final ShardIterator shardIt, SearchRequest request, boolean lastShard) {
if (shard != null) {
return shard.shortSummary() + ": Failed to execute [" + request + "] lastShard [" + lastShard + "]";
} else {
return shardIt.shardId() + ": Failed to execute [" + request + "] lastShard [" + lastShard + "]";
}
}
/**
* Builds how long it took to execute the search.
*/
protected final long buildTookInMillis() {
return System.currentTimeMillis() - startTime;
}
protected final ShardSearchFailure[] buildShardFailures() {
AtomicArray<ShardSearchFailure> shardFailures = this.shardFailures;
if (shardFailures == null) {
return ShardSearchFailure.EMPTY_ARRAY;
}
List<AtomicArray.Entry<ShardSearchFailure>> entries = shardFailures.asList();
ShardSearchFailure[] failures = new ShardSearchFailure[entries.size()];
for (int i = 0; i < failures.length; i++) {
failures[i] = entries.get(i).value;
}
return failures;
}
protected final void addShardFailure(final int shardIndex, @Nullable SearchShardTarget shardTarget, Throwable t) {
// we don't aggregate shard failures on non active shards (but do keep the header counts right)
if (TransportActions.isShardNotAvailableException(t)) {
return;
}
// lazily create shard failures, so we can early build the empty shard failure list in most cases (no failures)
if (shardFailures == null) {
synchronized (shardFailuresMutex) {
if (shardFailures == null) {
shardFailures = new AtomicArray<ShardSearchFailure>(shardsIts.size());
}
}
}
ShardSearchFailure failure = shardFailures.get(shardIndex);
if (failure == null) {
shardFailures.set(shardIndex, new ShardSearchFailure(t, shardTarget));
} else {
// the failure is already present, so try not to override it with an exception that is less meaningful,
// for example, an illegal shard state
if (TransportActions.isReadOverrideException(t)) {
shardFailures.set(shardIndex, new ShardSearchFailure(t, shardTarget));
}
}
}
/**
* Releases shard targets that are not used in the docsIdsToLoad.
*/
protected void releaseIrrelevantSearchContexts(AtomicArray<? extends QuerySearchResultProvider> queryResults,
AtomicArray<IntArrayList> docIdsToLoad) {
if (docIdsToLoad == null) {
return;
}
// we only release search context that we did not fetch from if we are not scrolling
if (request.scroll() == null) {
for (AtomicArray.Entry<? extends QuerySearchResultProvider> entry : queryResults.asList()) {
if (docIdsToLoad.get(entry.index) == null) {
DiscoveryNode node = nodes.get(entry.value.queryResult().shardTarget().nodeId());
if (node != null) { // should not happen (==null) but safeguard anyhow
searchService.sendFreeContext(node, entry.value.queryResult().id(), request);
}
}
}
}
}
protected abstract void sendExecuteFirstPhase(DiscoveryNode node, ShardSearchRequest request, SearchServiceListener<FirstResult> listener);
protected final void processFirstPhaseResult(int shardIndex, ShardRouting shard, FirstResult result) {
firstResults.set(shardIndex, result);
// clean a previous error on this shard group (note: this code is serialized per shardIndex value,
// so it is OK, concurrency-wise, to potentially miss shard failures created by another failure
// in #addShardFailure, because by definition that happens on *another* shardIndex)
AtomicArray<ShardSearchFailure> shardFailures = this.shardFailures;
if (shardFailures != null) {
shardFailures.set(shardIndex, null);
}
}
final void innerMoveToSecondPhase() throws Exception {
if (logger.isTraceEnabled()) {
StringBuilder sb = new StringBuilder();
boolean hadOne = false;
for (int i = 0; i < firstResults.length(); i++) {
FirstResult result = firstResults.get(i);
if (result == null) {
continue; // failure
}
if (hadOne) {
sb.append(",");
} else {
hadOne = true;
}
sb.append(result.shardTarget());
}
logger.trace("Moving to second phase, based on results from: {} (cluster state version: {})", sb, clusterState.version());
}
moveToSecondPhase();
}
protected abstract void moveToSecondPhase() throws Exception;
protected abstract String firstPhaseName();
}
}
| 1no label
|
src_main_java_org_elasticsearch_action_search_type_TransportSearchTypeAction.java
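A side note on addShardFailure above: the failure array is allocated lazily with double-checked locking on a volatile field, so the common all-shards-succeed path never pays for the allocation. A stripped-down sketch of that idiom; AtomicReferenceArray stands in for Elasticsearch's internal AtomicArray, which is an assumption for illustration:

import java.util.concurrent.atomic.AtomicReferenceArray;

public class LazyFailures {
    private volatile AtomicReferenceArray<Throwable> failures; // stays null until the first failure
    private final Object mutex = new Object();
    private final int size;

    public LazyFailures(int size) {
        this.size = size;
    }

    public void add(int index, Throwable t) {
        if (failures == null) {             // first, unsynchronized check
            synchronized (mutex) {
                if (failures == null) {     // second check under the lock
                    failures = new AtomicReferenceArray<Throwable>(size);
                }
            }
        }
        failures.set(index, t);
    }

    public boolean hasFailures() {
        return failures != null;
    }

    public static void main(String[] args) {
        LazyFailures failures = new LazyFailures(4);
        System.out.println(failures.hasFailures()); // false
        failures.add(2, new RuntimeException("shard 2 failed"));
        System.out.println(failures.hasFailures()); // true
    }
}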
|
132 |
@SuppressWarnings("restriction")
public class OUnsafeBinaryConverter implements OBinaryConverter {
public static final OUnsafeBinaryConverter INSTANCE = new OUnsafeBinaryConverter();
private static final Unsafe theUnsafe;
private static final long BYTE_ARRAY_OFFSET;
static {
theUnsafe = (Unsafe) AccessController.doPrivileged(new PrivilegedAction<Object>() {
public Object run() {
try {
Field f = Unsafe.class.getDeclaredField("theUnsafe");
boolean wasAccessible = f.isAccessible();
f.setAccessible(true);
try {
return f.get(null);
} finally {
f.setAccessible(wasAccessible);
}
} catch (NoSuchFieldException e) {
    throw new Error("Unable to locate Unsafe.theUnsafe", e);
} catch (IllegalAccessException e) {
    throw new Error("Unable to access Unsafe.theUnsafe", e);
}
}
});
BYTE_ARRAY_OFFSET = theUnsafe.arrayBaseOffset(byte[].class);
}
public void putShort(byte[] buffer, int index, short value, ByteOrder byteOrder) {
if (!byteOrder.equals(ByteOrder.nativeOrder()))
value = Short.reverseBytes(value);
theUnsafe.putShort(buffer, index + BYTE_ARRAY_OFFSET, value);
}
public short getShort(byte[] buffer, int index, ByteOrder byteOrder) {
short result = theUnsafe.getShort(buffer, index + BYTE_ARRAY_OFFSET);
if (!byteOrder.equals(ByteOrder.nativeOrder()))
result = Short.reverseBytes(result);
return result;
}
public void putInt(byte[] buffer, int pointer, int value, ByteOrder byteOrder) {
final long position = pointer + BYTE_ARRAY_OFFSET;
if (!byteOrder.equals(ByteOrder.nativeOrder()))
value = Integer.reverseBytes(value);
theUnsafe.putInt(buffer, position, value);
}
public int getInt(byte[] buffer, int pointer, ByteOrder byteOrder) {
final long position = pointer + BYTE_ARRAY_OFFSET;
int result = theUnsafe.getInt(buffer, position);
if (!byteOrder.equals(ByteOrder.nativeOrder()))
result = Integer.reverseBytes(result);
return result;
}
public void putLong(byte[] buffer, int index, long value, ByteOrder byteOrder) {
if (!byteOrder.equals(ByteOrder.nativeOrder()))
value = Long.reverseBytes(value);
theUnsafe.putLong(buffer, index + BYTE_ARRAY_OFFSET, value);
}
public long getLong(byte[] buffer, int index, ByteOrder byteOrder) {
long result = theUnsafe.getLong(buffer, index + BYTE_ARRAY_OFFSET);
if (!byteOrder.equals(ByteOrder.nativeOrder()))
result = Long.reverseBytes(result);
return result;
}
public void putChar(byte[] buffer, int index, char character, ByteOrder byteOrder) {
if (!byteOrder.equals(ByteOrder.nativeOrder()))
character = Character.reverseBytes(character);
theUnsafe.putChar(buffer, index + BYTE_ARRAY_OFFSET, character);
}
public char getChar(byte[] buffer, int index, ByteOrder byteOrder) {
char result = theUnsafe.getChar(buffer, index + BYTE_ARRAY_OFFSET);
if (!byteOrder.equals(ByteOrder.nativeOrder()))
result = Character.reverseBytes(result);
return result;
}
public boolean nativeAccelerationUsed() {
return true;
}
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_serialization_OUnsafeBinaryConverter.java
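A hedged usage sketch for the converter above, assuming the OUnsafeBinaryConverter class is compiled on the classpath (in a real project it lives in com.orientechnologies.common.serialization). The offset 4 is arbitrary; it only needs to leave 8 bytes of room for the long:

import java.nio.ByteOrder;

public class ConverterDemo {
    public static void main(String[] args) {
        OUnsafeBinaryConverter converter = OUnsafeBinaryConverter.INSTANCE;
        byte[] buffer = new byte[16];
        converter.putLong(buffer, 4, 0xCAFEBABEL, ByteOrder.BIG_ENDIAN);
        long value = converter.getLong(buffer, 4, ByteOrder.BIG_ENDIAN);
        System.out.println(Long.toHexString(value)); // cafebabe
    }
}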
|
540 |
public class CreateTransactionRequest extends BaseTransactionRequest implements SecureRequest {
private TransactionOptions options;
private SerializableXID sXid;
public CreateTransactionRequest() {
}
public CreateTransactionRequest(TransactionOptions options, SerializableXID sXid) {
this.options = options;
this.sXid = sXid;
}
@Override
public Object innerCall() throws Exception {
ClientEngineImpl clientEngine = getService();
ClientEndpoint endpoint = getEndpoint();
TransactionManagerServiceImpl transactionManager =
(TransactionManagerServiceImpl) clientEngine.getTransactionManagerService();
TransactionContext context = transactionManager.newClientTransactionContext(options, endpoint.getUuid());
if (sXid != null) {
Transaction transaction = TransactionAccessor.getTransaction(context);
transactionManager.addManagedTransaction(sXid, transaction);
}
context.beginTransaction();
endpoint.setTransactionContext(context);
return context.getTxnId();
}
@Override
public String getServiceName() {
return ClientEngineImpl.SERVICE_NAME;
}
@Override
public int getFactoryId() {
return ClientTxnPortableHook.F_ID;
}
@Override
public int getClassId() {
return ClientTxnPortableHook.CREATE;
}
@Override
public void write(PortableWriter writer) throws IOException {
super.write(writer);
ObjectDataOutput out = writer.getRawDataOutput();
options.writeData(out);
out.writeBoolean(sXid != null);
if (sXid != null) {
sXid.writeData(out);
}
}
@Override
public void read(PortableReader reader) throws IOException {
super.read(reader);
ObjectDataInput in = reader.getRawDataInput();
options = new TransactionOptions();
options.readData(in);
boolean sXidNotNull = in.readBoolean();
if (sXidNotNull) {
sXid = new SerializableXID();
sXid.readData(in);
}
}
@Override
public Permission getRequiredPermission() {
return new TransactionPermission();
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_client_txn_CreateTransactionRequest.java
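The write/read pair above guards the optional XID with a boolean presence flag, a common pattern for nullable fields in hand-rolled wire formats. A self-contained sketch of the same idea using plain java.io streams (the real code uses Hazelcast's ObjectDataOutput/ObjectDataInput):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;

public class OptionalFieldDemo {
    static void writeOptional(DataOutput out, String value) throws IOException {
        out.writeBoolean(value != null); // presence flag first
        if (value != null) {
            out.writeUTF(value);
        }
    }

    static String readOptional(DataInput in) throws IOException {
        return in.readBoolean() ? in.readUTF() : null;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        writeOptional(new DataOutputStream(bytes), "xid-1");
        DataInput in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        System.out.println(readOptional(in)); // xid-1
    }
}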
|
147 |
public class OBinaryTypeSerializer implements OBinarySerializer<byte[]> {
private static final OBinaryConverter CONVERTER = OBinaryConverterFactory.getConverter();
public static final OBinaryTypeSerializer INSTANCE = new OBinaryTypeSerializer();
public static final byte ID = 17;
public int getObjectSize(int length) {
return length + OIntegerSerializer.INT_SIZE;
}
public int getObjectSize(byte[] object, Object... hints) {
return object.length + OIntegerSerializer.INT_SIZE;
}
public void serialize(byte[] object, byte[] stream, int startPosition, Object... hints) {
int len = object.length;
OIntegerSerializer.INSTANCE.serialize(len, stream, startPosition);
System.arraycopy(object, 0, stream, startPosition + OIntegerSerializer.INT_SIZE, len);
}
public byte[] deserialize(byte[] stream, int startPosition) {
int len = OIntegerSerializer.INSTANCE.deserialize(stream, startPosition);
return Arrays.copyOfRange(stream, startPosition + OIntegerSerializer.INT_SIZE,
        startPosition + OIntegerSerializer.INT_SIZE + len);
}
public int getObjectSize(byte[] stream, int startPosition) {
return OIntegerSerializer.INSTANCE.deserialize(stream, startPosition) + OIntegerSerializer.INT_SIZE;
}
public int getObjectSizeNative(byte[] stream, int startPosition) {
return CONVERTER.getInt(stream, startPosition, ByteOrder.nativeOrder()) + OIntegerSerializer.INT_SIZE;
}
public void serializeNative(byte[] object, byte[] stream, int startPosition, Object... hints) {
int len = object.length;
CONVERTER.putInt(stream, startPosition, len, ByteOrder.nativeOrder());
System.arraycopy(object, 0, stream, startPosition + OIntegerSerializer.INT_SIZE, len);
}
public byte[] deserializeNative(byte[] stream, int startPosition) {
int len = CONVERTER.getInt(stream, startPosition, ByteOrder.nativeOrder());
return Arrays.copyOfRange(stream, startPosition + OIntegerSerializer.INT_SIZE,
        startPosition + OIntegerSerializer.INT_SIZE + len);
}
@Override
public void serializeInDirectMemory(byte[] object, ODirectMemoryPointer pointer, long offset, Object... hints) {
int len = object.length;
pointer.setInt(offset, len);
offset += OIntegerSerializer.INT_SIZE;
pointer.set(offset, object, 0, len);
}
@Override
public byte[] deserializeFromDirectMemory(ODirectMemoryPointer pointer, long offset) {
int len = pointer.getInt(offset);
offset += OIntegerSerializer.INT_SIZE;
return pointer.get(offset, len);
}
@Override
public int getObjectSizeInDirectMemory(ODirectMemoryPointer pointer, long offset) {
return pointer.getInt(offset) + OIntegerSerializer.INT_SIZE;
}
public byte getId() {
return ID;
}
public boolean isFixedLength() {
return false;
}
public int getFixedLength() {
return 0;
}
@Override
public byte[] preprocess(byte[] value, Object... hints) {
return value;
}
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_serialization_types_OBinaryTypeSerializer.java
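A quick round-trip through the length-prefixed layout above, assuming OBinaryTypeSerializer and its OIntegerSerializer dependency are on the classpath; the stream is sized with getObjectSize so the arraycopy cannot overflow:

import java.util.Arrays;

public class SerializerDemo {
    public static void main(String[] args) {
        OBinaryTypeSerializer serializer = OBinaryTypeSerializer.INSTANCE;
        byte[] payload = {1, 2, 3, 4};
        byte[] stream = new byte[serializer.getObjectSize(payload)];
        serializer.serialize(payload, stream, 0);
        byte[] copy = serializer.deserialize(stream, 0);
        System.out.println(Arrays.equals(payload, copy)); // true
    }
}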
|
529 |
public class ClientTxnPortableHook implements PortableHook {
public static final int F_ID = FactoryIdHelper.getFactoryId(FactoryIdHelper.CLIENT_TXN_PORTABLE_FACTORY, -19);
public static final int CREATE = 1;
public static final int COMMIT = 2;
public static final int ROLLBACK = 3;
public static final int PREPARE = 4;
public static final int RECOVER_ALL = 5;
public static final int RECOVER = 6;
@Override
public int getFactoryId() {
return F_ID;
}
public PortableFactory createFactory() {
final PortableFactory factory = new PortableFactory() {
public Portable create(int classId) {
switch (classId) {
case CREATE:
return new CreateTransactionRequest();
case COMMIT:
return new CommitTransactionRequest();
case ROLLBACK:
return new RollbackTransactionRequest();
case PREPARE:
return new PrepareTransactionRequest();
case RECOVER_ALL:
return new RecoverAllTransactionsRequest();
case RECOVER:
return new RecoverTransactionRequest();
default:
return null;
}
}
};
return factory;
}
@Override
public Collection<ClassDefinition> getBuiltinDefinitions() {
return null;
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_client_txn_ClientTxnPortableHook.java
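The factory above dispatches on classId with a switch and returns null for unknown ids. The same registry idea can be expressed with a map, sketched below; the Creator interface is written out by hand to stay compatible with the pre-Java-8 style of this codebase, and the names are illustrative:

import java.util.HashMap;
import java.util.Map;

public class CreatorRegistry<T> {
    public interface Creator<T> {
        T create();
    }

    private final Map<Integer, Creator<T>> creators = new HashMap<Integer, Creator<T>>();

    public void register(int classId, Creator<T> creator) {
        creators.put(classId, creator);
    }

    // Mirrors the switch default: unknown ids yield null.
    public T create(int classId) {
        Creator<T> creator = creators.get(classId);
        return creator == null ? null : creator.create();
    }

    public static void main(String[] args) {
        CreatorRegistry<String> registry = new CreatorRegistry<String>();
        registry.register(1, new Creator<String>() {
            public String create() {
                return "create-transaction";
            }
        });
        System.out.println(registry.create(1)); // create-transaction
        System.out.println(registry.create(9)); // null
    }
}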
|
750 |
public class TxnListSizeRequest extends TxnCollectionRequest {
public TxnListSizeRequest() {
}
public TxnListSizeRequest(String name) {
super(name);
}
@Override
public Object innerCall() throws Exception {
return getEndpoint().getTransactionContext(txnId).getList(name).size();
}
@Override
public String getServiceName() {
return ListService.SERVICE_NAME;
}
@Override
public int getClassId() {
return CollectionPortableHook.TXN_LIST_SIZE;
}
@Override
public Permission getRequiredPermission() {
return new ListPermission(name, ActionConstants.ACTION_READ);
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_collection_client_TxnListSizeRequest.java
|
1,054 |
return new TermsEnum() {
int currentTerm = 0;
int freq = 0;
int docFreq = -1;
long totalTermFrequency = -1;
int[] positions = new int[1];
int[] startOffsets = new int[1];
int[] endOffsets = new int[1];
BytesRef[] payloads = new BytesRef[1];
final BytesRef spare = new BytesRef();
@Override
public BytesRef next() throws IOException {
if (currentTerm++ < numTerms) {
// term string. first the size...
int termVectorSize = perFieldTermVectorInput.readVInt();
spare.grow(termVectorSize);
// ...then the value.
perFieldTermVectorInput.readBytes(spare.bytes, 0, termVectorSize);
spare.length = termVectorSize;
if (hasTermStatistic) {
docFreq = readPotentiallyNegativeVInt(perFieldTermVectorInput);
totalTermFrequency = readPotentiallyNegativeVLong(perFieldTermVectorInput);
}
freq = readPotentiallyNegativeVInt(perFieldTermVectorInput);
// grow the arrays to read the values. this is just
// for performance reasons. Re-use memory instead of
// realloc.
growBuffers();
// finally, read the values into the arrays
// currentPosition etc. so that we can just iterate
// later
writeInfos(perFieldTermVectorInput);
return spare;
} else {
return null;
}
}
private void writeInfos(final BytesStreamInput input) throws IOException {
for (int i = 0; i < freq; i++) {
if (hasPositions) {
positions[i] = input.readVInt();
}
if (hasOffsets) {
startOffsets[i] = input.readVInt();
endOffsets[i] = input.readVInt();
}
if (hasPayloads) {
int payloadLength = input.readVInt();
if (payloads[i] == null) {
payloads[i] = new BytesRef(payloadLength);
} else {
payloads[i].grow(payloadLength);
}
input.readBytes(payloads[i].bytes, 0, payloadLength);
payloads[i].length = payloadLength;
payloads[i].offset = 0;
}
}
}
private void growBuffers() {
if (hasPositions) {
positions = grow(positions, freq);
}
if (hasOffsets) {
startOffsets = grow(startOffsets, freq);
endOffsets = grow(endOffsets, freq);
}
if (hasPayloads) {
if (payloads.length < freq) {
final BytesRef[] newArray = new BytesRef[ArrayUtil.oversize(freq, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
System.arraycopy(payloads, 0, newArray, 0, payloads.length);
payloads = newArray;
}
}
}
@Override
public Comparator<BytesRef> getComparator() {
return BytesRef.getUTF8SortedAsUnicodeComparator();
}
@Override
public SeekStatus seekCeil(BytesRef text) throws IOException {
throw new UnsupportedOperationException("seekCeil is not supported");
}
@Override
public void seekExact(long ord) throws IOException {
throw new UnsupportedOperationException("Seek is not supported");
}
@Override
public BytesRef term() throws IOException {
return spare;
}
@Override
public long ord() throws IOException {
throw new UnsupportedOperationException("ordinals are not supported");
}
@Override
public int docFreq() throws IOException {
return docFreq;
}
@Override
public long totalTermFreq() throws IOException {
return totalTermFrequency;
}
@Override
public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
return docsAndPositions(liveDocs, reuse instanceof DocsAndPositionsEnum ? (DocsAndPositionsEnum) reuse : null, 0);
}
@Override
public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse, int flags) throws IOException {
final TermVectorsDocsAndPosEnum retVal = (reuse instanceof TermVectorsDocsAndPosEnum ? (TermVectorsDocsAndPosEnum) reuse
: new TermVectorsDocsAndPosEnum());
return retVal.reset(hasPositions ? positions : null, hasOffsets ? startOffsets : null, hasOffsets ? endOffsets
: null, hasPayloads ? payloads : null, freq);
}
};
| 0true
|
src_main_java_org_elasticsearch_action_termvector_TermVectorFields.java
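The growBuffers step above reuses the positions/offsets arrays across terms instead of allocating per term, growing them only when a term's frequency exceeds the current capacity. The amortized-doubling idea, sketched here independently of Lucene's ArrayUtil.oversize:

import java.util.Arrays;

public final class Buffers {
    private Buffers() {
    }

    // Grow to at least minSize, over-allocating (~1.5x) so repeated calls stay amortized O(1).
    public static int[] grow(int[] array, int minSize) {
        if (array.length >= minSize) {
            return array; // still big enough: reuse as-is
        }
        int newSize = Math.max(minSize, array.length + (array.length >> 1) + 8);
        return Arrays.copyOf(array, newSize);
    }

    public static void main(String[] args) {
        int[] positions = new int[1];
        positions = grow(positions, 5);
        System.out.println(positions.length >= 5); // true
    }
}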
|
368 |
public static class TimeConsumingMapper
implements Mapper<Integer, Integer, String, Integer> {
@Override
public void map(Integer key, Integer value, Context<String, Integer> collector) {
try {
Thread.sleep(1000);
} catch (Exception ignore) {
    // interruption is irrelevant here; the sleep only simulates slow work
}
collector.emit(String.valueOf(key), value);
}
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_mapreduce_ClientMapReduceTest.java
|
354 |
public class SingleTableInheritanceInfo {
protected String className;
protected String discriminatorName;
protected DiscriminatorType discriminatorType;
protected int discriminatorLength;
public String getClassName() {
return className;
}
public void setClassName(String className) {
this.className = className;
}
public String getDiscriminatorName() {
return discriminatorName;
}
public void setDiscriminatorName(String discriminatorName) {
this.discriminatorName = discriminatorName;
}
public DiscriminatorType getDiscriminatorType() {
return discriminatorType;
}
public void setDiscriminatorType(DiscriminatorType discriminatorType) {
this.discriminatorType = discriminatorType;
}
public int getDiscriminatorLength() {
return discriminatorLength;
}
public void setDiscriminatorLength(int discriminatorLength) {
this.discriminatorLength = discriminatorLength;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((className == null) ? 0 : className.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
SingleTableInheritanceInfo other = (SingleTableInheritanceInfo) obj;
if (className == null) {
if (other.className != null)
return false;
} else if (!className.equals(other.className))
return false;
return true;
}
}
| 1no label
|
common_src_main_java_org_broadleafcommerce_common_extensibility_jpa_convert_inheritance_SingleTableInheritanceInfo.java
|
133 |
@Test
public class SafeConverterTest extends AbstractConverterTest {
@BeforeClass
public void beforeClass() {
converter = new OSafeBinaryConverter();
}
@Override
public void testPutIntBigEndian() {
super.testPutIntBigEndian();
}
@Override
public void testPutIntLittleEndian() {
super.testPutIntLittleEndian();
}
@Override
public void testPutLongBigEndian() {
super.testPutLongBigEndian();
}
@Override
public void testPutLongLittleEndian() {
super.testPutLongLittleEndian();
}
@Override
public void testPutShortBigEndian() {
super.testPutShortBigEndian();
}
@Override
public void testPutShortLittleEndian() {
super.testPutShortLittleEndian();
}
@Override
public void testPutCharBigEndian() {
super.testPutCharBigEndian();
}
@Override
public void testPutCharLittleEndian() {
super.testPutCharLittleEndian();
}
}
| 0true
|
commons_src_test_java_com_orientechnologies_common_serialization_SafeConverterTest.java
|
1,505 |
@SuppressWarnings("unchecked")
@PrivateApi
public final class HazelcastInstanceImpl
implements HazelcastInstance {
public final Node node;
final ILogger logger;
final String name;
final ManagementService managementService;
final LifecycleServiceImpl lifecycleService;
final ManagedContext managedContext;
final ThreadMonitoringService threadMonitoringService;
final ThreadGroup threadGroup;
final ConcurrentMap<String, Object> userContext = new ConcurrentHashMap<String, Object>();
HazelcastInstanceImpl(String name, Config config, NodeContext nodeContext)
throws Exception {
this.name = name;
this.threadGroup = new ThreadGroup(name);
threadMonitoringService = new ThreadMonitoringService(threadGroup);
lifecycleService = new LifecycleServiceImpl(this);
ManagedContext configuredManagedContext = config.getManagedContext();
managedContext = new HazelcastManagedContext(this, configuredManagedContext);
//we are going to copy the user-context map of the Config so that each HazelcastInstance will get its own
//user-context map instance instead of having a shared map instance. So changes made to the user-context map
//in one HazelcastInstance will not be reflected in the user-context of other HazelcastInstances.
userContext.putAll(config.getUserContext());
node = new Node(this, config, nodeContext);
logger = node.getLogger(getClass().getName());
lifecycleService.fireLifecycleEvent(STARTING);
node.start();
if (!node.isActive()) {
node.connectionManager.shutdown();
throw new IllegalStateException("Node failed to start!");
}
managementService = new ManagementService(this);
if (configuredManagedContext != null) {
if (configuredManagedContext instanceof HazelcastInstanceAware) {
((HazelcastInstanceAware) configuredManagedContext).setHazelcastInstance(this);
}
}
initHealthMonitor();
}
private void initHealthMonitor() {
String healthMonitorLevelString = node.getGroupProperties().HEALTH_MONITORING_LEVEL.getString();
HealthMonitorLevel healthLevel = HealthMonitorLevel.valueOf(healthMonitorLevelString);
if (healthLevel != HealthMonitorLevel.OFF) {
logger.finest("Starting health monitor");
int delaySeconds = node.getGroupProperties().HEALTH_MONITORING_DELAY_SECONDS.getInteger();
new HealthMonitor(this, healthLevel, delaySeconds).start();
}
}
public ManagementService getManagementService() {
return managementService;
}
@Override
public String getName() {
return name;
}
@Override
public <K, V> IMap<K, V> getMap(String name) {
if (name == null) {
throw new NullPointerException("Retrieving a map instance with a null name is not allowed!");
}
return getDistributedObject(MapService.SERVICE_NAME, name);
}
@Override
public <E> IQueue<E> getQueue(String name) {
if (name == null) {
throw new NullPointerException("Retrieving a queue instance with a null name is not allowed!");
}
return getDistributedObject(QueueService.SERVICE_NAME, name);
}
@Override
public <E> ITopic<E> getTopic(String name) {
if (name == null) {
throw new NullPointerException("Retrieving a topic instance with a null name is not allowed!");
}
return getDistributedObject(TopicService.SERVICE_NAME, name);
}
@Override
public <E> ISet<E> getSet(String name) {
if (name == null) {
throw new NullPointerException("Retrieving a set instance with a null name is not allowed!");
}
return getDistributedObject(SetService.SERVICE_NAME, name);
}
@Override
public <E> IList<E> getList(String name) {
if (name == null) {
throw new NullPointerException("Retrieving a list instance with a null name is not allowed!");
}
return getDistributedObject(ListService.SERVICE_NAME, name);
}
@Override
public <K, V> MultiMap<K, V> getMultiMap(String name) {
if (name == null) {
throw new NullPointerException("Retrieving a multi-map instance with a null name is not allowed!");
}
return getDistributedObject(MultiMapService.SERVICE_NAME, name);
}
@Override
public JobTracker getJobTracker(String name) {
if (name == null) {
throw new NullPointerException("Retrieving a job tracker instance with a null name is not allowed!");
}
return getDistributedObject(MapReduceService.SERVICE_NAME, name);
}
@Deprecated
public ILock getLock(Object key) {
//this method will be deleted in the near future.
if (key == null) {
throw new NullPointerException("Retrieving a lock instance with a null key is not allowed!");
}
String name = LockProxy.convertToStringKey(key, node.getSerializationService());
return getLock(name);
}
@Override
public ILock getLock(String key) {
if (key == null) {
throw new NullPointerException("Retrieving a lock instance with a null key is not allowed!");
}
return getDistributedObject(LockService.SERVICE_NAME, key);
}
@Override
public <T> T executeTransaction(TransactionalTask<T> task)
throws TransactionException {
return executeTransaction(TransactionOptions.getDefault(), task);
}
@Override
public <T> T executeTransaction(TransactionOptions options, TransactionalTask<T> task)
throws TransactionException {
TransactionManagerService transactionManagerService = node.nodeEngine.getTransactionManagerService();
return transactionManagerService.executeTransaction(options, task);
}
@Override
public TransactionContext newTransactionContext() {
return newTransactionContext(TransactionOptions.getDefault());
}
@Override
public TransactionContext newTransactionContext(TransactionOptions options) {
TransactionManagerService transactionManagerService = node.nodeEngine.getTransactionManagerService();
return transactionManagerService.newTransactionContext(options);
}
@Override
public IExecutorService getExecutorService(final String name) {
if (name == null) {
throw new NullPointerException("Retrieving an executor instance with a null name is not allowed!");
}
return getDistributedObject(DistributedExecutorService.SERVICE_NAME, name);
}
@Override
public IdGenerator getIdGenerator(final String name) {
if (name == null) {
throw new NullPointerException("Retrieving an id-generator instance with a null name is not allowed!");
}
return getDistributedObject(IdGeneratorService.SERVICE_NAME, name);
}
@Override
public IAtomicLong getAtomicLong(final String name) {
if (name == null) {
throw new NullPointerException("Retrieving an atomic-long instance with a null name is not allowed!");
}
return getDistributedObject(AtomicLongService.SERVICE_NAME, name);
}
@Override
public <E> IAtomicReference<E> getAtomicReference(final String name) {
if (name == null) {
throw new NullPointerException("Retrieving an atomic-reference instance with a null name is not allowed!");
}
return getDistributedObject(AtomicReferenceService.SERVICE_NAME, name);
}
@Override
public ICountDownLatch getCountDownLatch(final String name) {
if (name == null) {
throw new NullPointerException("Retrieving a countdown-latch instance with a null name is not allowed!");
}
return getDistributedObject(CountDownLatchService.SERVICE_NAME, name);
}
@Override
public ISemaphore getSemaphore(final String name) {
if (name == null) {
throw new NullPointerException("Retrieving a semaphore instance with a null name is not allowed!");
}
return getDistributedObject(SemaphoreService.SERVICE_NAME, name);
}
@Override
public <K, V> ReplicatedMap<K, V> getReplicatedMap(final String name) {
if (name == null) {
throw new NullPointerException("Retrieving a replicated map instance with a null name is not allowed!");
}
return getDistributedObject(ReplicatedMapService.SERVICE_NAME, name);
}
@Override
public Cluster getCluster() {
return node.clusterService.getClusterProxy();
}
@Override
public Member getLocalEndpoint() {
return node.clusterService.getLocalMember();
}
@Override
public Collection<DistributedObject> getDistributedObjects() {
ProxyService proxyService = node.nodeEngine.getProxyService();
return proxyService.getAllDistributedObjects();
}
@Override
public Config getConfig() {
return node.getConfig();
}
@Override
public ConcurrentMap<String, Object> getUserContext() {
return userContext;
}
@Override
public PartitionService getPartitionService() {
return node.partitionService.getPartitionServiceProxy();
}
@Override
public ClientService getClientService() {
return node.clientEngine.getClientService();
}
@Override
public LoggingService getLoggingService() {
return node.loggingService;
}
@Override
public LifecycleServiceImpl getLifecycleService() {
return lifecycleService;
}
@Override
public void shutdown() {
getLifecycleService().shutdown();
}
@Override
@Deprecated
public <T extends DistributedObject> T getDistributedObject(String serviceName, Object id) {
if (id instanceof String) {
return (T) node.nodeEngine.getProxyService().getDistributedObject(serviceName, (String) id);
}
throw new IllegalArgumentException("'id' must be type of String!");
}
@Override
public <T extends DistributedObject> T getDistributedObject(String serviceName, String name) {
ProxyService proxyService = node.nodeEngine.getProxyService();
return (T) proxyService.getDistributedObject(serviceName, name);
}
@Override
public String addDistributedObjectListener(DistributedObjectListener distributedObjectListener) {
final ProxyService proxyService = node.nodeEngine.getProxyService();
return proxyService.addProxyListener(distributedObjectListener);
}
@Override
public boolean removeDistributedObjectListener(String registrationId) {
final ProxyService proxyService = node.nodeEngine.getProxyService();
return proxyService.removeProxyListener(registrationId);
}
public ThreadGroup getThreadGroup() {
return threadGroup;
}
public SerializationService getSerializationService() {
return node.getSerializationService();
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof HazelcastInstance)) {
    return false;
}
HazelcastInstance that = (HazelcastInstance) o;
return name != null ? name.equals(that.getName()) : that.getName() == null;
}
@Override
public int hashCode() {
return name != null ? name.hashCode() : 0;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append("HazelcastInstance");
sb.append("{name='").append(name).append('\'');
sb.append(", node=").append(node.getThisAddress());
sb.append('}');
return sb.toString();
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_instance_HazelcastInstanceImpl.java
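Each accessor above repeats the same null-name guard before delegating to getDistributedObject. The shape of that guard can be centralized, as sketched below; this helper is hypothetical and not part of the Hazelcast API:

public final class NameGuards {
    private NameGuards() {
    }

    public static String requireName(String name, String what) {
        if (name == null) {
            throw new NullPointerException(
                    "Retrieving a " + what + " instance with a null name is not allowed!");
        }
        return name;
    }

    public static void main(String[] args) {
        System.out.println(requireName("orders", "map")); // orders
        // a call such as getMap(NameGuards.requireName(name, "map")) would then
        // replace the inline null check in each accessor
    }
}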
|
1,152 |
public class NodesStressTest {
private Node[] nodes;
private int numberOfNodes = 2;
private Client[] clients;
private AtomicLong idGenerator = new AtomicLong();
private int fieldNumLimit = 50;
private long searcherIterations = 10;
private Searcher[] searcherThreads = new Searcher[1];
private long indexIterations = 10;
private Indexer[] indexThreads = new Indexer[1];
private TimeValue sleepAfterDone = TimeValue.timeValueMillis(0);
private TimeValue sleepBeforeClose = TimeValue.timeValueMillis(0);
private CountDownLatch latch;
private CyclicBarrier barrier1;
private CyclicBarrier barrier2;
public NodesStressTest() {
}
public NodesStressTest numberOfNodes(int numberOfNodes) {
this.numberOfNodes = numberOfNodes;
return this;
}
public NodesStressTest fieldNumLimit(int fieldNumLimit) {
this.fieldNumLimit = fieldNumLimit;
return this;
}
public NodesStressTest searchIterations(int searchIterations) {
this.searcherIterations = searchIterations;
return this;
}
public NodesStressTest searcherThreads(int numberOfSearcherThreads) {
searcherThreads = new Searcher[numberOfSearcherThreads];
return this;
}
public NodesStressTest indexIterations(long indexIterations) {
this.indexIterations = indexIterations;
return this;
}
public NodesStressTest indexThreads(int numberOfWriterThreads) {
indexThreads = new Indexer[numberOfWriterThreads];
return this;
}
public NodesStressTest sleepAfterDone(TimeValue time) {
this.sleepAfterDone = time;
return this;
}
public NodesStressTest sleepBeforeClose(TimeValue time) {
this.sleepBeforeClose = time;
return this;
}
public NodesStressTest build(Settings settings) throws Exception {
settings = settingsBuilder()
// .put("index.refresh_interval", 1, TimeUnit.SECONDS)
.put(SETTING_NUMBER_OF_SHARDS, 5)
.put(SETTING_NUMBER_OF_REPLICAS, 1)
.put(settings)
.build();
nodes = new Node[numberOfNodes];
clients = new Client[numberOfNodes];
for (int i = 0; i < numberOfNodes; i++) {
nodes[i] = nodeBuilder().settings(settingsBuilder().put(settings).put("name", "node" + i)).node();
clients[i] = nodes[i].client();
}
for (int i = 0; i < searcherThreads.length; i++) {
searcherThreads[i] = new Searcher(i);
}
for (int i = 0; i < indexThreads.length; i++) {
indexThreads[i] = new Indexer(i);
}
latch = new CountDownLatch(1);
barrier1 = new CyclicBarrier(2);
barrier2 = new CyclicBarrier(2);
// warmup
StopWatch stopWatch = new StopWatch().start();
Indexer warmup = new Indexer(-1).max(10000);
warmup.start();
barrier1.await();
barrier2.await();
latch.await();
stopWatch.stop();
System.out.println("Done Warmup, took [" + stopWatch.totalTime() + "]");
latch = new CountDownLatch(searcherThreads.length + indexThreads.length);
barrier1 = new CyclicBarrier(searcherThreads.length + indexThreads.length + 1);
barrier2 = new CyclicBarrier(searcherThreads.length + indexThreads.length + 1);
return this;
}
public void start() throws Exception {
for (Thread t : searcherThreads) {
t.start();
}
for (Thread t : indexThreads) {
t.start();
}
barrier1.await();
StopWatch stopWatch = new StopWatch();
stopWatch.start();
barrier2.await();
latch.await();
stopWatch.stop();
System.out.println("Done, took [" + stopWatch.totalTime() + "]");
System.out.println("Sleeping before close: " + sleepBeforeClose);
Thread.sleep(sleepBeforeClose.millis());
for (Client client : clients) {
client.close();
}
for (Node node : nodes) {
node.close();
}
System.out.println("Sleeping before exit: " + sleepBeforeClose);
Thread.sleep(sleepAfterDone.millis());
}
class Searcher extends Thread {
final int id;
long counter = 0;
long max = searcherIterations;
Searcher(int id) {
super("Searcher" + id);
this.id = id;
}
@Override
public void run() {
try {
barrier1.await();
barrier2.await();
for (; counter < max; counter++) {
Client client = client(counter);
QueryBuilder query = termQuery("num", counter % fieldNumLimit);
query = constantScoreQuery(queryFilter(query));
SearchResponse search = client.search(searchRequest()
.source(searchSource().query(query)))
.actionGet();
// System.out.println("Got search response, hits [" + search.hits().totalHits() + "]");
}
} catch (Exception e) {
System.err.println("Failed to search:");
e.printStackTrace();
} finally {
latch.countDown();
}
}
}
class Indexer extends Thread {
final int id;
long counter = 0;
long max = indexIterations;
Indexer(int id) {
super("Indexer" + id);
this.id = id;
}
Indexer max(int max) {
this.max = max;
return this;
}
@Override
public void run() {
try {
barrier1.await();
barrier2.await();
for (; counter < max; counter++) {
Client client = client(counter);
long id = idGenerator.incrementAndGet();
client.index(Requests.indexRequest().index("test").type("type1").id(Long.toString(id))
.source(XContentFactory.jsonBuilder().startObject()
.field("num", id % fieldNumLimit)
.endObject()))
.actionGet();
}
System.out.println("Indexer [" + id + "]: Done");
} catch (Exception e) {
System.err.println("Failed to index:");
e.printStackTrace();
} finally {
latch.countDown();
}
}
}
private Client client(long i) {
return clients[((int) (i % clients.length))];
}
public static void main(String[] args) throws Exception {
NodesStressTest test = new NodesStressTest()
.numberOfNodes(2)
.indexThreads(5)
.indexIterations(10 * 1000)
.searcherThreads(5)
.searchIterations(10 * 1000)
.sleepBeforeClose(TimeValue.timeValueMinutes(10))
.sleepAfterDone(TimeValue.timeValueMinutes(10))
.build(EMPTY_SETTINGS);
test.start();
}
}
| 0true
|
src_test_java_org_elasticsearch_benchmark_stress_NodesStressTest.java
|
3 |
Collection<Long> perm2Ids = BLCCollectionUtils.collect(perm2, new TypedTransformer<Long>() {
@Override
public Long transform(Object input) {
return ((ProductOptionValue) input).getId();
}
});
| 0true
|
admin_broadleaf-admin-module_src_main_java_org_broadleafcommerce_admin_server_service_AdminCatalogServiceImpl.java
|
762 |
public class TransportMultiGetAction extends TransportAction<MultiGetRequest, MultiGetResponse> {
private final ClusterService clusterService;
private final TransportShardMultiGetAction shardAction;
@Inject
public TransportMultiGetAction(Settings settings, ThreadPool threadPool, TransportService transportService, ClusterService clusterService, TransportShardMultiGetAction shardAction) {
super(settings, threadPool);
this.clusterService = clusterService;
this.shardAction = shardAction;
transportService.registerHandler(MultiGetAction.NAME, new TransportHandler());
}
@Override
protected void doExecute(final MultiGetRequest request, final ActionListener<MultiGetResponse> listener) {
ClusterState clusterState = clusterService.state();
clusterState.blocks().globalBlockedRaiseException(ClusterBlockLevel.READ);
final AtomicArray<MultiGetItemResponse> responses = new AtomicArray<MultiGetItemResponse>(request.items.size());
Map<ShardId, MultiGetShardRequest> shardRequests = new HashMap<ShardId, MultiGetShardRequest>();
for (int i = 0; i < request.items.size(); i++) {
MultiGetRequest.Item item = request.items.get(i);
if (!clusterState.metaData().hasConcreteIndex(item.index())) {
responses.set(i, new MultiGetItemResponse(null, new MultiGetResponse.Failure(item.index(), item.type(), item.id(), "[" + item.index() + "] missing")));
continue;
}
if (item.routing() == null && clusterState.getMetaData().routingRequired(item.index(), item.type())) {
responses.set(i, new MultiGetItemResponse(null, new MultiGetResponse.Failure(item.index(), item.type(), item.id(), "routing is required, but hasn't been specified")));
continue;
}
item.routing(clusterState.metaData().resolveIndexRouting(item.routing(), item.index()));
item.index(clusterState.metaData().concreteIndex(item.index()));
ShardId shardId = clusterService.operationRouting()
.getShards(clusterState, item.index(), item.type(), item.id(), item.routing(), null).shardId();
MultiGetShardRequest shardRequest = shardRequests.get(shardId);
if (shardRequest == null) {
shardRequest = new MultiGetShardRequest(shardId.index().name(), shardId.id());
shardRequest.preference(request.preference);
shardRequest.realtime(request.realtime);
shardRequest.refresh(request.refresh);
shardRequests.put(shardId, shardRequest);
}
shardRequest.add(i, item.type(), item.id(), item.fields(), item.version(), item.versionType(), item.fetchSourceContext());
}
if (shardRequests.size() == 0) {
    // only failures, so respond immediately and skip the shard fan-out below
    listener.onResponse(new MultiGetResponse(responses.toArray(new MultiGetItemResponse[responses.length()])));
    return;
}
final AtomicInteger counter = new AtomicInteger(shardRequests.size());
for (final MultiGetShardRequest shardRequest : shardRequests.values()) {
shardAction.execute(shardRequest, new ActionListener<MultiGetShardResponse>() {
@Override
public void onResponse(MultiGetShardResponse response) {
for (int i = 0; i < response.locations.size(); i++) {
responses.set(response.locations.get(i), new MultiGetItemResponse(response.responses.get(i), response.failures.get(i)));
}
if (counter.decrementAndGet() == 0) {
finishHim();
}
}
@Override
public void onFailure(Throwable e) {
// create failures for all relevant requests
String message = ExceptionsHelper.detailedMessage(e);
for (int i = 0; i < shardRequest.locations.size(); i++) {
responses.set(shardRequest.locations.get(i), new MultiGetItemResponse(null,
new MultiGetResponse.Failure(shardRequest.index(), shardRequest.types.get(i), shardRequest.ids.get(i), message)));
}
if (counter.decrementAndGet() == 0) {
finishHim();
}
}
private void finishHim() {
listener.onResponse(new MultiGetResponse(responses.toArray(new MultiGetItemResponse[responses.length()])));
}
});
}
}
class TransportHandler extends BaseTransportRequestHandler<MultiGetRequest> {
@Override
public MultiGetRequest newInstance() {
return new MultiGetRequest();
}
@Override
public void messageReceived(final MultiGetRequest request, final TransportChannel channel) throws Exception {
// no need to use threaded listener, since we just send a response
request.listenerThreaded(false);
execute(request, new ActionListener<MultiGetResponse>() {
@Override
public void onResponse(MultiGetResponse response) {
try {
channel.sendResponse(response);
} catch (Throwable e) {
onFailure(e);
}
}
@Override
public void onFailure(Throwable e) {
try {
channel.sendResponse(e);
} catch (Exception e1) {
logger.warn("Failed to send error response for action [" + MultiGetAction.NAME + "] and request [" + request + "]", e1);
}
}
});
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_action_get_TransportMultiGetAction.java
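The shard fan-out above completes through an AtomicInteger countdown: every shard response or failure decrements it, and the thread that reaches zero builds the merged response exactly once. A self-contained sketch of that fan-in:

import java.util.concurrent.atomic.AtomicInteger;

public class FanIn {
    private final AtomicInteger counter;
    private final Runnable onDone;

    public FanIn(int expected, Runnable onDone) {
        this.counter = new AtomicInteger(expected);
        this.onDone = onDone;
    }

    // Safe to call from any thread; onDone runs exactly once, on the last arrival.
    public void arrive() {
        if (counter.decrementAndGet() == 0) {
            onDone.run();
        }
    }

    public static void main(String[] args) {
        FanIn fanIn = new FanIn(3, new Runnable() {
            public void run() {
                System.out.println("all shards answered");
            }
        });
        for (int i = 0; i < 3; i++) {
            fanIn.arrive();
        }
    }
}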
|
1,387 |
public static class Routing {
public static final Routing EMPTY = new Routing(false, null);
private final boolean required;
private final String path;
private final String[] pathElements;
public Routing(boolean required, String path) {
this.required = required;
this.path = path;
if (path == null) {
pathElements = Strings.EMPTY_ARRAY;
} else {
pathElements = Strings.delimitedListToStringArray(path, ".");
}
}
public boolean required() {
return required;
}
public boolean hasPath() {
return path != null;
}
public String path() {
return this.path;
}
public String[] pathElements() {
return this.pathElements;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Routing routing = (Routing) o;
if (required != routing.required) return false;
if (path != null ? !path.equals(routing.path) : routing.path != null) return false;
if (!Arrays.equals(pathElements, routing.pathElements)) return false;
return true;
}
@Override
public int hashCode() {
int result = (required ? 1 : 0);
result = 31 * result + (path != null ? path.hashCode() : 0);
result = 31 * result + (pathElements != null ? Arrays.hashCode(pathElements) : 0);
return result;
}
}
| 0true
|
src_main_java_org_elasticsearch_cluster_metadata_MappingMetaData.java
|
106 |
public static class Order {
public static final int Basic = 1000;
public static final int Page = 2000;
public static final int Rules = 1000;
}
| 0true
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_page_domain_PageImpl.java
|
997 |
execute(request, new ActionListener<Response>() {
@Override
public void onResponse(Response result) {
try {
channel.sendResponse(result);
} catch (Throwable e) {
onFailure(e);
}
}
@Override
public void onFailure(Throwable e) {
try {
channel.sendResponse(e);
} catch (Throwable e1) {
logger.warn("Failed to send response for " + transportAction, e1);
}
}
});
| 0true
|
src_main_java_org_elasticsearch_action_support_replication_TransportShardReplicationOperationAction.java
|
143 |
public class GetDistributedObjectsRequest extends ClientRequest {
@Override
void process() throws Exception {
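// collect the service name and object name of every proxy so the client can enumerate the cluster's distributed objects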
ClientEndpoint endpoint = getEndpoint();
Collection<DistributedObject> distributedObjects = clientEngine.getProxyService().getAllDistributedObjects();
SerializationService serializationService = clientEngine.getSerializationService();
List<Data> dataArrayList = new ArrayList<Data>(distributedObjects.size());
for (DistributedObject distributedObject : distributedObjects) {
DistributedObjectInfo distributedObjectInfo = new DistributedObjectInfo(
distributedObject.getServiceName(), distributedObject.getName());
Data data = serializationService.toData(distributedObjectInfo);
dataArrayList.add(data);
}
SerializableCollection collection = new SerializableCollection(dataArrayList);
endpoint.sendResponse(collection, getCallId());
}
@Override
public String getServiceName() {
return ClientEngineImpl.SERVICE_NAME;
}
public int getFactoryId() {
return ClientPortableHook.ID;
}
public int getClassId() {
return ClientPortableHook.GET_DISTRIBUTED_OBJECT_INFO;
}
@Override
public Permission getRequiredPermission() {
return null;
}
}
| 0true
|
hazelcast_src_main_java_com_hazelcast_client_GetDistributedObjectsRequest.java
|
71 |
{
@Override
public TransactionState create( Transaction tx )
{
return TransactionState.NO_STATE;
}
};
| 0true
|
community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_TransactionStateFactory.java
|
6,202 |
public class RandomizingClient implements InternalClient {
private final SearchType defaultSearchType;
private final InternalClient delegate;
public RandomizingClient(InternalClient client, Random random) {
this.delegate = client;
// we don't use the QUERY_AND_FETCH types that break quite a lot of tests
// given that they return `size*num_shards` hits instead of `size`
defaultSearchType = RandomPicks.randomFrom(random, Arrays.asList(
SearchType.DFS_QUERY_THEN_FETCH,
SearchType.QUERY_THEN_FETCH));
}
@Override
public void close() {
delegate.close();
}
@Override
public AdminClient admin() {
return delegate.admin();
}
@Override
public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> ActionFuture<Response> execute(
Action<Request, Response, RequestBuilder> action, Request request) {
return delegate.execute(action, request);
}
@Override
public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void execute(
Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
delegate.execute(action, request, listener);
}
@Override
public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> RequestBuilder prepareExecute(
Action<Request, Response, RequestBuilder> action) {
return delegate.prepareExecute(action);
}
@Override
public ActionFuture<IndexResponse> index(IndexRequest request) {
return delegate.index(request);
}
@Override
public void index(IndexRequest request, ActionListener<IndexResponse> listener) {
delegate.index(request, listener);
}
@Override
public IndexRequestBuilder prepareIndex() {
return delegate.prepareIndex();
}
@Override
public ActionFuture<UpdateResponse> update(UpdateRequest request) {
return delegate.update(request);
}
@Override
public void update(UpdateRequest request, ActionListener<UpdateResponse> listener) {
delegate.update(request, listener);
}
@Override
public UpdateRequestBuilder prepareUpdate() {
return delegate.prepareUpdate();
}
@Override
public UpdateRequestBuilder prepareUpdate(String index, String type, String id) {
return delegate.prepareUpdate(index, type, id);
}
@Override
public IndexRequestBuilder prepareIndex(String index, String type) {
return delegate.prepareIndex(index, type);
}
@Override
public IndexRequestBuilder prepareIndex(String index, String type, String id) {
return delegate.prepareIndex(index, type, id);
}
@Override
public ActionFuture<DeleteResponse> delete(DeleteRequest request) {
return delegate.delete(request);
}
@Override
public void delete(DeleteRequest request, ActionListener<DeleteResponse> listener) {
delegate.delete(request, listener);
}
@Override
public DeleteRequestBuilder prepareDelete() {
return delegate.prepareDelete();
}
@Override
public DeleteRequestBuilder prepareDelete(String index, String type, String id) {
return delegate.prepareDelete(index, type, id);
}
@Override
public ActionFuture<BulkResponse> bulk(BulkRequest request) {
return delegate.bulk(request);
}
@Override
public void bulk(BulkRequest request, ActionListener<BulkResponse> listener) {
delegate.bulk(request, listener);
}
@Override
public BulkRequestBuilder prepareBulk() {
return delegate.prepareBulk();
}
@Override
public ActionFuture<DeleteByQueryResponse> deleteByQuery(DeleteByQueryRequest request) {
return delegate.deleteByQuery(request);
}
@Override
public void deleteByQuery(DeleteByQueryRequest request, ActionListener<DeleteByQueryResponse> listener) {
delegate.deleteByQuery(request, listener);
}
@Override
public DeleteByQueryRequestBuilder prepareDeleteByQuery(String... indices) {
return delegate.prepareDeleteByQuery(indices);
}
@Override
public ActionFuture<GetResponse> get(GetRequest request) {
return delegate.get(request);
}
@Override
public void get(GetRequest request, ActionListener<GetResponse> listener) {
delegate.get(request, listener);
}
@Override
public GetRequestBuilder prepareGet() {
return delegate.prepareGet();
}
@Override
public GetRequestBuilder prepareGet(String index, String type, String id) {
return delegate.prepareGet(index, type, id);
}
@Override
public ActionFuture<MultiGetResponse> multiGet(MultiGetRequest request) {
return delegate.multiGet(request);
}
@Override
public void multiGet(MultiGetRequest request, ActionListener<MultiGetResponse> listener) {
delegate.multiGet(request, listener);
}
@Override
public MultiGetRequestBuilder prepareMultiGet() {
return delegate.prepareMultiGet();
}
@Override
public ActionFuture<CountResponse> count(CountRequest request) {
return delegate.count(request);
}
@Override
public void count(CountRequest request, ActionListener<CountResponse> listener) {
delegate.count(request, listener);
}
@Override
public CountRequestBuilder prepareCount(String... indices) {
return delegate.prepareCount(indices);
}
@Override
public ActionFuture<SuggestResponse> suggest(SuggestRequest request) {
return delegate.suggest(request);
}
@Override
public void suggest(SuggestRequest request, ActionListener<SuggestResponse> listener) {
delegate.suggest(request, listener);
}
@Override
public SuggestRequestBuilder prepareSuggest(String... indices) {
return delegate.prepareSuggest(indices);
}
@Override
public ActionFuture<SearchResponse> search(SearchRequest request) {
return delegate.search(request);
}
@Override
public void search(SearchRequest request, ActionListener<SearchResponse> listener) {
delegate.search(request, listener);
}
@Override
public SearchRequestBuilder prepareSearch(String... indices) {
return delegate.prepareSearch(indices).setSearchType(defaultSearchType);
}
@Override
public ActionFuture<SearchResponse> searchScroll(SearchScrollRequest request) {
return delegate.searchScroll(request);
}
@Override
public void searchScroll(SearchScrollRequest request, ActionListener<SearchResponse> listener) {
delegate.searchScroll(request, listener);
}
@Override
public SearchScrollRequestBuilder prepareSearchScroll(String scrollId) {
return delegate.prepareSearchScroll(scrollId);
}
@Override
public ActionFuture<MultiSearchResponse> multiSearch(MultiSearchRequest request) {
return delegate.multiSearch(request);
}
@Override
public void multiSearch(MultiSearchRequest request, ActionListener<MultiSearchResponse> listener) {
delegate.multiSearch(request, listener);
}
@Override
public MultiSearchRequestBuilder prepareMultiSearch() {
return delegate.prepareMultiSearch();
}
@Override
public ActionFuture<SearchResponse> moreLikeThis(MoreLikeThisRequest request) {
return delegate.moreLikeThis(request);
}
@Override
public void moreLikeThis(MoreLikeThisRequest request, ActionListener<SearchResponse> listener) {
delegate.moreLikeThis(request, listener);
}
@Override
public MoreLikeThisRequestBuilder prepareMoreLikeThis(String index, String type, String id) {
return delegate.prepareMoreLikeThis(index, type, id);
}
@Override
public ActionFuture<TermVectorResponse> termVector(TermVectorRequest request) {
return delegate.termVector(request);
}
@Override
public void termVector(TermVectorRequest request, ActionListener<TermVectorResponse> listener) {
delegate.termVector(request, listener);
}
@Override
public TermVectorRequestBuilder prepareTermVector(String index, String type, String id) {
return delegate.prepareTermVector(index, type, id);
}
@Override
public ActionFuture<MultiTermVectorsResponse> multiTermVectors(MultiTermVectorsRequest request) {
return delegate.multiTermVectors(request);
}
@Override
public void multiTermVectors(MultiTermVectorsRequest request, ActionListener<MultiTermVectorsResponse> listener) {
delegate.multiTermVectors(request, listener);
}
@Override
public MultiTermVectorsRequestBuilder prepareMultiTermVectors() {
return delegate.prepareMultiTermVectors();
}
@Override
public ActionFuture<PercolateResponse> percolate(PercolateRequest request) {
return delegate.percolate(request);
}
@Override
public void percolate(PercolateRequest request, ActionListener<PercolateResponse> listener) {
delegate.percolate(request, listener);
}
@Override
public PercolateRequestBuilder preparePercolate() {
return delegate.preparePercolate();
}
@Override
public ActionFuture<MultiPercolateResponse> multiPercolate(MultiPercolateRequest request) {
return delegate.multiPercolate(request);
}
@Override
public void multiPercolate(MultiPercolateRequest request, ActionListener<MultiPercolateResponse> listener) {
delegate.multiPercolate(request, listener);
}
@Override
public MultiPercolateRequestBuilder prepareMultiPercolate() {
return delegate.prepareMultiPercolate();
}
@Override
public ExplainRequestBuilder prepareExplain(String index, String type, String id) {
return delegate.prepareExplain(index, type, id);
}
@Override
public ActionFuture<ExplainResponse> explain(ExplainRequest request) {
return delegate.explain(request);
}
@Override
public void explain(ExplainRequest request, ActionListener<ExplainResponse> listener) {
delegate.explain(request, listener);
}
@Override
public ClearScrollRequestBuilder prepareClearScroll() {
return delegate.prepareClearScroll();
}
@Override
public ActionFuture<ClearScrollResponse> clearScroll(ClearScrollRequest request) {
return delegate.clearScroll(request);
}
@Override
public void clearScroll(ClearScrollRequest request, ActionListener<ClearScrollResponse> listener) {
delegate.clearScroll(request, listener);
}
@Override
public ThreadPool threadPool() {
return delegate.threadPool();
}
@Override
public Settings settings() {
return delegate.settings();
}
@Override
public String toString() {
return "randomized(" + super.toString() + ")";
}
}
| 1no label
|
src_test_java_org_elasticsearch_test_client_RandomizingClient.java
|
3,460 |
public class HazelcastClientBeanDefinitionParser extends AbstractHazelcastBeanDefinitionParser {
protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) {
final SpringXmlBuilder springXmlBuilder = new SpringXmlBuilder(parserContext);
springXmlBuilder.handleClient(element);
return springXmlBuilder.getBeanDefinition();
}
private class SpringXmlBuilder extends SpringXmlBuilderHelper {
private final ParserContext parserContext;
private BeanDefinitionBuilder builder;
private ManagedMap nearCacheConfigMap;//= new HashMap<String, NearCacheConfig>();
public SpringXmlBuilder(ParserContext parserContext) {
this.parserContext = parserContext;
this.builder = BeanDefinitionBuilder.rootBeanDefinition(HazelcastClient.class);
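// the client bean is produced by HazelcastClient.newHazelcastClient(ClientConfig) and shut down when the Spring context closes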
this.builder.setFactoryMethod("newHazelcastClient");
this.builder.setDestroyMethodName("shutdown");
this.nearCacheConfigMap = new ManagedMap();
this.configBuilder = BeanDefinitionBuilder.rootBeanDefinition(ClientConfig.class);
configBuilder.addPropertyValue("nearCacheConfigMap", nearCacheConfigMap);
BeanDefinitionBuilder managedContextBeanBuilder = createBeanBuilder(SpringManagedContext.class);
this.configBuilder.addPropertyValue("managedContext", managedContextBeanBuilder.getBeanDefinition());
}
public AbstractBeanDefinition getBeanDefinition() {
return builder.getBeanDefinition();
}
public void handleClient(Element element) {
handleCommonBeanAttributes(element, builder, parserContext);
final NamedNodeMap attrs = element.getAttributes();
if (attrs != null) {
for (int a = 0; a < attrs.getLength(); a++) {
final org.w3c.dom.Node att = attrs.item(a);
final String name = att.getNodeName();
final String value = att.getNodeValue();
if ("executor-pool-size".equals(name)) {
configBuilder.addPropertyValue("executorPoolSize", value);
} else if ("credentials-ref".equals(name)) {
configBuilder.addPropertyReference("credentials", value);
}
}
}
for (org.w3c.dom.Node node : new IterableNodeList(element, Node.ELEMENT_NODE)) {
final String nodeName = cleanNodeName(node.getNodeName());
if ("group".equals(nodeName)) {
createAndFillBeanBuilder(node, GroupConfig.class, "groupConfig", configBuilder);
} else if ("properties".equals(nodeName)) {
handleProperties(node, configBuilder);
} else if ("network".equals(nodeName)) {
handleNetwork(node);
} else if ("listeners".equals(nodeName)) {
final List listeners = parseListeners(node, ListenerConfig.class);
configBuilder.addPropertyValue("listenerConfigs", listeners);
} else if ("serialization".equals(nodeName)) {
handleSerialization(node);
} else if ("proxy-factories".equals(nodeName)) {
final List list = parseProxyFactories(node, ProxyFactoryConfig.class);
configBuilder.addPropertyValue("proxyFactoryConfigs", list);
} else if ("load-balancer".equals(nodeName)) {
handleLoadBalancer(node);
} else if ("near-cache".equals(nodeName)) {
handleNearCache(node);
}
}
builder.addConstructorArgValue(configBuilder.getBeanDefinition());
}
private void handleNetwork(Node node) {
List<String> members = new ArrayList<String>(10);
fillAttributeValues(node, configBuilder);
for (org.w3c.dom.Node child : new IterableNodeList(node, Node.ELEMENT_NODE)) {
final String nodeName = cleanNodeName(child);
if ("member".equals(nodeName)) {
members.add(getTextContent(child));
} else if ("socket-options".equals(nodeName)) {
createAndFillBeanBuilder(child, SocketOptions.class, "socketOptions", configBuilder);
} else if ("socket-interceptor".equals(nodeName)) {
handleSocketInterceptorConfig(child, configBuilder);
} else if ("ssl".equals(nodeName)) {
handleSSLConfig(child, configBuilder);
}
}
configBuilder.addPropertyValue("addresses", members);
}
private void handleSSLConfig(final Node node, final BeanDefinitionBuilder networkConfigBuilder) {
BeanDefinitionBuilder sslConfigBuilder = createBeanBuilder(SSLConfig.class);
final String implAttribute = "factory-implementation";
fillAttributeValues(node, sslConfigBuilder, implAttribute);
Node implNode = node.getAttributes().getNamedItem(implAttribute);
String implementation = implNode != null ? getTextContent(implNode) : null;
if (implementation != null) {
sslConfigBuilder.addPropertyReference(xmlToJavaName(implAttribute), implementation);
}
for (org.w3c.dom.Node child : new IterableNodeList(node, Node.ELEMENT_NODE)) {
final String name = cleanNodeName(child);
if ("properties".equals(name)) {
handleProperties(child, sslConfigBuilder);
}
}
networkConfigBuilder.addPropertyValue("SSLConfig", sslConfigBuilder.getBeanDefinition());
}
private void handleLoadBalancer(Node node) {
final String type = getAttribute(node, "type");
if ("random".equals(type)) {
configBuilder.addPropertyValue("loadBalancer", new RandomLB());
} else if ("round-robin".equals(type)) {
configBuilder.addPropertyValue("loadBalancer", new RoundRobinLB());
}
}
private void handleNearCache(Node node) {
createAndFillListedBean(node, NearCacheConfig.class, "name", nearCacheConfigMap, "name");
}
}
}
| 1no label
|
hazelcast-spring_src_main_java_com_hazelcast_spring_HazelcastClientBeanDefinitionParser.java
|
1,193 |
boolean success = portsRange.iterate(new PortsRange.PortCallback() {
@Override
public boolean onPortNumber(int portNumber) {
try {
channel = bootstrap.bind(new InetSocketAddress(hostAddress, portNumber));
} catch (Exception e) {
lastException.set(e);
return false;
}
return true;
}
});
| 0true
|
src_main_java_org_elasticsearch_bulk_udp_BulkUdpService.java
|
638 |
public class PeerRecoveryStatus {
public enum Stage {
INIT((byte) 0),
INDEX((byte) 1),
TRANSLOG((byte) 2),
FINALIZE((byte) 3),
DONE((byte) 4);
private final byte value;
Stage(byte value) {
this.value = value;
}
public byte value() {
return value;
}
public static Stage fromValue(byte value) {
if (value == 0) {
return INIT;
} else if (value == 1) {
return INDEX;
} else if (value == 2) {
return TRANSLOG;
} else if (value == 3) {
return FINALIZE;
} else if (value == 4) {
return DONE;
}
throw new ElasticsearchIllegalArgumentException("No stage found for [" + value + ']');
}
}
final Stage stage;
final long startTime;
final long time;
final long indexSize;
final long reusedIndexSize;
final long recoveredIndexSize;
final long recoveredTranslogOperations;
public PeerRecoveryStatus(Stage stage, long startTime, long time, long indexSize, long reusedIndexSize,
long recoveredIndexSize, long recoveredTranslogOperations) {
this.stage = stage;
this.startTime = startTime;
this.time = time;
this.indexSize = indexSize;
this.reusedIndexSize = reusedIndexSize;
this.recoveredIndexSize = recoveredIndexSize;
this.recoveredTranslogOperations = recoveredTranslogOperations;
}
public Stage getStage() {
return this.stage;
}
public long getStartTime() {
return this.startTime;
}
public TimeValue getTime() {
return TimeValue.timeValueMillis(time);
}
public ByteSizeValue getIndexSize() {
return new ByteSizeValue(indexSize);
}
public ByteSizeValue getReusedIndexSize() {
return new ByteSizeValue(reusedIndexSize);
}
public ByteSizeValue getExpectedRecoveredIndexSize() {
return new ByteSizeValue(indexSize - reusedIndexSize);
}
/**
* How much of the index has been recovered.
*/
public ByteSizeValue getRecoveredIndexSize() {
return new ByteSizeValue(recoveredIndexSize);
}
public int getIndexRecoveryProgress() {
if (recoveredIndexSize == 0) {
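// nothing recovered yet: an index that was fully reused from a local copy needs no recovery, so report it as complete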
if (indexSize != 0 && indexSize == reusedIndexSize) {
return 100;
}
return 0;
}
return (int) (((double) recoveredIndexSize) / getExpectedRecoveredIndexSize().bytes() * 100);
}
public long getRecoveredTranslogOperations() {
return recoveredTranslogOperations;
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_indices_status_PeerRecoveryStatus.java
|
585 |
public class TaxHostException extends TaxException {
private static final long serialVersionUID = 1L;
public TaxHostException() {
super();
}
public TaxHostException(String message, Throwable cause) {
super(message, cause);
}
public TaxHostException(String message) {
super(message);
}
public TaxHostException(Throwable cause) {
super(cause);
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_vendor_service_exception_TaxHostException.java
|
2,229 |
class CustomBoostFactorWeight extends Weight {
final Weight subQueryWeight;
final Bits[] docSets;
public CustomBoostFactorWeight(Weight subQueryWeight, int filterFunctionLength) throws IOException {
this.subQueryWeight = subQueryWeight;
this.docSets = new Bits[filterFunctionLength];
}
public Query getQuery() {
return FiltersFunctionScoreQuery.this;
}
@Override
public float getValueForNormalization() throws IOException {
float sum = subQueryWeight.getValueForNormalization();
sum *= getBoost() * getBoost();
return sum;
}
@Override
public void normalize(float norm, float topLevelBoost) {
subQueryWeight.normalize(norm, topLevelBoost * getBoost());
}
@Override
public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder, boolean topScorer, Bits acceptDocs) throws IOException {
// we ignore scoreDocsInOrder parameter, because we need to score in
// order if documents are scored with a script. The
// ShardLookup depends on in order scoring.
Scorer subQueryScorer = subQueryWeight.scorer(context, true, false, acceptDocs);
if (subQueryScorer == null) {
return null;
}
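// resolve each filter to a per-segment Bits up front so the scorer can cheaply test which functions apply to a document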
for (int i = 0; i < filterFunctions.length; i++) {
FilterFunction filterFunction = filterFunctions[i];
filterFunction.function.setNextReader(context);
docSets[i] = DocIdSets.toSafeBits(context.reader(), filterFunction.filter.getDocIdSet(context, acceptDocs));
}
return new CustomBoostFactorScorer(this, subQueryScorer, scoreMode, filterFunctions, maxBoost, docSets, combineFunction);
}
@Override
public Explanation explain(AtomicReaderContext context, int doc) throws IOException {
Explanation subQueryExpl = subQueryWeight.explain(context, doc);
if (!subQueryExpl.isMatch()) {
return subQueryExpl;
}
// First: Gather explanations for all filters
List<ComplexExplanation> filterExplanations = new ArrayList<ComplexExplanation>();
for (FilterFunction filterFunction : filterFunctions) {
Bits docSet = DocIdSets.toSafeBits(context.reader(),
filterFunction.filter.getDocIdSet(context, context.reader().getLiveDocs()));
if (docSet.get(doc)) {
filterFunction.function.setNextReader(context);
Explanation functionExplanation = filterFunction.function.explainScore(doc, subQueryExpl);
double factor = functionExplanation.getValue();
float sc = CombineFunction.toFloat(factor);
ComplexExplanation filterExplanation = new ComplexExplanation(true, sc, "function score, product of:");
filterExplanation.addDetail(new Explanation(1.0f, "match filter: " + filterFunction.filter.toString()));
filterExplanation.addDetail(functionExplanation);
filterExplanations.add(filterExplanation);
}
}
if (filterExplanations.size() == 0) {
float sc = getBoost() * subQueryExpl.getValue();
Explanation res = new ComplexExplanation(true, sc, "function score, no filter match, product of:");
res.addDetail(subQueryExpl);
res.addDetail(new Explanation(getBoost(), "queryBoost"));
return res;
}
// Second: Compute the factor that would have been computed by the
// filters
double factor = 1.0;
switch (scoreMode) {
case First:
factor = filterExplanations.get(0).getValue();
break;
case Max:
factor = Double.NEGATIVE_INFINITY;
for (int i = 0; i < filterExplanations.size(); i++) {
factor = Math.max(filterExplanations.get(i).getValue(), factor);
}
break;
case Min:
factor = Double.POSITIVE_INFINITY;
for (int i = 0; i < filterExplanations.size(); i++) {
factor = Math.min(filterExplanations.get(i).getValue(), factor);
}
break;
case Multiply:
for (int i = 0; i < filterExplanations.size(); i++) {
factor *= filterExplanations.get(i).getValue();
}
break;
default: // Avg / Total
double totalFactor = 0.0;
int count = 0;
for (int i = 0; i < filterExplanations.size(); i++) {
totalFactor += filterExplanations.get(i).getValue();
count++;
}
if (count != 0) {
factor = totalFactor;
if (scoreMode == ScoreMode.Avg) {
factor /= count;
}
}
}
ComplexExplanation factorExplanation = new ComplexExplanation(true, CombineFunction.toFloat(factor),
"function score, score mode [" + scoreMode.toString().toLowerCase(Locale.ROOT) + "]");
for (int i = 0; i < filterExplanations.size(); i++) {
factorExplanation.addDetail(filterExplanations.get(i));
}
return combineFunction.explain(getBoost(), subQueryExpl, factorExplanation, maxBoost);
}
}
| 1no label
|
src_main_java_org_elasticsearch_common_lucene_search_function_FiltersFunctionScoreQuery.java
|
340 |
public class NodeReplaceInsert extends BaseHandler {
private static final Log LOG = LogFactory.getLog(NodeReplaceInsert.class);
private static final Comparator<Node> NODE_COMPARATOR = new Comparator<Node>() {
public int compare(Node arg0, Node arg1) {
int response = -1;
if (arg0.isSameNode(arg1)) {
response = 0;
}
//determine if the element is an ancestor
if (response != 0) {
boolean eof = false;
Node parentNode = arg0;
while (!eof) {
parentNode = parentNode.getParentNode();
if (parentNode == null) {
eof = true;
} else if (arg1.isSameNode(parentNode)) {
response = 0;
eof = true;
}
}
}
return response;
}
};
public Node[] merge(List<Node> nodeList1, List<Node> nodeList2, List<Node> exhaustedNodes) {
if (CollectionUtils.isEmpty(nodeList1) || CollectionUtils.isEmpty(nodeList2)) {
return null;
}
Node[] primaryNodes = new Node[nodeList1.size()];
for (int j=0;j<primaryNodes.length;j++){
primaryNodes[j] = nodeList1.get(j);
}
ArrayList<Node> list = new ArrayList<Node>();
for (int j=0;j<nodeList2.size();j++){
list.add(nodeList2.get(j));
}
List<Node> usedNodes = matchNodes(exhaustedNodes, primaryNodes, list);
Node[] response = {};
response = usedNodes.toArray(response);
return response;
}
private boolean exhaustedNodesContains(List<Node> exhaustedNodes, Node node) {
boolean contains = false;
for (Node exhaustedNode : exhaustedNodes) {
if (NODE_COMPARATOR.compare(exhaustedNode, node) == 0) {
contains = true;
break;
}
}
return contains;
}
private List<Node> matchNodes(List<Node> exhaustedNodes, Node[] primaryNodes, ArrayList<Node> list) {
List<Node> usedNodes = new ArrayList<Node>(20);
Iterator<Node> itr = list.iterator();
Node parentNode = primaryNodes[0].getParentNode();
Document ownerDocument = parentNode.getOwnerDocument();
while(itr.hasNext()) {
Node node = itr.next();
if (Element.class.isAssignableFrom(node.getClass()) && !exhaustedNodesContains(exhaustedNodes, node)) {
if(LOG.isDebugEnabled()) {
StringBuffer sb = new StringBuffer();
sb.append("matching node for replacement: ");
sb.append(node.getNodeName());
int attrLength = node.getAttributes().getLength();
for (int j=0;j<attrLength;j++){
sb.append(" : (");
sb.append(node.getAttributes().item(j).getNodeName());
sb.append("/");
sb.append(node.getAttributes().item(j).getNodeValue());
sb.append(")");
}
LOG.debug(sb.toString());
}
if (!checkNode(usedNodes, primaryNodes, node)) {
//simply append the node if all the above fails
Node newNode = ownerDocument.importNode(node.cloneNode(true), true);
parentNode.appendChild(newNode);
usedNodes.add(node);
}
}
}
return usedNodes;
}
protected boolean checkNode(List<Node> usedNodes, Node[] primaryNodes, Node node) {
//find matching nodes based on id
if (replaceNode(primaryNodes, node, "id", usedNodes)) {
return true;
}
//find matching nodes based on name
if (replaceNode(primaryNodes, node, "name", usedNodes)) {
return true;
}
//check if this same node already exists
if (exactNodeExists(primaryNodes, node, usedNodes)) {
return true;
}
return false;
}
protected boolean exactNodeExists(Node[] primaryNodes, Node testNode, List<Node> usedNodes) {
for (int j=0;j<primaryNodes.length;j++){
if (primaryNodes[j].isEqualNode(testNode)) {
usedNodes.add(primaryNodes[j]);
return true;
}
}
return false;
}
protected boolean replaceNode(Node[] primaryNodes, Node testNode, final String attribute, List<Node> usedNodes) {
if (testNode.getAttributes().getNamedItem(attribute) == null) {
return false;
}
//filter out primary nodes that don't have the attribute
ArrayList<Node> filterList = new ArrayList<Node>();
for (int j=0;j<primaryNodes.length;j++){
if (primaryNodes[j].getAttributes().getNamedItem(attribute) != null) {
filterList.add(primaryNodes[j]);
}
}
Node[] filtered = {};
filtered = filterList.toArray(filtered);
Comparator<Node> idCompare = new Comparator<Node>() {
public int compare(Node arg0, Node arg1) {
Node id1 = arg0.getAttributes().getNamedItem(attribute);
Node id2 = arg1.getAttributes().getNamedItem(attribute);
String idVal1 = id1.getNodeValue();
String idVal2 = id2.getNodeValue();
return idVal1.compareTo(idVal2);
}
};
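// sort the candidate nodes by the attribute value so binarySearch can locate the node to replace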
Arrays.sort(filtered, idCompare);
int pos = Arrays.binarySearch(filtered, testNode, idCompare);
if (pos >= 0) {
Node newNode = filtered[pos].getOwnerDocument().importNode(testNode.cloneNode(true), true);
filtered[pos].getParentNode().replaceChild(newNode, filtered[pos]);
usedNodes.add(testNode);
return true;
}
return false;
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_extensibility_context_merge_handlers_NodeReplaceInsert.java
|
1,604 |
public class Property implements Serializable {
private static final long serialVersionUID = 1L;
protected String name;
protected String value;
protected String displayValue;
protected FieldMetadata metadata = new BasicFieldMetadata();
protected boolean isAdvancedCollection = false;
protected Boolean isDirty = false;
protected String unHtmlEncodedValue;
protected String rawValue;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
public FieldMetadata getMetadata() {
return metadata;
}
public void setMetadata(FieldMetadata metadata) {
this.metadata = metadata;
}
public String getDisplayValue() {
return displayValue;
}
public void setDisplayValue(String displayValue) {
this.displayValue = displayValue;
}
public Boolean getIsDirty() {
return isDirty;
}
public void setIsDirty(Boolean isDirty) {
this.isDirty = isDirty;
}
public String getUnHtmlEncodedValue() {
return unHtmlEncodedValue;
}
public void setUnHtmlEncodedValue(String unHtmlEncodedValue) {
this.unHtmlEncodedValue = unHtmlEncodedValue;
}
public String getRawValue() {
return rawValue;
}
public void setRawValue(String rawValue) {
this.rawValue = rawValue;
}
public boolean isAdvancedCollection() {
return isAdvancedCollection;
}
public void setAdvancedCollection(boolean advancedCollection) {
isAdvancedCollection = advancedCollection;
}
@Override
public String toString() {
return getName() + ": " + getValue();
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
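// only a BasicFieldMetadata carrying a merged property type contributes to the hash; collection or absent metadata counts as 0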
result = prime * result + ((metadata == null || metadata instanceof CollectionMetadata || ((BasicFieldMetadata) metadata).getMergedPropertyType() == null) ? 0 : ((BasicFieldMetadata) metadata).getMergedPropertyType().hashCode());
result = prime * result + ((name == null) ? 0 : name.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Property other = (Property) obj;
if (metadata == null || metadata instanceof CollectionMetadata || ((BasicFieldMetadata) metadata).getMergedPropertyType() == null) {
if (other.metadata != null && other.metadata instanceof BasicFieldMetadata && ((BasicFieldMetadata) other.metadata).getMergedPropertyType() != null)
return false;
} else if (metadata instanceof BasicFieldMetadata && other.metadata instanceof BasicFieldMetadata && !((BasicFieldMetadata) metadata).getMergedPropertyType().equals(((BasicFieldMetadata) other.metadata).getMergedPropertyType()))
return false;
if (name == null) {
if (other.name != null)
return false;
} else if (!name.equals(other.name))
return false;
return true;
}
}
| 1no label
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_dto_Property.java
|
344 |
static class MapTryLockThread extends TestHelper {
public MapTryLockThread(IMap map, String upKey, String downKey){
super(map, upKey, downKey);
}
public void doRun() throws Exception{
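// always take upKey before downKey; tryLock returns immediately when the lock is held, so threads back off instead of deadlocking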
if(map.tryLock(upKey)){
try{
if(map.tryLock(downKey)){
try {
work();
}finally {
map.unlock(downKey);
}
}
}finally {
map.unlock(upKey);
}
}
}
}
| 0true
|
hazelcast-client_src_test_java_com_hazelcast_client_map_ClientMapTryLockConcurrentTests.java
|
65 |
public class OSharedLockEntry<REQUESTER_TYPE> {
/** The requester lock : generally {@link Thread} or {@link Runnable}. */
protected REQUESTER_TYPE requester;
/**
* Count shared locks held by this requester for the resource.
* <p>
* Used for reentrancy : when the same requester acquire a shared lock for the same resource in a nested code.
*/
protected int countSharedLocks;
/** Next shared lock for the same resource by another requester. */
protected OSharedLockEntry<REQUESTER_TYPE> nextSharedLock;
protected OSharedLockEntry() {
}
public OSharedLockEntry(final REQUESTER_TYPE iRequester) {
super();
requester = iRequester;
countSharedLocks = 1;
}
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_concur_lock_OSharedLockEntry.java
|
3,400 |
public static class SendEventOperation extends AbstractOperation {
private EventPacket eventPacket;
private int orderKey;
public SendEventOperation() {
}
public SendEventOperation(EventPacket eventPacket, int orderKey) {
this.eventPacket = eventPacket;
this.orderKey = orderKey;
}
@Override
public void run() throws Exception {
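// runs on the target member: hands the packet to the local event service, keyed so events with the same orderKey stay ordered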
EventServiceImpl eventService = (EventServiceImpl) getNodeEngine().getEventService();
eventService.executeEvent(eventService.new EventPacketProcessor(eventPacket, orderKey));
}
@Override
public boolean returnsResponse() {
return true;
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
super.writeInternal(out);
eventPacket.writeData(out);
out.writeInt(orderKey);
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
super.readInternal(in);
eventPacket = new EventPacket();
eventPacket.readData(in);
orderKey = in.readInt();
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_spi_impl_EventServiceImpl.java
|
107 |
public class OIOException extends OException {
private static final long serialVersionUID = -3003977236203691448L;
public OIOException(String string) {
super(string);
}
public OIOException(String message, Throwable cause) {
super(message, cause);
}
}
| 0true
|
commons_src_main_java_com_orientechnologies_common_io_OIOException.java
|
1,445 |
public class SnapshotId implements Serializable, Streamable {
private String repository;
private String snapshot;
// Caching hash code
private int hashCode;
private SnapshotId() {
}
/**
* Constructs new snapshot id
*
* @param repository repository name
* @param snapshot snapshot name
*/
public SnapshotId(String repository, String snapshot) {
this.repository = repository;
this.snapshot = snapshot;
this.hashCode = computeHashCode();
}
/**
* Returns repository name
*
* @return repository name
*/
public String getRepository() {
return repository;
}
/**
* Returns snapshot name
*
* @return snapshot name
*/
public String getSnapshot() {
return snapshot;
}
@Override
public String toString() {
return repository + ":" + snapshot;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
SnapshotId snapshotId = (SnapshotId) o;
return snapshot.equals(snapshotId.snapshot) && repository.equals(snapshotId.repository);
}
@Override
public int hashCode() {
return hashCode;
}
private int computeHashCode() {
int result = repository != null ? repository.hashCode() : 0;
result = 31 * result + snapshot.hashCode();
return result;
}
/**
* Reads snapshot id from stream input
*
* @param in stream input
* @return snapshot id
* @throws IOException
*/
public static SnapshotId readSnapshotId(StreamInput in) throws IOException {
SnapshotId snapshot = new SnapshotId();
snapshot.readFrom(in);
return snapshot;
}
/**
* {@inheritDoc}
*/
@Override
public void readFrom(StreamInput in) throws IOException {
repository = in.readString();
snapshot = in.readString();
hashCode = computeHashCode();
}
/**
* {@inheritDoc}
*/
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(repository);
out.writeString(snapshot);
}
}
| 0true
|
src_main_java_org_elasticsearch_cluster_metadata_SnapshotId.java
|
1,387 |
public static class Map extends Mapper<NullWritable, FaunusElement, LongWritable, FaunusVertex> {
private final HashMap<Long, FaunusVertex> map = new HashMap<Long, FaunusVertex>();
private static final int MAX_MAP_SIZE = 5000;
private final LongWritable longWritable = new LongWritable();
private int counter = 0;
private Configuration faunusConf;
@Override
public void setup(Context context) {
faunusConf = ModifiableHadoopConfiguration.of(DEFAULT_COMPAT.getContextConfiguration(context));
}
@Override
public void map(final NullWritable key, final FaunusElement value, final Mapper<NullWritable, FaunusElement, LongWritable, FaunusVertex>.Context context) throws IOException, InterruptedException {
if (value instanceof StandardFaunusEdge) {
final long outId = ((StandardFaunusEdge) value).getVertexId(OUT);
final long inId = ((StandardFaunusEdge) value).getVertexId(IN);
FaunusVertex vertex = this.map.get(outId);
if (null == vertex) {
vertex = new FaunusVertex(faunusConf, outId);
this.map.put(outId, vertex);
}
vertex.addEdge(OUT, WritableUtils.clone((StandardFaunusEdge) value, context.getConfiguration()));
this.counter++;
vertex = this.map.get(inId);
if (null == vertex) {
vertex = new FaunusVertex(faunusConf, inId);
this.map.put(inId, vertex);
}
vertex.addEdge(IN, WritableUtils.clone((StandardFaunusEdge) value, context.getConfiguration()));
DEFAULT_COMPAT.incrementContextCounter(context, Counters.EDGES_PROCESSED, 1L);
this.counter++;
} else {
final long id = value.getLongId();
FaunusVertex vertex = this.map.get(id);
if (null == vertex) {
vertex = new FaunusVertex(faunusConf, id);
this.map.put(id, vertex);
}
vertex.addAllProperties(value.getPropertyCollection());
vertex.addEdges(BOTH, WritableUtils.clone((FaunusVertex) value, context.getConfiguration()));
this.counter++;
}
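// flush once enough elements have accumulated, bounding the mapper's in-memory vertex map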
if (this.counter > MAX_MAP_SIZE)
this.flush(context);
}
@Override
public void cleanup(final Mapper<NullWritable, FaunusElement, LongWritable, FaunusVertex>.Context context) throws IOException, InterruptedException {
this.flush(context);
}
private void flush(final Mapper<NullWritable, FaunusElement, LongWritable, FaunusVertex>.Context context) throws IOException, InterruptedException {
for (final FaunusVertex vertex : this.map.values()) {
this.longWritable.set(vertex.getLongId());
context.write(this.longWritable, vertex);
DEFAULT_COMPAT.incrementContextCounter(context, Counters.VERTICES_EMITTED, 1L);
}
this.map.clear();
this.counter = 0;
}
}
| 1no label
|
titan-hadoop-parent_titan-hadoop-core_src_main_java_com_thinkaurelius_titan_hadoop_formats_edgelist_EdgeListInputMapReduce.java
|
251 |
@Deprecated
public class Broadleaf2CompatibilityMySQL5InnoDBDialect extends MySQL5InnoDBDialect {
public Broadleaf2CompatibilityMySQL5InnoDBDialect() {
super();
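// Broadleaf 2.x schemas mapped BOOLEAN columns to BIT; register that mapping so existing tables keep working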
registerColumnType( java.sql.Types.BOOLEAN, "bit" );
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_dialect_Broadleaf2CompatibilityMySQL5InnoDBDialect.java
|
497 |
return scheduledExecutor.scheduleWithFixedDelay(new Runnable() {
public void run() {
executeInternal(command);
}
}, initialDelay, period, unit);
| 1no label
|
hazelcast-client_src_main_java_com_hazelcast_client_spi_impl_ClientExecutionServiceImpl.java
|
225 |
private static class HazelcastInstanceAwareObject implements HazelcastInstanceAware {
HazelcastInstance hazelcastInstance;
public HazelcastInstance getHazelcastInstance() {
return hazelcastInstance;
}
@Override
public void setHazelcastInstance(HazelcastInstance hazelcastInstance) {
this.hazelcastInstance = hazelcastInstance;
}
}
| 0true
|
hazelcast-client_src_main_java_com_hazelcast_client_examples_ClientTestApp.java
|
1,236 |
final class RootFolderScanner implements IResourceVisitor {
private final Module defaultModule;
private final JDTModelLoader modelLoader;
private final JDTModuleManager moduleManager;
private final IFolderVirtualFile rootDir;
private final TypeChecker typeChecker;
private final List<IFile> scannedFiles;
private final PhasedUnits phasedUnits;
private Module module;
private SubMonitor monitor;
private boolean isInResourceFolder = false;
private boolean isInSourceFolder = false;
private RootFolderType rootFolderType;
RootFolderScanner(RootFolderType rootFolderType, Module defaultModule, JDTModelLoader modelLoader,
JDTModuleManager moduleManager, IFolderVirtualFile rootDir, TypeChecker typeChecker,
List<IFile> scannedFiles, PhasedUnits phasedUnits, SubMonitor monitor) {
this.rootFolderType = rootFolderType;
this.isInResourceFolder = rootFolderType.equals(RootFolderType.RESOURCE);
this.isInSourceFolder = rootFolderType.equals(RootFolderType.SOURCE);
this.defaultModule = defaultModule;
this.modelLoader = modelLoader;
this.moduleManager = moduleManager;
this.rootDir = rootDir;
this.typeChecker = typeChecker;
this.scannedFiles = scannedFiles;
this.phasedUnits = phasedUnits;
this.monitor = monitor;
}
public boolean visit(IResource resource) throws CoreException {
Package pkg;
monitor.setWorkRemaining(10000);
monitor.worked(1);
if (resource.equals(rootDir.getResource())) {
resource.setSessionProperty(CeylonBuilder.RESOURCE_PROPERTY_PACKAGE_MODEL, new WeakReference<Package>(modelLoader.findPackage("")));
resource.setSessionProperty(CeylonBuilder.RESOURCE_PROPERTY_ROOT_FOLDER, rootDir.getFolder());
resource.setSessionProperty(CeylonBuilder.RESOURCE_PROPERTY_ROOT_FOLDER_TYPE, rootFolderType);
return true;
}
if (resource.getParent().equals(rootDir.getResource())) {
// We've come back to a source directory child :
// => reset the current Module to default and set the package to emptyPackage
module = defaultModule;
pkg = modelLoader.findPackage("");
assert(pkg != null);
}
if (resource instanceof IFolder) {
List<String> pkgName = getPackageName((IFolder)resource);
String pkgNameAsString = formatPath(pkgName);
if ( module != defaultModule ) {
if (! pkgNameAsString.startsWith(module.getNameAsString() + ".")) {
// We've ran above the last module => reset module to default
module = defaultModule;
}
}
Module realModule = modelLoader.getLoadedModule(pkgNameAsString);
if (realModule != null) {
// The module descriptor had probably been found in another source directory
module = realModule;
}
pkg = modelLoader.findOrCreatePackage(module, pkgNameAsString);
resource.setSessionProperty(CeylonBuilder.RESOURCE_PROPERTY_PACKAGE_MODEL, new WeakReference<Package>(pkg));
resource.setSessionProperty(CeylonBuilder.RESOURCE_PROPERTY_ROOT_FOLDER, rootDir.getFolder());
return true;
}
if (resource instanceof IFile) {
IFile file = (IFile) resource;
if (file.exists()) {
boolean isSourceFile = isInSourceFolder && isCompilable(file);
if (isInResourceFolder || isSourceFile) {
List<String> pkgName = getPackageName(file.getParent());
String pkgNameAsString = formatPath(pkgName);
pkg = modelLoader.findOrCreatePackage(module, pkgNameAsString);
if (scannedFiles != null) {
scannedFiles.add((IFile)resource);
}
if (isSourceFile) {
if (CeylonBuilder.isCeylon(file)) {
ResourceVirtualFile virtualFile = createResourceVirtualFile(file);
try {
PhasedUnit newPhasedUnit = CeylonBuilder.parseFileToPhasedUnit(moduleManager,
typeChecker, virtualFile, rootDir, pkg);
phasedUnits.addPhasedUnit(virtualFile, newPhasedUnit);
}
catch (Exception e) {
e.printStackTrace();
}
}
}
}
}
}
return false;
}
private List<String> getPackageName(IContainer container) {
List<String> pkgName = Arrays.asList(container.getProjectRelativePath()
.makeRelativeTo(rootDir.getResource().getProjectRelativePath()).segments());
return pkgName;
}
}
| 1no label
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_core_builder_RootFolderScanner.java
|
106 |
static final class ValueSpliterator<K,V> extends Traverser<K,V>
implements ConcurrentHashMapSpliterator<V> {
long est; // size estimate
ValueSpliterator(Node<K,V>[] tab, int size, int index, int limit,
long est) {
super(tab, size, index, limit);
this.est = est;
}
public ConcurrentHashMapSpliterator<V> trySplit() {
int i, f, h;
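// split the unprocessed half of the table range into a new spliterator and halve the size estimate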
return (h = ((i = baseIndex) + (f = baseLimit)) >>> 1) <= i ? null :
new ValueSpliterator<K,V>(tab, baseSize, baseLimit = h,
f, est >>>= 1);
}
public void forEachRemaining(Action<? super V> action) {
if (action == null) throw new NullPointerException();
for (Node<K,V> p; (p = advance()) != null;)
action.apply(p.val);
}
public boolean tryAdvance(Action<? super V> action) {
if (action == null) throw new NullPointerException();
Node<K,V> p;
if ((p = advance()) == null)
return false;
action.apply(p.val);
return true;
}
public long estimateSize() { return est; }
}
| 0true
|
src_main_java_jsr166e_ConcurrentHashMapV8.java
|
4,722 |
public class RepositoriesService extends AbstractComponent implements ClusterStateListener {
private final RepositoryTypesRegistry typesRegistry;
private final Injector injector;
private final ClusterService clusterService;
private volatile ImmutableMap<String, RepositoryHolder> repositories = ImmutableMap.of();
@Inject
public RepositoriesService(Settings settings, ClusterService clusterService, RepositoryTypesRegistry typesRegistry, Injector injector) {
super(settings);
this.typesRegistry = typesRegistry;
this.injector = injector;
this.clusterService = clusterService;
// Doesn't make sense to maintain repositories on non-master and non-data nodes
// Nothing happens there anyway
if (DiscoveryNode.dataNode(settings) || DiscoveryNode.masterNode(settings)) {
clusterService.add(this);
}
}
/**
* Registers new repository in the cluster
* <p/>
* This method can be only called on the master node. It tries to create a new repository on the master
* and if it was successful it adds new repository to cluster metadata.
*
* @param request register repository request
* @param listener register repository listener
*/
public void registerRepository(final RegisterRepositoryRequest request, final ActionListener<RegisterRepositoryResponse> listener) {
final RepositoryMetaData newRepositoryMetaData = new RepositoryMetaData(request.name, request.type, request.settings);
clusterService.submitStateUpdateTask(request.cause, new AckedClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) {
ensureRepositoryNotInUse(currentState, request.name);
// Trying to create the new repository on master to make sure it works
if (!registerRepository(newRepositoryMetaData)) {
// The new repository has the same settings as the old one - ignore
return currentState;
}
MetaData metaData = currentState.metaData();
MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
RepositoriesMetaData repositories = metaData.custom(RepositoriesMetaData.TYPE);
if (repositories == null) {
logger.info("put repository [{}]", request.name);
repositories = new RepositoriesMetaData(new RepositoryMetaData(request.name, request.type, request.settings));
} else {
boolean found = false;
List<RepositoryMetaData> repositoriesMetaData = new ArrayList<RepositoryMetaData>(repositories.repositories().size() + 1);
for (RepositoryMetaData repositoryMetaData : repositories.repositories()) {
if (repositoryMetaData.name().equals(newRepositoryMetaData.name())) {
found = true;
repositoriesMetaData.add(newRepositoryMetaData);
} else {
repositoriesMetaData.add(repositoryMetaData);
}
}
if (!found) {
logger.info("put repository [{}]", request.name);
repositoriesMetaData.add(new RepositoryMetaData(request.name, request.type, request.settings));
} else {
logger.info("update repository [{}]", request.name);
}
repositories = new RepositoriesMetaData(repositoriesMetaData.toArray(new RepositoryMetaData[repositoriesMetaData.size()]));
}
mdBuilder.putCustom(RepositoriesMetaData.TYPE, repositories);
return ClusterState.builder(currentState).metaData(mdBuilder).build();
}
@Override
public void onFailure(String source, Throwable t) {
logger.warn("failed to create repository [{}]", t, request.name);
listener.onFailure(t);
}
@Override
public TimeValue timeout() {
return request.masterNodeTimeout();
}
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
}
@Override
public boolean mustAck(DiscoveryNode discoveryNode) {
return discoveryNode.masterNode();
}
@Override
public void onAllNodesAcked(@Nullable Throwable t) {
listener.onResponse(new RegisterRepositoryResponse(true));
}
@Override
public void onAckTimeout() {
listener.onResponse(new RegisterRepositoryResponse(false));
}
@Override
public TimeValue ackTimeout() {
return request.ackTimeout();
}
});
}
/**
* Unregisters repository in the cluster
* <p/>
* This method can be only called on the master node. It removes repository information from cluster metadata.
*
* @param request unregister repository request
* @param listener unregister repository listener
*/
public void unregisterRepository(final UnregisterRepositoryRequest request, final ActionListener<UnregisterRepositoryResponse> listener) {
clusterService.submitStateUpdateTask(request.cause, new AckedClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) {
ensureRepositoryNotInUse(currentState, request.name);
MetaData metaData = currentState.metaData();
MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
RepositoriesMetaData repositories = metaData.custom(RepositoriesMetaData.TYPE);
if (repositories != null && repositories.repositories().size() > 0) {
List<RepositoryMetaData> repositoriesMetaData = new ArrayList<RepositoryMetaData>(repositories.repositories().size());
boolean changed = false;
for (RepositoryMetaData repositoryMetaData : repositories.repositories()) {
if (Regex.simpleMatch(request.name, repositoryMetaData.name())) {
logger.info("delete repository [{}]", repositoryMetaData.name());
changed = true;
} else {
repositoriesMetaData.add(repositoryMetaData);
}
}
if (changed) {
repositories = new RepositoriesMetaData(repositoriesMetaData.toArray(new RepositoryMetaData[repositoriesMetaData.size()]));
mdBuilder.putCustom(RepositoriesMetaData.TYPE, repositories);
return ClusterState.builder(currentState).metaData(mdBuilder).build();
}
}
throw new RepositoryMissingException(request.name);
}
@Override
public void onFailure(String source, Throwable t) {
listener.onFailure(t);
}
@Override
public TimeValue timeout() {
return request.masterNodeTimeout();
}
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
}
@Override
public boolean mustAck(DiscoveryNode discoveryNode) {
// Since operation occurs only on masters, it's enough that only master-eligible nodes acked
return discoveryNode.masterNode();
}
@Override
public void onAllNodesAcked(@Nullable Throwable t) {
listener.onResponse(new UnregisterRepositoryResponse(true));
}
@Override
public void onAckTimeout() {
listener.onResponse(new UnregisterRepositoryResponse(false));
}
@Override
public TimeValue ackTimeout() {
return request.ackTimeout();
}
});
}
/**
* Checks if new repositories appeared in or disappeared from cluster metadata and updates current list of
* repositories accordingly.
*
* @param event cluster changed event
*/
@Override
public void clusterChanged(ClusterChangedEvent event) {
try {
RepositoriesMetaData oldMetaData = event.previousState().getMetaData().custom(RepositoriesMetaData.TYPE);
RepositoriesMetaData newMetaData = event.state().getMetaData().custom(RepositoriesMetaData.TYPE);
// Check if repositories got changed
if ((oldMetaData == null && newMetaData == null) || (oldMetaData != null && oldMetaData.equals(newMetaData))) {
return;
}
Map<String, RepositoryHolder> survivors = newHashMap();
// First, remove repositories that are no longer there
for (Map.Entry<String, RepositoryHolder> entry : repositories.entrySet()) {
if (newMetaData == null || newMetaData.repository(entry.getKey()) == null) {
closeRepository(entry.getKey(), entry.getValue());
} else {
survivors.put(entry.getKey(), entry.getValue());
}
}
ImmutableMap.Builder<String, RepositoryHolder> builder = ImmutableMap.builder();
if (newMetaData != null) {
// Now go through all repositories and update existing or create missing
for (RepositoryMetaData repositoryMetaData : newMetaData.repositories()) {
RepositoryHolder holder = survivors.get(repositoryMetaData.name());
if (holder != null) {
// Found previous version of this repository
if (!holder.type.equals(repositoryMetaData.type()) || !holder.settings.equals(repositoryMetaData.settings())) {
// Previous version is different from the version in settings
closeRepository(repositoryMetaData.name(), holder);
holder = createRepositoryHolder(repositoryMetaData);
}
} else {
holder = createRepositoryHolder(repositoryMetaData);
}
if (holder != null) {
builder.put(repositoryMetaData.name(), holder);
}
}
}
repositories = builder.build();
} catch (Throwable ex) {
logger.warn("failure updating cluster state ", ex);
}
}
/**
* Returns registered repository
* <p/>
* This method is called only on the master node
*
* @param repository repository name
* @return registered repository
* @throws RepositoryMissingException if repository with such name isn't registered
*/
public Repository repository(String repository) {
RepositoryHolder holder = repositories.get(repository);
if (holder != null) {
return holder.repository;
}
throw new RepositoryMissingException(repository);
}
/**
* Returns registered index shard repository
* <p/>
* This method is called only on data nodes
*
* @param repository repository name
* @return registered repository
* @throws RepositoryMissingException if repository with such name isn't registered
*/
public IndexShardRepository indexShardRepository(String repository) {
RepositoryHolder holder = repositories.get(repository);
if (holder != null) {
return holder.indexShardRepository;
}
throw new RepositoryMissingException(repository);
}
/**
* Creates a new repository and adds it to the list of registered repositories.
* <p/>
* If a repository with the same name but different types or settings already exists, it will be closed and
* replaced with the new repository. If a repository with the same name exists but it has the same type and settings
* the new repository is ignored.
*
* @param repositoryMetaData new repository metadata
* @return {@code true} if new repository was added or {@code false} if it was ignored
*/
private boolean registerRepository(RepositoryMetaData repositoryMetaData) {
RepositoryHolder previous = repositories.get(repositoryMetaData.name());
if (previous != null) {
if (previous.type.equals(repositoryMetaData.type()) && previous.settings.equals(repositoryMetaData.settings())) {
// Previous version is the same as this one - ignore it
return false;
}
}
RepositoryHolder holder = createRepositoryHolder(repositoryMetaData);
if (previous != null) {
// Closing previous version
closeRepository(repositoryMetaData.name(), previous);
}
Map<String, RepositoryHolder> newRepositories = newHashMap(repositories);
newRepositories.put(repositoryMetaData.name(), holder);
repositories = ImmutableMap.copyOf(newRepositories);
return true;
}
/**
* Closes the repository
*
* @param name repository name
* @param holder repository holder
*/
private void closeRepository(String name, RepositoryHolder holder) {
logger.debug("closing repository [{}][{}]", holder.type, name);
if (holder.injector != null) {
Injectors.close(holder.injector);
}
if (holder.repository != null) {
holder.repository.close();
}
}
/**
* Creates repository holder
*/
private RepositoryHolder createRepositoryHolder(RepositoryMetaData repositoryMetaData) {
logger.debug("creating repository [{}][{}]", repositoryMetaData.type(), repositoryMetaData.name());
Injector repositoryInjector = null;
try {
ModulesBuilder modules = new ModulesBuilder();
RepositoryName name = new RepositoryName(repositoryMetaData.type(), repositoryMetaData.name());
modules.add(new RepositoryNameModule(name));
modules.add(new RepositoryModule(name, repositoryMetaData.settings(), this.settings, typesRegistry));
repositoryInjector = modules.createChildInjector(injector);
Repository repository = repositoryInjector.getInstance(Repository.class);
IndexShardRepository indexShardRepository = repositoryInjector.getInstance(IndexShardRepository.class);
repository.start();
return new RepositoryHolder(repositoryMetaData.type(), repositoryMetaData.settings(), repositoryInjector, repository, indexShardRepository);
} catch (Throwable t) {
if (repositoryInjector != null) {
Injectors.close(repositoryInjector);
}
logger.warn("failed to create repository [{}][{}]", t, repositoryMetaData.type(), repositoryMetaData.name());
throw new RepositoryException(repositoryMetaData.name(), "failed to create repository", t);
}
}
private void ensureRepositoryNotInUse(ClusterState clusterState, String repository) {
if (SnapshotsService.isRepositoryInUse(clusterState, repository) || RestoreService.isRepositoryInUse(clusterState, repository)) {
            throw new ElasticsearchIllegalStateException("trying to modify or unregister repository that is currently used");
}
}
/**
* Internal data structure for holding repository with its configuration information and injector
*/
private static class RepositoryHolder {
private final String type;
private final Settings settings;
private final Injector injector;
private final Repository repository;
private final IndexShardRepository indexShardRepository;
public RepositoryHolder(String type, Settings settings, Injector injector, Repository repository, IndexShardRepository indexShardRepository) {
this.type = type;
this.settings = settings;
this.repository = repository;
this.indexShardRepository = indexShardRepository;
this.injector = injector;
}
}
/**
* Register repository request
*/
public static class RegisterRepositoryRequest extends ClusterStateUpdateRequest<RegisterRepositoryRequest> {
final String cause;
final String name;
final String type;
Settings settings = EMPTY_SETTINGS;
/**
 * Constructs a new register repository request
*
* @param cause repository registration cause
* @param name repository name
* @param type repository type
*/
public RegisterRepositoryRequest(String cause, String name, String type) {
this.cause = cause;
this.name = name;
this.type = type;
}
/**
* Sets repository settings
*
* @param settings repository settings
* @return this request
*/
public RegisterRepositoryRequest settings(Settings settings) {
this.settings = settings;
return this;
}
}
/**
* Register repository response
*/
public static class RegisterRepositoryResponse extends ClusterStateUpdateResponse {
RegisterRepositoryResponse(boolean acknowledged) {
super(acknowledged);
}
}
/**
* Unregister repository request
*/
public static class UnregisterRepositoryRequest extends ClusterStateUpdateRequest<UnregisterRepositoryRequest> {
final String cause;
final String name;
/**
* Creates a new unregister repository request
*
* @param cause repository unregistration cause
* @param name repository name
*/
public UnregisterRepositoryRequest(String cause, String name) {
this.cause = cause;
this.name = name;
}
}
/**
* Unregister repository response
*/
public static class UnregisterRepositoryResponse extends ClusterStateUpdateResponse {
UnregisterRepositoryResponse(boolean acknowledged) {
super(acknowledged);
}
}
}
| 1no label
|
src_main_java_org_elasticsearch_repositories_RepositoriesService.java
|
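A minimal sketch of the copy-on-write register-or-replace pattern used by registerRepository above, with the repository holder reduced to a plain settings string. Registry and its members are hypothetical stand-ins, not Elasticsearch API:

    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical, self-contained illustration of copy-on-write registration.
    class Registry {
        private volatile Map<String, String> repositories = new HashMap<String, String>();

        // Returns true if the entry was added or replaced, false if identical.
        synchronized boolean register(String name, String settings) {
            String previous = repositories.get(name);
            if (previous != null && previous.equals(settings)) {
                return false; // same settings - ignore, as registerRepository does above
            }
            Map<String, String> copy = new HashMap<String, String>(repositories);
            copy.put(name, settings);
            repositories = copy; // publish a fresh snapshot; readers never see a partial update
            return true;
        }
    }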
80 |
class ChangeParametersProposal implements ICompletionProposal,
ICompletionProposalExtension6 {
private final Declaration dec;
private final CeylonEditor editor;
ChangeParametersProposal(Declaration dec, CeylonEditor editor) {
this.dec = dec;
this.editor = editor;
}
@Override
public Point getSelection(IDocument doc) {
return null;
}
@Override
public Image getImage() {
return REORDER;
}
@Override
public String getDisplayString() {
return "Change parameters of '" + dec.getName() + "'";
}
@Override
public IContextInformation getContextInformation() {
return null;
}
@Override
public String getAdditionalProposalInfo() {
return null;
}
@Override
public void apply(IDocument doc) {
new ChangeParametersRefactoringAction(editor).run();
}
@Override
public StyledString getStyledDisplayString() {
return Highlights.styleProposal(getDisplayString(), false);
}
public static void add(Collection<ICompletionProposal> proposals,
CeylonEditor editor) {
ChangeParametersRefactoring cpr = new ChangeParametersRefactoring(editor);
if (cpr.isEnabled()) {
proposals.add(new ChangeParametersProposal(cpr.getDeclaration(),
editor));
}
}
}
| 0true
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_code_correct_ChangeParametersProposal.java
|
1,513 |
public class NodeVersionAllocationDeciderTests extends ElasticsearchAllocationTestCase {
private final ESLogger logger = Loggers.getLogger(NodeVersionAllocationDeciderTests.class);
@Test
public void testDoNotAllocateFromPrimary() {
AllocationService strategy = createAllocationService(settingsBuilder()
.put("cluster.routing.allocation.concurrent_recoveries", 10)
.put("cluster.routing.allocation.allow_rebalance", "always")
.put("cluster.routing.allocation.cluster_concurrent_rebalance", -1)
.build());
logger.info("Building initial routing table");
MetaData metaData = MetaData.builder()
.put(IndexMetaData.builder("test").numberOfShards(5).numberOfReplicas(2))
.build();
RoutingTable routingTable = RoutingTable.builder()
.addAsNew(metaData.index("test"))
.build();
ClusterState clusterState = ClusterState.builder().metaData(metaData).routingTable(routingTable).build();
assertThat(routingTable.index("test").shards().size(), equalTo(5));
for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(3));
assertThat(routingTable.index("test").shard(i).shards().get(0).state(), equalTo(UNASSIGNED));
assertThat(routingTable.index("test").shard(i).shards().get(1).state(), equalTo(UNASSIGNED));
assertThat(routingTable.index("test").shard(i).shards().get(2).state(), equalTo(UNASSIGNED));
assertThat(routingTable.index("test").shard(i).shards().get(0).currentNodeId(), nullValue());
assertThat(routingTable.index("test").shard(i).shards().get(1).currentNodeId(), nullValue());
assertThat(routingTable.index("test").shard(i).shards().get(2).currentNodeId(), nullValue());
}
logger.info("start two nodes and fully start the shards");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2"))).build();
RoutingTable prevRoutingTable = routingTable;
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(3));
assertThat(routingTable.index("test").shard(i).primaryShard().state(), equalTo(INITIALIZING));
assertThat(routingTable.index("test").shard(i).replicaShardsWithState(UNASSIGNED).size(), equalTo(2));
}
logger.info("start all the primary shards, replicas will start initializing");
RoutingNodes routingNodes = clusterState.routingNodes();
prevRoutingTable = routingTable;
routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
routingNodes = clusterState.routingNodes();
for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(3));
assertThat(routingTable.index("test").shard(i).primaryShard().state(), equalTo(STARTED));
assertThat(routingTable.index("test").shard(i).replicaShardsWithState(INITIALIZING).size(), equalTo(1));
assertThat(routingTable.index("test").shard(i).replicaShardsWithState(UNASSIGNED).size(), equalTo(1));
}
routingNodes = clusterState.routingNodes();
prevRoutingTable = routingTable;
routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
routingNodes = clusterState.routingNodes();
for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(3));
assertThat(routingTable.index("test").shard(i).primaryShard().state(), equalTo(STARTED));
assertThat(routingTable.index("test").shard(i).replicaShardsWithState(STARTED).size(), equalTo(1));
assertThat(routingTable.index("test").shard(i).replicaShardsWithState(UNASSIGNED).size(), equalTo(1));
}
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
.put(newNode("node3", getPreviousVersion())))
.build();
prevRoutingTable = routingTable;
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
routingNodes = clusterState.routingNodes();
for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(3));
assertThat(routingTable.index("test").shard(i).primaryShard().state(), equalTo(STARTED));
assertThat(routingTable.index("test").shard(i).replicaShardsWithState(STARTED).size(), equalTo(1));
assertThat(routingTable.index("test").shard(i).replicaShardsWithState(UNASSIGNED).size(), equalTo(1));
}
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
.put(newNode("node4")))
.build();
prevRoutingTable = routingTable;
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
routingNodes = clusterState.routingNodes();
for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(3));
assertThat(routingTable.index("test").shard(i).primaryShard().state(), equalTo(STARTED));
assertThat(routingTable.index("test").shard(i).replicaShardsWithState(STARTED).size(), equalTo(1));
assertThat(routingTable.index("test").shard(i).replicaShardsWithState(INITIALIZING).size(), equalTo(1));
}
routingNodes = clusterState.routingNodes();
prevRoutingTable = routingTable;
routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
routingNodes = clusterState.routingNodes();
for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(3));
assertThat(routingTable.index("test").shard(i).primaryShard().state(), equalTo(STARTED));
assertThat(routingTable.index("test").shard(i).replicaShardsWithState(STARTED).size(), equalTo(2));
}
}
@Test
public void testRandom() {
AllocationService service = createAllocationService(settingsBuilder()
.put("cluster.routing.allocation.concurrent_recoveries", 10)
.put("cluster.routing.allocation.allow_rebalance", "always")
.put("cluster.routing.allocation.cluster_concurrent_rebalance", -1)
.build());
logger.info("Building initial routing table");
MetaData.Builder builder = MetaData.builder();
RoutingTable.Builder rtBuilder = RoutingTable.builder();
int numIndices = between(1, 20);
for (int i = 0; i < numIndices; i++) {
builder.put(IndexMetaData.builder("test_" + i).numberOfShards(between(1, 5)).numberOfReplicas(between(0, 2)));
}
MetaData metaData = builder.build();
for (int i = 0; i < numIndices; i++) {
rtBuilder.addAsNew(metaData.index("test_" + i));
}
RoutingTable routingTable = rtBuilder.build();
ClusterState clusterState = ClusterState.builder().metaData(metaData).routingTable(routingTable).build();
assertThat(routingTable.shardsWithState(UNASSIGNED).size(), equalTo(routingTable.allShards().size()));
List<DiscoveryNode> nodes = new ArrayList<DiscoveryNode>();
int nodeIdx = 0;
int iters = atLeast(10);
for (int i = 0; i < iters; i++) {
DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder();
int numNodes = between(1, 20);
if (nodes.size() > numNodes) {
Collections.shuffle(nodes, getRandom());
nodes = nodes.subList(0, numNodes);
} else {
for (int j = nodes.size(); j < numNodes; j++) {
if (frequently()) {
nodes.add(newNode("node" + (nodeIdx++), randomBoolean() ? getPreviousVersion() : Version.CURRENT));
} else {
nodes.add(newNode("node" + (nodeIdx++), randomVersion()));
}
}
}
for (DiscoveryNode node : nodes) {
nodesBuilder.put(node);
}
clusterState = ClusterState.builder(clusterState).nodes(nodesBuilder).build();
clusterState = stabilize(clusterState, service);
}
}
@Test
public void testRollingRestart() {
AllocationService service = createAllocationService(settingsBuilder()
.put("cluster.routing.allocation.concurrent_recoveries", 10)
.put("cluster.routing.allocation.allow_rebalance", "always")
.put("cluster.routing.allocation.cluster_concurrent_rebalance", -1)
.build());
logger.info("Building initial routing table");
MetaData metaData = MetaData.builder()
.put(IndexMetaData.builder("test").numberOfShards(5).numberOfReplicas(2))
.build();
RoutingTable routingTable = RoutingTable.builder()
.addAsNew(metaData.index("test"))
.build();
ClusterState clusterState = ClusterState.builder().metaData(metaData).routingTable(routingTable).build();
assertThat(routingTable.index("test").shards().size(), equalTo(5));
for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(3));
assertThat(routingTable.index("test").shard(i).shards().get(0).state(), equalTo(UNASSIGNED));
assertThat(routingTable.index("test").shard(i).shards().get(1).state(), equalTo(UNASSIGNED));
assertThat(routingTable.index("test").shard(i).shards().get(2).state(), equalTo(UNASSIGNED));
assertThat(routingTable.index("test").shard(i).shards().get(0).currentNodeId(), nullValue());
assertThat(routingTable.index("test").shard(i).shards().get(1).currentNodeId(), nullValue());
assertThat(routingTable.index("test").shard(i).shards().get(2).currentNodeId(), nullValue());
}
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
.put(newNode("old0", getPreviousVersion()))
.put(newNode("old1", getPreviousVersion()))
.put(newNode("old2", getPreviousVersion()))).build();
clusterState = stabilize(clusterState, service);
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
.put(newNode("old0", getPreviousVersion()))
.put(newNode("old1", getPreviousVersion()))
.put(newNode("new0"))).build();
clusterState = stabilize(clusterState, service);
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
.put(newNode("node0", getPreviousVersion()))
.put(newNode("new1"))
.put(newNode("new0"))).build();
clusterState = stabilize(clusterState, service);
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
.put(newNode("new2"))
.put(newNode("new1"))
.put(newNode("new0"))).build();
clusterState = stabilize(clusterState, service);
routingTable = clusterState.routingTable();
for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(3));
assertThat(routingTable.index("test").shard(i).shards().get(0).state(), equalTo(STARTED));
assertThat(routingTable.index("test").shard(i).shards().get(1).state(), equalTo(STARTED));
assertThat(routingTable.index("test").shard(i).shards().get(2).state(), equalTo(STARTED));
assertThat(routingTable.index("test").shard(i).shards().get(0).currentNodeId(), notNullValue());
assertThat(routingTable.index("test").shard(i).shards().get(1).currentNodeId(), notNullValue());
assertThat(routingTable.index("test").shard(i).shards().get(2).currentNodeId(), notNullValue());
}
}
private ClusterState stabilize(ClusterState clusterState, AllocationService service) {
logger.trace("RoutingNodes: {}", clusterState.routingNodes().prettyPrint());
RoutingTable routingTable = service.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
RoutingNodes routingNodes = clusterState.routingNodes();
assertRecoveryNodeVersions(routingNodes);
logger.info("complete rebalancing");
RoutingTable prev = routingTable;
boolean stable = false;
        for (int i = 0; i < 1000; i++) { // at most 1000 iterations - this should be enough for all tests
logger.trace("RoutingNodes: {}", clusterState.getRoutingNodes().prettyPrint());
routingTable = service.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
routingNodes = clusterState.routingNodes();
            if (stable = (routingTable == prev)) { // intentional assignment: stop once the routing table stops changing
break;
}
assertRecoveryNodeVersions(routingNodes);
prev = routingTable;
}
logger.info("stabilized success [{}]", stable);
assertThat(stable, is(true));
return clusterState;
}
private final void assertRecoveryNodeVersions(RoutingNodes routingNodes) {
logger.trace("RoutingNodes: {}", routingNodes.prettyPrint());
List<MutableShardRouting> mutableShardRoutings = routingNodes.shardsWithState(ShardRoutingState.RELOCATING);
for (MutableShardRouting r : mutableShardRoutings) {
String toId = r.relocatingNodeId();
String fromId = r.currentNodeId();
assertThat(fromId, notNullValue());
assertThat(toId, notNullValue());
logger.trace("From: " + fromId + " with Version: " + routingNodes.node(fromId).node().version() + " to: " + toId + " with Version: " + routingNodes.node(toId).node().version());
assertTrue(routingNodes.node(toId).node().version().onOrAfter(routingNodes.node(fromId).node().version()));
}
mutableShardRoutings = routingNodes.shardsWithState(ShardRoutingState.INITIALIZING);
for (MutableShardRouting r : mutableShardRoutings) {
if (r.initializing() && r.relocatingNodeId() == null && !r.primary()) {
MutableShardRouting primary = routingNodes.activePrimary(r);
assertThat(primary, notNullValue());
String fromId = primary.currentNodeId();
String toId = r.currentNodeId();
logger.trace("From: " + fromId + " with Version: " + routingNodes.node(fromId).node().version() + " to: " + toId + " with Version: " + routingNodes.node(toId).node().version());
assertTrue(routingNodes.node(toId).node().version().onOrAfter(routingNodes.node(fromId).node().version()));
}
}
}
}
| 0true
|
src_test_java_org_elasticsearch_cluster_routing_allocation_NodeVersionAllocationDeciderTests.java
|
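The stabilize helper in the test above drives reroute/applyStartedShards to a fixed point, stopping when the routing table reference no longer changes. A minimal sketch of that idiom, detached from Elasticsearch types (State, step and maxIters are hypothetical stand-ins; note the test compares by identity, while this sketch uses equals):

    import java.util.function.UnaryOperator;

    // Hypothetical fixed-point loop mirroring stabilize(...) above.
    final class FixedPoint {
        static <T> T untilStable(T state, UnaryOperator<T> step, int maxIters) {
            for (int i = 0; i < maxIters; i++) {
                T next = step.apply(state);
                if (next.equals(state)) {
                    return next; // reached a fixed point
                }
                state = next;
            }
            throw new IllegalStateException("did not stabilize within " + maxIters + " iterations");
        }
    }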
582 |
public class TransportOptimizeAction extends TransportBroadcastOperationAction<OptimizeRequest, OptimizeResponse, ShardOptimizeRequest, ShardOptimizeResponse> {
private final IndicesService indicesService;
@Inject
public TransportOptimizeAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
TransportService transportService, IndicesService indicesService) {
super(settings, threadPool, clusterService, transportService);
this.indicesService = indicesService;
}
@Override
protected String executor() {
return ThreadPool.Names.OPTIMIZE;
}
@Override
protected String transportAction() {
return OptimizeAction.NAME;
}
@Override
protected OptimizeRequest newRequest() {
return new OptimizeRequest();
}
@Override
protected OptimizeResponse newResponse(OptimizeRequest request, AtomicReferenceArray shardsResponses, ClusterState clusterState) {
int successfulShards = 0;
int failedShards = 0;
List<ShardOperationFailedException> shardFailures = null;
for (int i = 0; i < shardsResponses.length(); i++) {
Object shardResponse = shardsResponses.get(i);
if (shardResponse == null) {
// a non active shard, ignore...
} else if (shardResponse instanceof BroadcastShardOperationFailedException) {
failedShards++;
if (shardFailures == null) {
shardFailures = newArrayList();
}
shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse));
} else {
successfulShards++;
}
}
return new OptimizeResponse(shardsResponses.length(), successfulShards, failedShards, shardFailures);
}
@Override
protected ShardOptimizeRequest newShardRequest() {
return new ShardOptimizeRequest();
}
@Override
protected ShardOptimizeRequest newShardRequest(ShardRouting shard, OptimizeRequest request) {
return new ShardOptimizeRequest(shard.index(), shard.id(), request);
}
@Override
protected ShardOptimizeResponse newShardResponse() {
return new ShardOptimizeResponse();
}
@Override
protected ShardOptimizeResponse shardOperation(ShardOptimizeRequest request) throws ElasticsearchException {
IndexShard indexShard = indicesService.indexServiceSafe(request.index()).shardSafe(request.shardId());
indexShard.optimize(new Engine.Optimize()
.waitForMerge(request.waitForMerge())
.maxNumSegments(request.maxNumSegments())
.onlyExpungeDeletes(request.onlyExpungeDeletes())
.flush(request.flush())
);
return new ShardOptimizeResponse(request.index(), request.shardId());
}
    /**
     * The optimize request works against *all* shards.
     */
@Override
protected GroupShardsIterator shards(ClusterState clusterState, OptimizeRequest request, String[] concreteIndices) {
return clusterState.routingTable().allActiveShardsGrouped(concreteIndices, true);
}
@Override
protected ClusterBlockException checkGlobalBlock(ClusterState state, OptimizeRequest request) {
return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA);
}
@Override
protected ClusterBlockException checkRequestBlock(ClusterState state, OptimizeRequest request, String[] concreteIndices) {
return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA, concreteIndices);
}
}
| 1no label
|
src_main_java_org_elasticsearch_action_admin_indices_optimize_TransportOptimizeAction.java
|
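For context, a client-side call served by TransportOptimizeAction above might look like the sketch below. The builder methods are assumed from the indices admin client of this era (prepareOptimize, setMaxNumSegments, setFlush) and should be verified against the actual client version:

    // Assumed client usage; check method names against the client API in use.
    static OptimizeResponse forceMerge(Client client) {
        return client.admin().indices()
                .prepareOptimize("my_index")  // hypothetical index name
                .setMaxNumSegments(1)         // merge each shard down to one segment
                .setFlush(true)               // flush once merging completes
                .execute().actionGet();
    }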
1,824 |
public class AddSearchMappingRequest {
private final PersistencePerspective persistencePerspective;
private final CriteriaTransferObject requestedCto;
private final String ceilingEntityFullyQualifiedClassname;
private final Map<String, FieldMetadata> mergedProperties;
private final String propertyName;
private final FieldManager fieldManager;
private final DataFormatProvider dataFormatProvider;
private final RestrictionFactory restrictionFactory;
public AddSearchMappingRequest(PersistencePerspective persistencePerspective, CriteriaTransferObject
requestedCto, String ceilingEntityFullyQualifiedClassname, Map<String, FieldMetadata> mergedProperties,
String propertyName, FieldManager fieldManager,
DataFormatProvider dataFormatProvider, RestrictionFactory restrictionFactory) {
this.persistencePerspective = persistencePerspective;
this.requestedCto = requestedCto;
this.ceilingEntityFullyQualifiedClassname = ceilingEntityFullyQualifiedClassname;
this.mergedProperties = mergedProperties;
this.propertyName = propertyName;
this.fieldManager = fieldManager;
this.dataFormatProvider = dataFormatProvider;
this.restrictionFactory = restrictionFactory;
}
public PersistencePerspective getPersistencePerspective() {
return persistencePerspective;
}
public CriteriaTransferObject getRequestedCto() {
return requestedCto;
}
public String getCeilingEntityFullyQualifiedClassname() {
return ceilingEntityFullyQualifiedClassname;
}
public Map<String, FieldMetadata> getMergedProperties() {
return mergedProperties;
}
public String getPropertyName() {
return propertyName;
}
public FieldManager getFieldManager() {
return fieldManager;
}
public DataFormatProvider getDataFormatProvider() {
return dataFormatProvider;
}
public RestrictionFactory getRestrictionFactory() {
return restrictionFactory;
}
}
| 1no label
|
admin_broadleaf-open-admin-platform_src_main_java_org_broadleafcommerce_openadmin_server_service_persistence_module_provider_request_AddSearchMappingRequest.java
|
1,293 |
clusterService.submitStateUpdateTask("test", new AckedClusterStateUpdateTask() {
@Override
public boolean mustAck(DiscoveryNode discoveryNode) {
return true;
}
@Override
public void onAllNodesAcked(@Nullable Throwable t) {
allNodesAcked.set(true);
latch.countDown();
}
@Override
public void onAckTimeout() {
ackTimeout.set(true);
latch.countDown();
}
@Override
public TimeValue ackTimeout() {
return TimeValue.timeValueSeconds(10);
}
@Override
public TimeValue timeout() {
return TimeValue.timeValueSeconds(10);
}
@Override
public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
processedLatch.countDown();
}
@Override
public ClusterState execute(ClusterState currentState) throws Exception {
executed.set(true);
return currentState;
}
@Override
public void onFailure(String source, Throwable t) {
logger.error("failed to execute callback in test {}", t, source);
onFailure.set(true);
latch.countDown();
}
});
| 0true
|
src_test_java_org_elasticsearch_cluster_ClusterServiceTests.java
|
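The anonymous AckedClusterStateUpdateTask above fires latch.countDown() from its ack callbacks. A minimal sketch of the awaiting side (the latch wiring and timeout value are illustrative, not taken from the test):

    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.TimeUnit;

    // Minimal sketch: block until one of the ack callbacks above has fired.
    static void awaitAck(CountDownLatch latch) throws InterruptedException {
        if (!latch.await(10, TimeUnit.SECONDS)) {
            throw new AssertionError("cluster state update was not acked in time");
        }
    }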
305 |
public class ClusterHealthAction extends ClusterAction<ClusterHealthRequest, ClusterHealthResponse, ClusterHealthRequestBuilder> {
public static final ClusterHealthAction INSTANCE = new ClusterHealthAction();
public static final String NAME = "cluster/health";
private ClusterHealthAction() {
super(NAME);
}
@Override
public ClusterHealthResponse newResponse() {
return new ClusterHealthResponse();
}
@Override
public ClusterHealthRequestBuilder newRequestBuilder(ClusterAdminClient client) {
return new ClusterHealthRequestBuilder(client);
}
}
| 0true
|
src_main_java_org_elasticsearch_action_admin_cluster_health_ClusterHealthAction.java
|
1,353 |
tokenStream.getTokens()) {
@Override
protected boolean reuseExistingDescriptorModels() {
return true;
}
};
| 1no label
|
plugins_com.redhat.ceylon.eclipse.ui_src_com_redhat_ceylon_eclipse_core_model_ProjectSourceFile.java
|
1,375 |
Collections.sort(indexesToLock, new Comparator<OIndex<?>>() {
public int compare(final OIndex<?> indexOne, final OIndex<?> indexTwo) {
return indexOne.getName().compareTo(indexTwo.getName());
}
});
| 1no label
|
core_src_main_java_com_orientechnologies_orient_core_tx_OTransactionOptimistic.java
|
145 |
BackendOperation.execute(new Callable<Boolean>(){
@Override
public Boolean call() throws Exception {
config.getBackend().clearStorage();
return true;
}
@Override
public String toString() { return "ClearBackend"; }
},new StandardDuration(20, TimeUnit.SECONDS));
| 0true
|
titan-core_src_main_java_com_thinkaurelius_titan_core_util_TitanCleanup.java
|
120 |
public abstract class ForkJoinTask<V> implements Future<V>, Serializable {
/*
* See the internal documentation of class ForkJoinPool for a
* general implementation overview. ForkJoinTasks are mainly
* responsible for maintaining their "status" field amidst relays
* to methods in ForkJoinWorkerThread and ForkJoinPool.
*
* The methods of this class are more-or-less layered into
* (1) basic status maintenance
* (2) execution and awaiting completion
* (3) user-level methods that additionally report results.
* This is sometimes hard to see because this file orders exported
* methods in a way that flows well in javadocs.
*/
/*
* The status field holds run control status bits packed into a
* single int to minimize footprint and to ensure atomicity (via
* CAS). Status is initially zero, and takes on nonnegative
* values until completed, upon which status (anded with
* DONE_MASK) holds value NORMAL, CANCELLED, or EXCEPTIONAL. Tasks
* undergoing blocking waits by other threads have the SIGNAL bit
* set. Completion of a stolen task with SIGNAL set awakens any
* waiters via notifyAll. Even though suboptimal for some
* purposes, we use basic builtin wait/notify to take advantage of
* "monitor inflation" in JVMs that we would otherwise need to
* emulate to avoid adding further per-task bookkeeping overhead.
* We want these monitors to be "fat", i.e., not use biasing or
* thin-lock techniques, so use some odd coding idioms that tend
* to avoid them, mainly by arranging that every synchronized
* block performs a wait, notifyAll or both.
*
* These control bits occupy only (some of) the upper half (16
* bits) of status field. The lower bits are used for user-defined
* tags.
*/
/** The run status of this task */
volatile int status; // accessed directly by pool and workers
static final int DONE_MASK = 0xf0000000; // mask out non-completion bits
static final int NORMAL = 0xf0000000; // must be negative
static final int CANCELLED = 0xc0000000; // must be < NORMAL
static final int EXCEPTIONAL = 0x80000000; // must be < CANCELLED
static final int SIGNAL = 0x00010000; // must be >= 1 << 16
static final int SMASK = 0x0000ffff; // short bits for tags
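    // Illustrative decodings of the packed status word (comments only, not new API):
    //   (status & DONE_MASK) == NORMAL     -> completed normally
    //   (status & DONE_MASK) == CANCELLED  -> cancelled
    //   (status & SIGNAL)    != 0          -> at least one thread is waiting to join
    //   (short) status                     -> user-defined tag in the low 16 bits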
/**
* Marks completion and wakes up threads waiting to join this
* task.
*
* @param completion one of NORMAL, CANCELLED, EXCEPTIONAL
* @return completion status on exit
*/
private int setCompletion(int completion) {
for (int s;;) {
if ((s = status) < 0)
return s;
if (U.compareAndSwapInt(this, STATUS, s, s | completion)) {
if ((s >>> 16) != 0)
synchronized (this) { notifyAll(); }
return completion;
}
}
}
/**
* Primary execution method for stolen tasks. Unless done, calls
* exec and records status if completed, but doesn't wait for
* completion otherwise.
*
* @return status on exit from this method
*/
final int doExec() {
int s; boolean completed;
if ((s = status) >= 0) {
try {
completed = exec();
} catch (Throwable rex) {
return setExceptionalCompletion(rex);
}
if (completed)
s = setCompletion(NORMAL);
}
return s;
}
/**
* Tries to set SIGNAL status unless already completed. Used by
* ForkJoinPool. Other variants are directly incorporated into
* externalAwaitDone etc.
*
* @return true if successful
*/
final boolean trySetSignal() {
int s = status;
return s >= 0 && U.compareAndSwapInt(this, STATUS, s, s | SIGNAL);
}
/**
* Blocks a non-worker-thread until completion.
* @return status upon completion
*/
private int externalAwaitDone() {
int s;
ForkJoinPool cp = ForkJoinPool.common;
if ((s = status) >= 0) {
if (cp != null) {
if (this instanceof CountedCompleter)
s = cp.externalHelpComplete((CountedCompleter<?>)this);
else if (cp.tryExternalUnpush(this))
s = doExec();
}
if (s >= 0 && (s = status) >= 0) {
boolean interrupted = false;
do {
if (U.compareAndSwapInt(this, STATUS, s, s | SIGNAL)) {
synchronized (this) {
if (status >= 0) {
try {
wait();
} catch (InterruptedException ie) {
interrupted = true;
}
}
else
notifyAll();
}
}
} while ((s = status) >= 0);
if (interrupted)
Thread.currentThread().interrupt();
}
}
return s;
}
/**
* Blocks a non-worker-thread until completion or interruption.
*/
private int externalInterruptibleAwaitDone() throws InterruptedException {
int s;
ForkJoinPool cp = ForkJoinPool.common;
if (Thread.interrupted())
throw new InterruptedException();
if ((s = status) >= 0 && cp != null) {
if (this instanceof CountedCompleter)
cp.externalHelpComplete((CountedCompleter<?>)this);
else if (cp.tryExternalUnpush(this))
doExec();
}
while ((s = status) >= 0) {
if (U.compareAndSwapInt(this, STATUS, s, s | SIGNAL)) {
synchronized (this) {
if (status >= 0)
wait();
else
notifyAll();
}
}
}
return s;
}
/**
* Implementation for join, get, quietlyJoin. Directly handles
* only cases of already-completed, external wait, and
* unfork+exec. Others are relayed to ForkJoinPool.awaitJoin.
*
* @return status upon completion
*/
private int doJoin() {
int s; Thread t; ForkJoinWorkerThread wt; ForkJoinPool.WorkQueue w;
return (s = status) < 0 ? s :
((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) ?
(w = (wt = (ForkJoinWorkerThread)t).workQueue).
tryUnpush(this) && (s = doExec()) < 0 ? s :
wt.pool.awaitJoin(w, this) :
externalAwaitDone();
}
/**
* Implementation for invoke, quietlyInvoke.
*
* @return status upon completion
*/
private int doInvoke() {
int s; Thread t; ForkJoinWorkerThread wt;
return (s = doExec()) < 0 ? s :
((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) ?
(wt = (ForkJoinWorkerThread)t).pool.awaitJoin(wt.workQueue, this) :
externalAwaitDone();
}
// Exception table support
/**
* Table of exceptions thrown by tasks, to enable reporting by
* callers. Because exceptions are rare, we don't directly keep
* them with task objects, but instead use a weak ref table. Note
* that cancellation exceptions don't appear in the table, but are
* instead recorded as status values.
*
* Note: These statics are initialized below in static block.
*/
private static final ExceptionNode[] exceptionTable;
private static final ReentrantLock exceptionTableLock;
private static final ReferenceQueue<Object> exceptionTableRefQueue;
/**
* Fixed capacity for exceptionTable.
*/
private static final int EXCEPTION_MAP_CAPACITY = 32;
/**
* Key-value nodes for exception table. The chained hash table
* uses identity comparisons, full locking, and weak references
* for keys. The table has a fixed capacity because it only
* maintains task exceptions long enough for joiners to access
* them, so should never become very large for sustained
* periods. However, since we do not know when the last joiner
* completes, we must use weak references and expunge them. We do
* so on each operation (hence full locking). Also, some thread in
* any ForkJoinPool will call helpExpungeStaleExceptions when its
* pool becomes isQuiescent.
*/
static final class ExceptionNode extends WeakReference<ForkJoinTask<?>> {
final Throwable ex;
ExceptionNode next;
final long thrower; // use id not ref to avoid weak cycles
final int hashCode; // store task hashCode before weak ref disappears
ExceptionNode(ForkJoinTask<?> task, Throwable ex, ExceptionNode next) {
super(task, exceptionTableRefQueue);
this.ex = ex;
this.next = next;
this.thrower = Thread.currentThread().getId();
this.hashCode = System.identityHashCode(task);
}
}
/**
* Records exception and sets status.
*
* @return status on exit
*/
final int recordExceptionalCompletion(Throwable ex) {
int s;
if ((s = status) >= 0) {
int h = System.identityHashCode(this);
final ReentrantLock lock = exceptionTableLock;
lock.lock();
try {
expungeStaleExceptions();
ExceptionNode[] t = exceptionTable;
int i = h & (t.length - 1);
for (ExceptionNode e = t[i]; ; e = e.next) {
if (e == null) {
t[i] = new ExceptionNode(this, ex, t[i]);
break;
}
if (e.get() == this) // already present
break;
}
} finally {
lock.unlock();
}
s = setCompletion(EXCEPTIONAL);
}
return s;
}
/**
* Records exception and possibly propagates.
*
* @return status on exit
*/
private int setExceptionalCompletion(Throwable ex) {
int s = recordExceptionalCompletion(ex);
if ((s & DONE_MASK) == EXCEPTIONAL)
internalPropagateException(ex);
return s;
}
/**
* Hook for exception propagation support for tasks with completers.
*/
void internalPropagateException(Throwable ex) {
}
/**
* Cancels, ignoring any exceptions thrown by cancel. Used during
* worker and pool shutdown. Cancel is spec'ed not to throw any
* exceptions, but if it does anyway, we have no recourse during
* shutdown, so guard against this case.
*/
static final void cancelIgnoringExceptions(ForkJoinTask<?> t) {
if (t != null && t.status >= 0) {
try {
t.cancel(false);
} catch (Throwable ignore) {
}
}
}
/**
* Removes exception node and clears status.
*/
private void clearExceptionalCompletion() {
int h = System.identityHashCode(this);
final ReentrantLock lock = exceptionTableLock;
lock.lock();
try {
ExceptionNode[] t = exceptionTable;
int i = h & (t.length - 1);
ExceptionNode e = t[i];
ExceptionNode pred = null;
while (e != null) {
ExceptionNode next = e.next;
if (e.get() == this) {
if (pred == null)
t[i] = next;
else
pred.next = next;
break;
}
pred = e;
e = next;
}
expungeStaleExceptions();
status = 0;
} finally {
lock.unlock();
}
}
/**
* Returns a rethrowable exception for the given task, if
* available. To provide accurate stack traces, if the exception
* was not thrown by the current thread, we try to create a new
* exception of the same type as the one thrown, but with the
* recorded exception as its cause. If there is no such
* constructor, we instead try to use a no-arg constructor,
* followed by initCause, to the same effect. If none of these
* apply, or any fail due to other exceptions, we return the
* recorded exception, which is still correct, although it may
* contain a misleading stack trace.
*
* @return the exception, or null if none
*/
private Throwable getThrowableException() {
if ((status & DONE_MASK) != EXCEPTIONAL)
return null;
int h = System.identityHashCode(this);
ExceptionNode e;
final ReentrantLock lock = exceptionTableLock;
lock.lock();
try {
expungeStaleExceptions();
ExceptionNode[] t = exceptionTable;
e = t[h & (t.length - 1)];
while (e != null && e.get() != this)
e = e.next;
} finally {
lock.unlock();
}
Throwable ex;
if (e == null || (ex = e.ex) == null)
return null;
        if (e.thrower != Thread.currentThread().getId()) {
Class<? extends Throwable> ec = ex.getClass();
try {
Constructor<?> noArgCtor = null;
Constructor<?>[] cs = ec.getConstructors();// public ctors only
for (int i = 0; i < cs.length; ++i) {
Constructor<?> c = cs[i];
Class<?>[] ps = c.getParameterTypes();
if (ps.length == 0)
noArgCtor = c;
else if (ps.length == 1 && ps[0] == Throwable.class)
return (Throwable)(c.newInstance(ex));
}
if (noArgCtor != null) {
Throwable wx = (Throwable)(noArgCtor.newInstance());
wx.initCause(ex);
return wx;
}
} catch (Exception ignore) {
}
}
return ex;
}
    /**
     * Poll stale refs and remove them. Call only while holding lock.
     */
private static void expungeStaleExceptions() {
for (Object x; (x = exceptionTableRefQueue.poll()) != null;) {
if (x instanceof ExceptionNode) {
int hashCode = ((ExceptionNode)x).hashCode;
ExceptionNode[] t = exceptionTable;
int i = hashCode & (t.length - 1);
ExceptionNode e = t[i];
ExceptionNode pred = null;
while (e != null) {
ExceptionNode next = e.next;
if (e == x) {
if (pred == null)
t[i] = next;
else
pred.next = next;
break;
}
pred = e;
e = next;
}
}
}
}
/**
* If lock is available, poll stale refs and remove them.
* Called from ForkJoinPool when pools become quiescent.
*/
static final void helpExpungeStaleExceptions() {
final ReentrantLock lock = exceptionTableLock;
if (lock.tryLock()) {
try {
expungeStaleExceptions();
} finally {
lock.unlock();
}
}
}
/**
* A version of "sneaky throw" to relay exceptions
*/
static void rethrow(Throwable ex) {
if (ex != null)
ForkJoinTask.<RuntimeException>uncheckedThrow(ex);
}
/**
* The sneaky part of sneaky throw, relying on generics
* limitations to evade compiler complaints about rethrowing
* unchecked exceptions
*/
@SuppressWarnings("unchecked") static <T extends Throwable>
void uncheckedThrow(Throwable t) throws T {
throw (T)t; // rely on vacuous cast
}
/**
* Throws exception, if any, associated with the given status.
*/
private void reportException(int s) {
if (s == CANCELLED)
throw new CancellationException();
if (s == EXCEPTIONAL)
rethrow(getThrowableException());
}
// public methods
/**
* Arranges to asynchronously execute this task in the pool the
* current task is running in, if applicable, or using the {@link
* ForkJoinPool#commonPool()} if not {@link #inForkJoinPool}. While
* it is not necessarily enforced, it is a usage error to fork a
* task more than once unless it has completed and been
* reinitialized. Subsequent modifications to the state of this
* task or any data it operates on are not necessarily
* consistently observable by any thread other than the one
* executing it unless preceded by a call to {@link #join} or
* related methods, or a call to {@link #isDone} returning {@code
* true}.
*
* @return {@code this}, to simplify usage
*/
public final ForkJoinTask<V> fork() {
Thread t;
if ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread)
((ForkJoinWorkerThread)t).workQueue.push(this);
else
ForkJoinPool.common.externalPush(this);
return this;
}
/**
* Returns the result of the computation when it {@link #isDone is
* done}. This method differs from {@link #get()} in that
* abnormal completion results in {@code RuntimeException} or
* {@code Error}, not {@code ExecutionException}, and that
* interrupts of the calling thread do <em>not</em> cause the
* method to abruptly return by throwing {@code
* InterruptedException}.
*
* @return the computed result
*/
public final V join() {
int s;
if ((s = doJoin() & DONE_MASK) != NORMAL)
reportException(s);
return getRawResult();
}
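    // Illustrative (hypothetical) divide-and-conquer use of fork()/join(), kept as
    // a comment so the class body is unchanged; such a RecursiveTask subclass would
    // live outside this file in real usage:
    //   class Sum extends RecursiveTask<Long> {
    //       final long[] a; final int lo, hi;
    //       Sum(long[] a, int lo, int hi) { this.a = a; this.lo = lo; this.hi = hi; }
    //       protected Long compute() {
    //           if (hi - lo < 1000) {
    //               long s = 0;
    //               for (int i = lo; i < hi; i++) s += a[i];
    //               return s;
    //           }
    //           int mid = (lo + hi) >>> 1;
    //           Sum left = new Sum(a, lo, mid);
    //           left.fork();                                        // run left half asynchronously
    //           return new Sum(a, mid, hi).compute() + left.join(); // right half inline, then join left
    //       }
    //   }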
/**
* Commences performing this task, awaits its completion if
* necessary, and returns its result, or throws an (unchecked)
* {@code RuntimeException} or {@code Error} if the underlying
* computation did so.
*
* @return the computed result
*/
public final V invoke() {
int s;
if ((s = doInvoke() & DONE_MASK) != NORMAL)
reportException(s);
return getRawResult();
}
/**
* Forks the given tasks, returning when {@code isDone} holds for
* each task or an (unchecked) exception is encountered, in which
* case the exception is rethrown. If more than one task
* encounters an exception, then this method throws any one of
* these exceptions. If any task encounters an exception, the
* other may be cancelled. However, the execution status of
* individual tasks is not guaranteed upon exceptional return. The
* status of each task may be obtained using {@link
* #getException()} and related methods to check if they have been
* cancelled, completed normally or exceptionally, or left
* unprocessed.
*
* @param t1 the first task
* @param t2 the second task
* @throws NullPointerException if any task is null
*/
public static void invokeAll(ForkJoinTask<?> t1, ForkJoinTask<?> t2) {
int s1, s2;
t2.fork();
if ((s1 = t1.doInvoke() & DONE_MASK) != NORMAL)
t1.reportException(s1);
if ((s2 = t2.doJoin() & DONE_MASK) != NORMAL)
t2.reportException(s2);
}
/**
* Forks the given tasks, returning when {@code isDone} holds for
* each task or an (unchecked) exception is encountered, in which
* case the exception is rethrown. If more than one task
* encounters an exception, then this method throws any one of
* these exceptions. If any task encounters an exception, others
* may be cancelled. However, the execution status of individual
* tasks is not guaranteed upon exceptional return. The status of
* each task may be obtained using {@link #getException()} and
* related methods to check if they have been cancelled, completed
* normally or exceptionally, or left unprocessed.
*
* @param tasks the tasks
* @throws NullPointerException if any task is null
*/
public static void invokeAll(ForkJoinTask<?>... tasks) {
Throwable ex = null;
int last = tasks.length - 1;
for (int i = last; i >= 0; --i) {
ForkJoinTask<?> t = tasks[i];
if (t == null) {
if (ex == null)
ex = new NullPointerException();
}
else if (i != 0)
t.fork();
else if (t.doInvoke() < NORMAL && ex == null)
ex = t.getException();
}
for (int i = 1; i <= last; ++i) {
ForkJoinTask<?> t = tasks[i];
if (t != null) {
if (ex != null)
t.cancel(false);
else if (t.doJoin() < NORMAL)
ex = t.getException();
}
}
if (ex != null)
rethrow(ex);
}
/**
* Forks all tasks in the specified collection, returning when
* {@code isDone} holds for each task or an (unchecked) exception
* is encountered, in which case the exception is rethrown. If
* more than one task encounters an exception, then this method
* throws any one of these exceptions. If any task encounters an
* exception, others may be cancelled. However, the execution
* status of individual tasks is not guaranteed upon exceptional
* return. The status of each task may be obtained using {@link
* #getException()} and related methods to check if they have been
* cancelled, completed normally or exceptionally, or left
* unprocessed.
*
* @param tasks the collection of tasks
* @param <T> the type of the values returned from the tasks
* @return the tasks argument, to simplify usage
* @throws NullPointerException if tasks or any element are null
*/
public static <T extends ForkJoinTask<?>> Collection<T> invokeAll(Collection<T> tasks) {
if (!(tasks instanceof RandomAccess) || !(tasks instanceof List<?>)) {
invokeAll(tasks.toArray(new ForkJoinTask<?>[tasks.size()]));
return tasks;
}
@SuppressWarnings("unchecked")
List<? extends ForkJoinTask<?>> ts =
(List<? extends ForkJoinTask<?>>) tasks;
Throwable ex = null;
int last = ts.size() - 1;
for (int i = last; i >= 0; --i) {
ForkJoinTask<?> t = ts.get(i);
if (t == null) {
if (ex == null)
ex = new NullPointerException();
}
else if (i != 0)
t.fork();
else if (t.doInvoke() < NORMAL && ex == null)
ex = t.getException();
}
for (int i = 1; i <= last; ++i) {
ForkJoinTask<?> t = ts.get(i);
if (t != null) {
if (ex != null)
t.cancel(false);
else if (t.doJoin() < NORMAL)
ex = t.getException();
}
}
if (ex != null)
rethrow(ex);
return tasks;
}
/**
* Attempts to cancel execution of this task. This attempt will
* fail if the task has already completed or could not be
* cancelled for some other reason. If successful, and this task
* has not started when {@code cancel} is called, execution of
* this task is suppressed. After this method returns
* successfully, unless there is an intervening call to {@link
* #reinitialize}, subsequent calls to {@link #isCancelled},
* {@link #isDone}, and {@code cancel} will return {@code true}
* and calls to {@link #join} and related methods will result in
* {@code CancellationException}.
*
* <p>This method may be overridden in subclasses, but if so, must
* still ensure that these properties hold. In particular, the
* {@code cancel} method itself must not throw exceptions.
*
* <p>This method is designed to be invoked by <em>other</em>
* tasks. To terminate the current task, you can just return or
* throw an unchecked exception from its computation method, or
* invoke {@link #completeExceptionally(Throwable)}.
*
* @param mayInterruptIfRunning this value has no effect in the
* default implementation because interrupts are not used to
* control cancellation.
*
* @return {@code true} if this task is now cancelled
*/
public boolean cancel(boolean mayInterruptIfRunning) {
return (setCompletion(CANCELLED) & DONE_MASK) == CANCELLED;
}
public final boolean isDone() {
return status < 0;
}
public final boolean isCancelled() {
return (status & DONE_MASK) == CANCELLED;
}
/**
* Returns {@code true} if this task threw an exception or was cancelled.
*
* @return {@code true} if this task threw an exception or was cancelled
*/
public final boolean isCompletedAbnormally() {
return status < NORMAL;
}
/**
* Returns {@code true} if this task completed without throwing an
* exception and was not cancelled.
*
* @return {@code true} if this task completed without throwing an
* exception and was not cancelled
*/
public final boolean isCompletedNormally() {
return (status & DONE_MASK) == NORMAL;
}
/**
* Returns the exception thrown by the base computation, or a
* {@code CancellationException} if cancelled, or {@code null} if
* none or if the method has not yet completed.
*
* @return the exception, or {@code null} if none
*/
public final Throwable getException() {
int s = status & DONE_MASK;
return ((s >= NORMAL) ? null :
(s == CANCELLED) ? new CancellationException() :
getThrowableException());
}
/**
* Completes this task abnormally, and if not already aborted or
* cancelled, causes it to throw the given exception upon
* {@code join} and related operations. This method may be used
* to induce exceptions in asynchronous tasks, or to force
* completion of tasks that would not otherwise complete. Its use
* in other situations is discouraged. This method is
* overridable, but overridden versions must invoke {@code super}
* implementation to maintain guarantees.
*
* @param ex the exception to throw. If this exception is not a
* {@code RuntimeException} or {@code Error}, the actual exception
* thrown will be a {@code RuntimeException} with cause {@code ex}.
*/
public void completeExceptionally(Throwable ex) {
setExceptionalCompletion((ex instanceof RuntimeException) ||
(ex instanceof Error) ? ex :
new RuntimeException(ex));
}
/**
* Completes this task, and if not already aborted or cancelled,
     * returns the given value as the result of subsequent
* invocations of {@code join} and related operations. This method
* may be used to provide results for asynchronous tasks, or to
* provide alternative handling for tasks that would not otherwise
* complete normally. Its use in other situations is
* discouraged. This method is overridable, but overridden
* versions must invoke {@code super} implementation to maintain
* guarantees.
*
* @param value the result value for this task
*/
public void complete(V value) {
try {
setRawResult(value);
} catch (Throwable rex) {
setExceptionalCompletion(rex);
return;
}
setCompletion(NORMAL);
}
/**
* Completes this task normally without setting a value. The most
* recent value established by {@link #setRawResult} (or {@code
* null} by default) will be returned as the result of subsequent
* invocations of {@code join} and related operations.
*
* @since 1.8
*/
public final void quietlyComplete() {
setCompletion(NORMAL);
}
/**
* Waits if necessary for the computation to complete, and then
* retrieves its result.
*
* @return the computed result
* @throws CancellationException if the computation was cancelled
* @throws ExecutionException if the computation threw an
* exception
* @throws InterruptedException if the current thread is not a
* member of a ForkJoinPool and was interrupted while waiting
*/
public final V get() throws InterruptedException, ExecutionException {
int s = (Thread.currentThread() instanceof ForkJoinWorkerThread) ?
doJoin() : externalInterruptibleAwaitDone();
Throwable ex;
if ((s &= DONE_MASK) == CANCELLED)
throw new CancellationException();
if (s == EXCEPTIONAL && (ex = getThrowableException()) != null)
throw new ExecutionException(ex);
return getRawResult();
}
/**
* Waits if necessary for at most the given time for the computation
* to complete, and then retrieves its result, if available.
*
* @param timeout the maximum time to wait
* @param unit the time unit of the timeout argument
* @return the computed result
* @throws CancellationException if the computation was cancelled
* @throws ExecutionException if the computation threw an
* exception
* @throws InterruptedException if the current thread is not a
* member of a ForkJoinPool and was interrupted while waiting
* @throws TimeoutException if the wait timed out
*/
public final V get(long timeout, TimeUnit unit)
throws InterruptedException, ExecutionException, TimeoutException {
if (Thread.interrupted())
throw new InterruptedException();
// Messy in part because we measure in nanosecs, but wait in millisecs
int s; long ms;
long ns = unit.toNanos(timeout);
ForkJoinPool cp;
if ((s = status) >= 0 && ns > 0L) {
long deadline = System.nanoTime() + ns;
ForkJoinPool p = null;
ForkJoinPool.WorkQueue w = null;
Thread t = Thread.currentThread();
if (t instanceof ForkJoinWorkerThread) {
ForkJoinWorkerThread wt = (ForkJoinWorkerThread)t;
p = wt.pool;
w = wt.workQueue;
p.helpJoinOnce(w, this); // no retries on failure
}
else if ((cp = ForkJoinPool.common) != null) {
if (this instanceof CountedCompleter)
cp.externalHelpComplete((CountedCompleter<?>)this);
else if (cp.tryExternalUnpush(this))
doExec();
}
boolean canBlock = false;
boolean interrupted = false;
try {
while ((s = status) >= 0) {
if (w != null && w.qlock < 0)
cancelIgnoringExceptions(this);
else if (!canBlock) {
if (p == null || p.tryCompensate(p.ctl))
canBlock = true;
}
else {
if ((ms = TimeUnit.NANOSECONDS.toMillis(ns)) > 0L &&
U.compareAndSwapInt(this, STATUS, s, s | SIGNAL)) {
synchronized (this) {
if (status >= 0) {
try {
wait(ms);
} catch (InterruptedException ie) {
if (p == null)
interrupted = true;
}
}
else
notifyAll();
}
}
if ((s = status) < 0 || interrupted ||
(ns = deadline - System.nanoTime()) <= 0L)
break;
}
}
} finally {
if (p != null && canBlock)
p.incrementActiveCount();
}
if (interrupted)
throw new InterruptedException();
}
if ((s &= DONE_MASK) != NORMAL) {
Throwable ex;
if (s == CANCELLED)
throw new CancellationException();
if (s != EXCEPTIONAL)
throw new TimeoutException();
if ((ex = getThrowableException()) != null)
throw new ExecutionException(ex);
}
return getRawResult();
}
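    // Illustrative timed wait (hypothetical task variable; InterruptedException
    // and ExecutionException handling elided for brevity):
    //   try {
    //       Long v = task.get(500, TimeUnit.MILLISECONDS);
    //   } catch (TimeoutException e) {
    //       task.cancel(false); // give up after the deadline
    //   }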
/**
* Joins this task, without returning its result or throwing its
* exception. This method may be useful when processing
* collections of tasks when some have been cancelled or otherwise
* known to have aborted.
*/
public final void quietlyJoin() {
doJoin();
}
/**
* Commences performing this task and awaits its completion if
* necessary, without returning its result or throwing its
* exception.
*/
public final void quietlyInvoke() {
doInvoke();
}
/**
* Possibly executes tasks until the pool hosting the current task
* {@link ForkJoinPool#isQuiescent is quiescent}. This method may
* be of use in designs in which many tasks are forked, but none
* are explicitly joined, instead executing them until all are
* processed.
*/
public static void helpQuiesce() {
Thread t;
if ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) {
ForkJoinWorkerThread wt = (ForkJoinWorkerThread)t;
wt.pool.helpQuiescePool(wt.workQueue);
}
else
ForkJoinPool.quiesceCommonPool();
}
/**
* Resets the internal bookkeeping state of this task, allowing a
* subsequent {@code fork}. This method allows repeated reuse of
* this task, but only if reuse occurs when this task has either
* never been forked, or has been forked, then completed and all
* outstanding joins of this task have also completed. Effects
* under any other usage conditions are not guaranteed.
* This method may be useful when executing
* pre-constructed trees of subtasks in loops.
*
* <p>Upon completion of this method, {@code isDone()} reports
* {@code false}, and {@code getException()} reports {@code
* null}. However, the value returned by {@code getRawResult} is
* unaffected. To clear this value, you can invoke {@code
* setRawResult(null)}.
*/
public void reinitialize() {
if ((status & DONE_MASK) == EXCEPTIONAL)
clearExceptionalCompletion();
else
status = 0;
}
/**
* Returns the pool hosting the current task execution, or null
* if this task is executing outside of any ForkJoinPool.
*
* @see #inForkJoinPool
* @return the pool, or {@code null} if none
*/
public static ForkJoinPool getPool() {
Thread t = Thread.currentThread();
return (t instanceof ForkJoinWorkerThread) ?
((ForkJoinWorkerThread) t).pool : null;
}
/**
* Returns {@code true} if the current thread is a {@link
* ForkJoinWorkerThread} executing as a ForkJoinPool computation.
*
* @return {@code true} if the current thread is a {@link
* ForkJoinWorkerThread} executing as a ForkJoinPool computation,
* or {@code false} otherwise
*/
public static boolean inForkJoinPool() {
return Thread.currentThread() instanceof ForkJoinWorkerThread;
}
/**
* Tries to unschedule this task for execution. This method will
* typically (but is not guaranteed to) succeed if this task is
* the most recently forked task by the current thread, and has
* not commenced executing in another thread. This method may be
* useful when arranging alternative local processing of tasks
* that could have been, but were not, stolen.
*
* @return {@code true} if unforked
*/
public boolean tryUnfork() {
Thread t;
return (((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) ?
((ForkJoinWorkerThread)t).workQueue.tryUnpush(this) :
ForkJoinPool.common.tryExternalUnpush(this));
}
/**
* Returns an estimate of the number of tasks that have been
* forked by the current worker thread but not yet executed. This
* value may be useful for heuristic decisions about whether to
* fork other tasks.
*
* @return the number of tasks
*/
public static int getQueuedTaskCount() {
Thread t; ForkJoinPool.WorkQueue q;
if ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread)
q = ((ForkJoinWorkerThread)t).workQueue;
else
q = ForkJoinPool.commonSubmitterQueue();
return (q == null) ? 0 : q.queueSize();
}
/**
* Returns an estimate of how many more locally queued tasks are
* held by the current worker thread than there are other worker
* threads that might steal them, or zero if this thread is not
* operating in a ForkJoinPool. This value may be useful for
* heuristic decisions about whether to fork other tasks. In many
* usages of ForkJoinTasks, at steady state, each worker should
* aim to maintain a small constant surplus (for example, 3) of
* tasks, and to process computations locally if this threshold is
* exceeded.
*
* @return the surplus number of tasks, which may be negative
*/
public static int getSurplusQueuedTaskCount() {
return ForkJoinPool.getSurplusQueuedTaskCount();
}
// Extension methods
/**
* Returns the result that would be returned by {@link #join}, even
* if this task completed abnormally, or {@code null} if this task
* is not known to have been completed. This method is designed
* to aid debugging, as well as to support extensions. Its use in
* any other context is discouraged.
*
* @return the result, or {@code null} if not completed
*/
public abstract V getRawResult();
/**
* Forces the given value to be returned as a result. This method
* is designed to support extensions, and should not in general be
* called otherwise.
*
* @param value the value
*/
protected abstract void setRawResult(V value);
/**
* Immediately performs the base action of this task and returns
* true if, upon return from this method, this task is guaranteed
* to have completed normally. This method may return false
* otherwise, to indicate that this task is not necessarily
* complete (or is not known to be complete), for example in
* asynchronous actions that require explicit invocations of
* completion methods. This method may also throw an (unchecked)
* exception to indicate abnormal exit. This method is designed to
* support extensions, and should not in general be called
* otherwise.
*
* @return {@code true} if this task is known to have completed normally
*/
protected abstract boolean exec();
/**
* Returns, but does not unschedule or execute, a task queued by
* the current thread but not yet executed, if one is immediately
* available. There is no guarantee that this task will actually
* be polled or executed next. Conversely, this method may return
* null even if a task exists but cannot be accessed without
* contention with other threads. This method is designed
* primarily to support extensions, and is unlikely to be useful
* otherwise.
*
* @return the next task, or {@code null} if none are available
*/
protected static ForkJoinTask<?> peekNextLocalTask() {
Thread t; ForkJoinPool.WorkQueue q;
if ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread)
q = ((ForkJoinWorkerThread)t).workQueue;
else
q = ForkJoinPool.commonSubmitterQueue();
return (q == null) ? null : q.peek();
}
/**
* Unschedules and returns, without executing, the next task
* queued by the current thread but not yet executed, if the
* current thread is operating in a ForkJoinPool. This method is
* designed primarily to support extensions, and is unlikely to be
* useful otherwise.
*
* @return the next task, or {@code null} if none are available
*/
protected static ForkJoinTask<?> pollNextLocalTask() {
Thread t;
return ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) ?
((ForkJoinWorkerThread)t).workQueue.nextLocalTask() :
null;
}
/**
* If the current thread is operating in a ForkJoinPool,
* unschedules and returns, without executing, the next task
* queued by the current thread but not yet executed, if one is
* available, or if not available, a task that was forked by some
* other thread, if available. Availability may be transient, so a
* {@code null} result does not necessarily imply quiescence of
* the pool this task is operating in. This method is designed
* primarily to support extensions, and is unlikely to be useful
* otherwise.
*
* @return a task, or {@code null} if none are available
*/
protected static ForkJoinTask<?> pollTask() {
Thread t; ForkJoinWorkerThread wt;
return ((t = Thread.currentThread()) instanceof ForkJoinWorkerThread) ?
(wt = (ForkJoinWorkerThread)t).pool.nextTaskFor(wt.workQueue) :
null;
}
// tag operations
/**
* Returns the tag for this task.
*
* @return the tag for this task
* @since 1.8
*/
public final short getForkJoinTaskTag() {
return (short)status;
}
/**
* Atomically sets the tag value for this task.
*
* @param tag the tag value
* @return the previous value of the tag
* @since 1.8
*/
public final short setForkJoinTaskTag(short tag) {
for (int s;;) {
if (U.compareAndSwapInt(this, STATUS, s = status,
(s & ~SMASK) | (tag & SMASK)))
return (short)s;
}
}
/**
* Atomically conditionally sets the tag value for this task.
* Among other applications, tags can be used as visit markers
* in tasks operating on graphs, as in methods that check: {@code
* if (task.compareAndSetForkJoinTaskTag((short)0, (short)1))}
* before processing, otherwise exiting because the node has
* already been visited.
*
* @param e the expected tag value
* @param tag the new tag value
* @return {@code true} if successful; i.e., the current value was
* equal to e and is now tag.
* @since 1.8
*/
public final boolean compareAndSetForkJoinTaskTag(short e, short tag) {
for (int s;;) {
if ((short)(s = status) != e)
return false;
if (U.compareAndSwapInt(this, STATUS, s,
(s & ~SMASK) | (tag & SMASK)))
return true;
}
}
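    // Illustrative sketch (not part of the original source): the visit-marker
    // pattern mentioned in the javadoc above, using the tag as an atomic
    // "already visited" flag when several paths can reach the same node task.
    // The process() call is hypothetical.
    //
    //   protected boolean exec() {
    //       if (compareAndSetForkJoinTaskTag((short) 0, (short) 1))
    //           process();    // first visitor does the work
    //       return true;      // subsequent visitors complete immediately
    //   }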
/**
* Adaptor for Runnables. This implements RunnableFuture
* to be compliant with AbstractExecutorService constraints
* when used in ForkJoinPool.
*/
static final class AdaptedRunnable<T> extends ForkJoinTask<T>
implements RunnableFuture<T> {
final Runnable runnable;
T result;
AdaptedRunnable(Runnable runnable, T result) {
if (runnable == null) throw new NullPointerException();
this.runnable = runnable;
this.result = result; // OK to set this even before completion
}
public final T getRawResult() { return result; }
public final void setRawResult(T v) { result = v; }
public final boolean exec() { runnable.run(); return true; }
public final void run() { invoke(); }
private static final long serialVersionUID = 5232453952276885070L;
}
/**
* Adaptor for Runnables without results
*/
static final class AdaptedRunnableAction extends ForkJoinTask<Void>
implements RunnableFuture<Void> {
final Runnable runnable;
AdaptedRunnableAction(Runnable runnable) {
if (runnable == null) throw new NullPointerException();
this.runnable = runnable;
}
public final Void getRawResult() { return null; }
public final void setRawResult(Void v) { }
public final boolean exec() { runnable.run(); return true; }
public final void run() { invoke(); }
private static final long serialVersionUID = 5232453952276885070L;
}
/**
* Adaptor for Runnables in which failure forces worker exception
*/
static final class RunnableExecuteAction extends ForkJoinTask<Void> {
final Runnable runnable;
RunnableExecuteAction(Runnable runnable) {
if (runnable == null) throw new NullPointerException();
this.runnable = runnable;
}
public final Void getRawResult() { return null; }
public final void setRawResult(Void v) { }
public final boolean exec() { runnable.run(); return true; }
void internalPropagateException(Throwable ex) {
rethrow(ex); // rethrow outside exec() catches.
}
private static final long serialVersionUID = 5232453952276885070L;
}
/**
* Adaptor for Callables
*/
static final class AdaptedCallable<T> extends ForkJoinTask<T>
implements RunnableFuture<T> {
final Callable<? extends T> callable;
T result;
AdaptedCallable(Callable<? extends T> callable) {
if (callable == null) throw new NullPointerException();
this.callable = callable;
}
public final T getRawResult() { return result; }
public final void setRawResult(T v) { result = v; }
public final boolean exec() {
try {
result = callable.call();
return true;
} catch (Error err) {
throw err;
} catch (RuntimeException rex) {
throw rex;
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
public final void run() { invoke(); }
private static final long serialVersionUID = 2838392045355241008L;
}
/**
* Returns a new {@code ForkJoinTask} that performs the {@code run}
* method of the given {@code Runnable} as its action, and returns
* a null result upon {@link #join}.
*
* @param runnable the runnable action
* @return the task
*/
public static ForkJoinTask<?> adapt(Runnable runnable) {
return new AdaptedRunnableAction(runnable);
}
/**
* Returns a new {@code ForkJoinTask} that performs the {@code run}
* method of the given {@code Runnable} as its action, and returns
* the given result upon {@link #join}.
*
* @param runnable the runnable action
* @param result the result upon completion
* @param <T> the type of the result
* @return the task
*/
public static <T> ForkJoinTask<T> adapt(Runnable runnable, T result) {
return new AdaptedRunnable<T>(runnable, result);
}
/**
* Returns a new {@code ForkJoinTask} that performs the {@code call}
* method of the given {@code Callable} as its action, and returns
* its result upon {@link #join}, translating any checked exceptions
* encountered into {@code RuntimeException}.
*
* @param callable the callable action
* @param <T> the type of the callable's result
* @return the task
*/
public static <T> ForkJoinTask<T> adapt(Callable<? extends T> callable) {
return new AdaptedCallable<T>(callable);
}
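    // Illustrative sketch (not part of the original source): the adapt methods
    // let ordinary Runnable/Callable work participate in fork/join computations.
    // The pool variable and loadConfig() call are hypothetical.
    //
    //   ForkJoinTask<Config> loader = ForkJoinTask.adapt(new Callable<Config>() {
    //       public Config call() throws Exception { return loadConfig(); }
    //   });
    //   pool.execute(loader);
    //   Config cfg = loader.join();  // checked exceptions from call() arrive
    //                                // wrapped in RuntimeException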
// Serialization support
private static final long serialVersionUID = -7721805057305804111L;
/**
* Saves this task to a stream (that is, serializes it).
*
* @param s the stream
* @throws java.io.IOException if an I/O error occurs
* @serialData the current run status and the exception thrown
* during execution, or {@code null} if none
*/
private void writeObject(java.io.ObjectOutputStream s)
throws java.io.IOException {
s.defaultWriteObject();
s.writeObject(getException());
}
/**
* Reconstitutes this task from a stream (that is, deserializes it).
* @param s the stream
* @throws ClassNotFoundException if the class of a serialized object
* could not be found
* @throws java.io.IOException if an I/O error occurs
*/
private void readObject(java.io.ObjectInputStream s)
throws java.io.IOException, ClassNotFoundException {
s.defaultReadObject();
Object ex = s.readObject();
if (ex != null)
setExceptionalCompletion((Throwable)ex);
}
// Unsafe mechanics
private static final sun.misc.Unsafe U;
private static final long STATUS;
static {
exceptionTableLock = new ReentrantLock();
exceptionTableRefQueue = new ReferenceQueue<Object>();
exceptionTable = new ExceptionNode[EXCEPTION_MAP_CAPACITY];
try {
U = getUnsafe();
Class<?> k = ForkJoinTask.class;
STATUS = U.objectFieldOffset
(k.getDeclaredField("status"));
} catch (Exception e) {
throw new Error(e);
}
}
/**
* Returns a sun.misc.Unsafe. Suitable for use in a 3rd party package.
* Replace with a simple call to Unsafe.getUnsafe when integrating
* into a jdk.
*
* @return a sun.misc.Unsafe
*/
private static sun.misc.Unsafe getUnsafe() {
try {
return sun.misc.Unsafe.getUnsafe();
} catch (SecurityException tryReflectionInstead) {}
try {
return java.security.AccessController.doPrivileged
(new java.security.PrivilegedExceptionAction<sun.misc.Unsafe>() {
public sun.misc.Unsafe run() throws Exception {
Class<sun.misc.Unsafe> k = sun.misc.Unsafe.class;
for (java.lang.reflect.Field f : k.getDeclaredFields()) {
f.setAccessible(true);
Object x = f.get(null);
if (k.isInstance(x))
return k.cast(x);
}
throw new NoSuchFieldError("the Unsafe");
}});
} catch (java.security.PrivilegedActionException e) {
throw new RuntimeException("Could not initialize intrinsics",
e.getCause());
}
}
}
| 0true
|
src_main_java_jsr166e_ForkJoinTask.java
|
1,851 |
public class Merger implements Runnable {
Map<MapContainer, Collection<Record>> recordMap;
public Merger(Map<MapContainer, Collection<Record>> recordMap) {
this.recordMap = recordMap;
}
public void run() {
for (final MapContainer mapContainer : recordMap.keySet()) {
Collection<Record> recordList = recordMap.get(mapContainer);
String mergePolicyName = mapContainer.getMapConfig().getMergePolicy();
MapMergePolicy mergePolicy = getMergePolicy(mergePolicyName);
                // TODO: the number of records may be high; this can be optimized, since many records can be sent in a single invocation
final MapMergePolicy finalMergePolicy = mergePolicy;
for (final Record record : recordList) {
                    // TODO: too many submissions; they should be submitted in subgroups
nodeEngine.getExecutionService().submit("hz:map-merge", new Runnable() {
public void run() {
final SimpleEntryView entryView = createSimpleEntryView(record.getKey(), toData(record.getValue()), record);
MergeOperation operation = new MergeOperation(mapContainer.getName(), record.getKey(), entryView, finalMergePolicy);
try {
int partitionId = nodeEngine.getPartitionService().getPartitionId(record.getKey());
Future f = nodeEngine.getOperationService().invokeOnPartition(SERVICE_NAME, operation, partitionId);
f.get();
} catch (Throwable t) {
throw ExceptionUtil.rethrow(t);
}
}
});
}
}
}
}
| 1no label
|
hazelcast_src_main_java_com_hazelcast_map_MapService.java
|
16 |
public interface PersistenceService {
/**
* This method is invoked when starting a set of related persistence operations in the current thread. If the underlying
* persistence implementation is a database this will likely start a transaction. This method will
* generally only be used from code that operates outside the scope of an action, for example an action
* that does some processing in the background.
*/
void startRelatedOperations();
/**
* This method is invoked when completing a set of related persistence operations. This method must
     * be invoked following {@link #startRelatedOperations()} and only a single time.
     * @param save true if the operations should be saved; false if they should not be saved.
*/
void completeRelatedOperations(boolean save);
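    /*
     * Illustrative usage sketch (not part of the original interface; the calling
     * code is hypothetical). The two methods above pair like a transaction:
     *
     *   persistenceService.startRelatedOperations();
     *   boolean save = false;
     *   try {
     *       // ... several persistence calls treated as one unit ...
     *       save = true;
     *   } finally {
     *       persistenceService.completeRelatedOperations(save);
     *   }
     */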
/**
* Checks if <code>tagId</code> is used on at least one component in the database.
* @param tagId tag ID
     * @return true if there are components tagged with <code>tagId</code>; false otherwise.
*/
boolean hasComponentsTaggedBy(String tagId);
/**
* Returns the component with the specified external key and component type.
* @param externalKey to use for search criteria
* @param componentType to use with external key
* @param <T> type of component
* @return instance of component with the given type or null if the component cannot be found.
*/
<T extends AbstractComponent> T getComponent(String externalKey, Class<T> componentType);
/**
* Returns the component with the specified external key and component type.
* @param externalKey to use for search criteria
* @param componentType to use with external key
* @return instance of component with the given type or null if the component cannot
* be found.
*/
AbstractComponent getComponent(String externalKey, String componentType);
}
| 0true
|
mctcore_src_main_java_gov_nasa_arc_mct_api_persistence_PersistenceService.java
|
1,508 |
public class FailedNodeRoutingTests extends ElasticsearchAllocationTestCase {
private final ESLogger logger = Loggers.getLogger(FailedNodeRoutingTests.class);
@Test
public void simpleFailedNodeTest() {
AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.allow_rebalance", ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()).build());
MetaData metaData = MetaData.builder()
.put(IndexMetaData.builder("test1").numberOfShards(1).numberOfReplicas(1))
.put(IndexMetaData.builder("test2").numberOfShards(1).numberOfReplicas(1))
.build();
RoutingTable routingTable = RoutingTable.builder()
.addAsNew(metaData.index("test1"))
.addAsNew(metaData.index("test2"))
.build();
ClusterState clusterState = ClusterState.builder().metaData(metaData).routingTable(routingTable).build();
logger.info("start 4 nodes");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2")).put(newNode("node3")).put(newNode("node4"))).build();
RoutingTable prevRoutingTable = routingTable;
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
logger.info("start all the primary shards, replicas will start initializing");
RoutingNodes routingNodes = clusterState.routingNodes();
prevRoutingTable = routingTable;
routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
routingNodes = clusterState.routingNodes();
logger.info("start the replica shards");
routingNodes = clusterState.routingNodes();
prevRoutingTable = routingTable;
routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
routingNodes = clusterState.routingNodes();
assertThat(routingNodes.node("node1").numberOfShardsWithState(STARTED), equalTo(1));
assertThat(routingNodes.node("node2").numberOfShardsWithState(STARTED), equalTo(1));
assertThat(routingNodes.node("node3").numberOfShardsWithState(STARTED), equalTo(1));
assertThat(routingNodes.node("node4").numberOfShardsWithState(STARTED), equalTo(1));
logger.info("remove 2 nodes where primaries are allocated, reroute");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
.remove(routingTable.index("test1").shard(0).primaryShard().currentNodeId())
.remove(routingTable.index("test2").shard(0).primaryShard().currentNodeId())
)
.build();
prevRoutingTable = routingTable;
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
routingNodes = clusterState.routingNodes();
for (RoutingNode routingNode : routingNodes) {
assertThat(routingNode.numberOfShardsWithState(STARTED), equalTo(1));
assertThat(routingNode.numberOfShardsWithState(INITIALIZING), equalTo(1));
}
}
@Test
public void simpleFailedNodeTestNoReassign() {
AllocationService strategy = createAllocationService(settingsBuilder().put("cluster.routing.allocation.allow_rebalance", ClusterRebalanceAllocationDecider.ClusterRebalanceType.ALWAYS.toString()).build());
MetaData metaData = MetaData.builder()
.put(IndexMetaData.builder("test1").numberOfShards(1).numberOfReplicas(1))
.put(IndexMetaData.builder("test2").numberOfShards(1).numberOfReplicas(1))
.build();
RoutingTable routingTable = RoutingTable.builder()
.addAsNew(metaData.index("test1"))
.addAsNew(metaData.index("test2"))
.build();
ClusterState clusterState = ClusterState.builder().metaData(metaData).routingTable(routingTable).build();
logger.info("start 4 nodes");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2")).put(newNode("node3")).put(newNode("node4"))).build();
RoutingTable prevRoutingTable = routingTable;
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
logger.info("start all the primary shards, replicas will start initializing");
RoutingNodes routingNodes = clusterState.routingNodes();
prevRoutingTable = routingTable;
routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
routingNodes = clusterState.routingNodes();
logger.info("start the replica shards");
routingNodes = clusterState.routingNodes();
prevRoutingTable = routingTable;
routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
routingNodes = clusterState.routingNodes();
assertThat(routingNodes.node("node1").numberOfShardsWithState(STARTED), equalTo(1));
assertThat(routingNodes.node("node2").numberOfShardsWithState(STARTED), equalTo(1));
assertThat(routingNodes.node("node3").numberOfShardsWithState(STARTED), equalTo(1));
assertThat(routingNodes.node("node4").numberOfShardsWithState(STARTED), equalTo(1));
logger.info("remove 2 nodes where primaries are allocated, reroute");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
.remove(routingTable.index("test1").shard(0).primaryShard().currentNodeId())
.remove(routingTable.index("test2").shard(0).primaryShard().currentNodeId())
)
.build();
prevRoutingTable = routingTable;
routingTable = strategy.rerouteWithNoReassign(clusterState).routingTable();
clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build();
routingNodes = clusterState.routingNodes();
for (RoutingNode routingNode : routingNodes) {
assertThat(routingNode.numberOfShardsWithState(STARTED), equalTo(1));
}
assertThat(routingNodes.unassigned().size(), equalTo(2));
}
}
| 0true
|
src_test_java_org_elasticsearch_cluster_routing_allocation_FailedNodeRoutingTests.java
|
172 |
public class LogIoUtils
{
private static final short CURRENT_FORMAT_VERSION = ( LogEntry.CURRENT_VERSION ) & 0xFF;
static final int LOG_HEADER_SIZE = 16;
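    // Header layout (see readLogHeader/writeLogHeader below): two longs, 16 bytes
    // in total. The first long packs the format version into its top byte and the
    // log version into the low 56 bits; the second holds the last committed tx id.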
public static long[] readLogHeader( FileSystemAbstraction fileSystem, File file ) throws IOException
{
StoreChannel channel = fileSystem.open( file, "r" );
try
{
return readLogHeader( ByteBuffer.allocateDirect( 100*1000 ), channel, true );
}
finally
{
channel.close();
}
}
public static long[] readLogHeader( ByteBuffer buffer, ReadableByteChannel channel,
boolean strict ) throws IOException
{
buffer.clear();
buffer.limit( LOG_HEADER_SIZE );
if ( channel.read( buffer ) != LOG_HEADER_SIZE )
{
if ( strict )
{
throw new IOException( "Unable to read log version and last committed tx" );
}
return null;
}
buffer.flip();
long version = buffer.getLong();
long previousCommittedTx = buffer.getLong();
long logFormatVersion = ( version >> 56 ) & 0xFF;
if ( CURRENT_FORMAT_VERSION != logFormatVersion )
{
throw new IllegalLogFormatException( CURRENT_FORMAT_VERSION, logFormatVersion );
}
version = version & 0x00FFFFFFFFFFFFFFL;
return new long[] { version, previousCommittedTx };
}
public static ByteBuffer writeLogHeader( ByteBuffer buffer, long logVersion,
long previousCommittedTxId )
{
buffer.clear();
buffer.putLong( logVersion | ( ( (long) CURRENT_FORMAT_VERSION ) << 56 ) );
buffer.putLong( previousCommittedTxId );
buffer.flip();
return buffer;
}
public static LogEntry readEntry( ByteBuffer buffer, ReadableByteChannel channel,
XaCommandFactory cf ) throws IOException
{
try
{
return readLogEntry( buffer, channel, cf );
}
catch ( ReadPastEndException e )
{
return null;
}
}
public static LogEntry readLogEntry( ByteBuffer buffer, ReadableByteChannel channel, XaCommandFactory cf )
throws IOException, ReadPastEndException
{
byte entry = readNextByte( buffer, channel );
switch ( entry )
{
case LogEntry.TX_START:
return readTxStartEntry( buffer, channel );
case LogEntry.TX_PREPARE:
return readTxPrepareEntry( buffer, channel );
case LogEntry.TX_1P_COMMIT:
return readTxOnePhaseCommitEntry( buffer, channel );
case LogEntry.TX_2P_COMMIT:
return readTxTwoPhaseCommitEntry( buffer, channel );
case LogEntry.COMMAND:
return readTxCommandEntry( buffer, channel, cf );
case LogEntry.DONE:
return readTxDoneEntry( buffer, channel );
case LogEntry.EMPTY:
return null;
default:
throw new IOException( "Unknown entry[" + entry + "]" );
}
}
private static LogEntry.Start readTxStartEntry( ByteBuffer buf,
ReadableByteChannel channel ) throws IOException, ReadPastEndException
{
byte globalIdLength = readNextByte( buf, channel );
byte branchIdLength = readNextByte( buf, channel );
byte globalId[] = new byte[globalIdLength];
readIntoBufferAndFlip( ByteBuffer.wrap( globalId ), channel, globalIdLength );
byte branchId[] = new byte[branchIdLength];
readIntoBufferAndFlip( ByteBuffer.wrap( branchId ), channel, branchIdLength );
int identifier = readNextInt( buf, channel );
@SuppressWarnings("unused")
int formatId = readNextInt( buf, channel );
int masterId = readNextInt( buf, channel );
int myId = readNextInt( buf, channel );
long timeWritten = readNextLong( buf, channel );
long latestCommittedTxWhenStarted = readNextLong( buf, channel );
// re-create the transaction
Xid xid = new XidImpl( globalId, branchId );
return new LogEntry.Start( xid, identifier, masterId, myId, -1, timeWritten, latestCommittedTxWhenStarted );
}
private static LogEntry.Prepare readTxPrepareEntry( ByteBuffer buf,
ReadableByteChannel channel ) throws IOException, ReadPastEndException
{
return new LogEntry.Prepare( readNextInt( buf, channel ), readNextLong( buf, channel ) );
}
private static LogEntry.OnePhaseCommit readTxOnePhaseCommitEntry( ByteBuffer buf,
ReadableByteChannel channel ) throws IOException, ReadPastEndException
{
return new LogEntry.OnePhaseCommit( readNextInt( buf, channel ),
readNextLong( buf, channel ), readNextLong( buf, channel ) );
}
private static LogEntry.Done readTxDoneEntry( ByteBuffer buf,
ReadableByteChannel channel ) throws IOException, ReadPastEndException
{
return new LogEntry.Done( readNextInt( buf, channel ) );
}
private static LogEntry.TwoPhaseCommit readTxTwoPhaseCommitEntry( ByteBuffer buf,
ReadableByteChannel channel ) throws IOException, ReadPastEndException
{
return new LogEntry.TwoPhaseCommit( readNextInt( buf, channel ),
readNextLong( buf, channel ), readNextLong( buf, channel ) );
}
private static LogEntry.Command readTxCommandEntry(
ByteBuffer buf, ReadableByteChannel channel, XaCommandFactory cf )
throws IOException, ReadPastEndException
{
int identifier = readNextInt( buf, channel );
XaCommand command = cf.readCommand( channel, buf );
if ( command == null )
{
return null;
}
return new LogEntry.Command( identifier, command );
}
public static void writeLogEntry( LogEntry entry, LogBuffer buffer )
throws IOException
{
if ( entry instanceof LogEntry.Command )
{
writeCommand( buffer, entry.getIdentifier(), ((LogEntry.Command) entry).getXaCommand() );
}
else if ( entry instanceof LogEntry.Start )
{
writeStart( buffer, entry.getIdentifier(), ( (LogEntry.Start) entry ).getXid(),
((LogEntry.Start) entry).getMasterId(), ((LogEntry.Start) entry).getLocalId(),
((LogEntry.Start) entry).getTimeWritten(),
((LogEntry.Start) entry).getLastCommittedTxWhenTransactionStarted() );
}
else if ( entry instanceof LogEntry.Done )
{
writeDone( buffer, entry.getIdentifier() );
}
else if ( entry instanceof LogEntry.OnePhaseCommit )
{
LogEntry.Commit commit = (LogEntry.Commit) entry;
writeCommit( false, buffer, commit.getIdentifier(), commit.getTxId(),
((LogEntry.OnePhaseCommit) entry).getTimeWritten() );
}
else if ( entry instanceof LogEntry.Prepare )
{
writePrepare( buffer, entry.getIdentifier(), ((LogEntry.Prepare) entry).getTimeWritten() );
}
else if ( entry instanceof LogEntry.TwoPhaseCommit )
{
LogEntry.Commit commit = (LogEntry.Commit) entry;
writeCommit( true, buffer, commit.getIdentifier(), commit.getTxId(),
((LogEntry.TwoPhaseCommit) entry).getTimeWritten() );
}
}
public static void writePrepare( LogBuffer buffer, int identifier, long timeWritten ) throws IOException
{
buffer.put( LogEntry.TX_PREPARE ).putInt( identifier ).putLong( timeWritten );
}
public static void writeCommit( boolean twoPhase, LogBuffer buffer, int identifier, long txId,
long timeWritten ) throws IOException
{
buffer.put( twoPhase ? LogEntry.TX_2P_COMMIT : LogEntry.TX_1P_COMMIT )
.putInt( identifier ).putLong( txId ).putLong( timeWritten );
}
public static void writeDone( LogBuffer buffer, int identifier ) throws IOException
{
buffer.put( LogEntry.DONE ).putInt( identifier );
}
public static void writeDone( ByteBuffer buffer, int identifier )
{
buffer.put( LogEntry.DONE ).putInt( identifier );
}
public static void writeStart( LogBuffer buffer, int identifier, Xid xid, int masterId, int myId, long timeWritten,
long latestCommittedTxWhenStarted )
throws IOException
{
byte globalId[] = xid.getGlobalTransactionId();
byte branchId[] = xid.getBranchQualifier();
int formatId = xid.getFormatId();
buffer.put( LogEntry.TX_START )
.put( (byte) globalId.length )
.put( (byte) branchId.length )
.put( globalId ).put( branchId )
.putInt( identifier )
.putInt( formatId )
.putInt( masterId )
.putInt( myId )
.putLong( timeWritten )
.putLong( latestCommittedTxWhenStarted );
}
public static void writeCommand( LogBuffer buffer, int identifier, XaCommand command )
throws IOException
{
buffer.put( LogEntry.COMMAND ).putInt( identifier );
command.writeToFile( buffer );
}
private static int readNextInt( ByteBuffer buf, ReadableByteChannel channel )
throws IOException, ReadPastEndException
{
return readIntoBufferAndFlip( buf, channel, 4 ).getInt();
}
private static long readNextLong( ByteBuffer buf, ReadableByteChannel channel )
throws IOException, ReadPastEndException
{
return readIntoBufferAndFlip( buf, channel, 8 ).getLong();
}
public static byte readNextByte( ByteBuffer buf, ReadableByteChannel channel )
throws IOException, ReadPastEndException
{
return readIntoBufferAndFlip( buf, channel, 1 ).get();
}
private static ByteBuffer readIntoBufferAndFlip( ByteBuffer buf, ReadableByteChannel channel,
int numberOfBytes ) throws IOException, ReadPastEndException
{
buf.clear();
buf.limit( numberOfBytes );
if ( channel.read( buf ) != buf.limit() )
{
throw new ReadPastEndException();
}
buf.flip();
return buf;
}
}
| 0true
|
community_kernel_src_main_java_org_neo4j_kernel_impl_transaction_xaframework_LogIoUtils.java
|
485 |
@Service("blExploitProtectionService")
public class ExploitProtectionServiceImpl implements ExploitProtectionService {
private static final String CSRFTOKEN = "csrfToken";
private static final String CSRFTOKENPARAMETER = "csrfToken";
private static final Log LOG = LogFactory.getLog(ExploitProtectionServiceImpl.class);
private static class Handler extends URLStreamHandler {
@Override
protected URLConnection openConnection(URL u) throws IOException {
URL resourceUrl = getClass().getClassLoader().getResource(u.getPath());
return resourceUrl.openConnection();
}
}
private static Policy getAntiSamyPolicy(String policyFileLocation) {
try {
URL url = new URL(null, policyFileLocation, new Handler());
return Policy.getInstance(url);
} catch (Exception e) {
throw new RuntimeException("Unable to create URL", e);
}
}
private static final String DEFAULTANTISAMYPOLICYFILELOCATION = "classpath:antisamy-myspace-1.4.4.xml";
protected String antiSamyPolicyFileLocation = DEFAULTANTISAMYPOLICYFILELOCATION;
//this is thread safe
private Policy antiSamyPolicy = getAntiSamyPolicy(antiSamyPolicyFileLocation);
//this is thread safe for the usage of scan()
private AntiSamy as = new AntiSamy();
protected boolean xsrfProtectionEnabled = true;
protected boolean xssProtectionEnabled = true;
@Override
public String cleanString(String string) throws ServiceException {
if (!xssProtectionEnabled || StringUtils.isEmpty(string)) {
return string;
}
try {
CleanResults results = as.scan(string, antiSamyPolicy);
return results.getCleanHTML();
} catch (Exception e) {
LOG.error("Unable to clean the passed in entity values", e);
throw new ServiceException("Unable to clean the passed in entity values", e);
}
}
@Override
public String cleanStringWithResults(String string) throws ServiceException {
if (!xssProtectionEnabled || StringUtils.isEmpty(string)) {
return string;
}
try {
CleanResults results = as.scan(string, antiSamyPolicy);
if (results.getNumberOfErrors() > 0) {
throw new CleanStringException(results);
}
return results.getCleanHTML();
} catch (CleanStringException e) {
throw e;
} catch (Exception e) {
StringBuilder sb = new StringBuilder();
sb.append("Unable to clean the passed in entity values");
sb.append("\nNote - ");
sb.append(getAntiSamyPolicyFileLocation());
sb.append(" policy in effect. Set a new policy file to modify validation behavior/strictness.");
LOG.error(sb.toString(), e);
throw new ServiceException(sb.toString(), e);
}
}
@Override
public void compareToken(String passedToken) throws ServiceException {
if (xsrfProtectionEnabled) {
if (!getCSRFToken().equals(passedToken)) {
throw new ServiceException("XSRF token mismatch (" + passedToken + "). Session may be expired.");
} else {
LOG.debug("Validated CSRF token");
}
}
}
@Override
public String getCSRFToken() throws ServiceException {
HttpServletRequest request = ((ServletRequestAttributes) RequestContextHolder.getRequestAttributes()).getRequest();
HttpSession session = request.getSession();
String token = (String) session.getAttribute(CSRFTOKEN);
if (StringUtils.isEmpty(token)) {
try {
token = RandomGenerator.generateRandomId("SHA1PRNG", 32);
} catch (NoSuchAlgorithmException e) {
LOG.error("Unable to generate random number", e);
throw new ServiceException("Unable to generate random number", e);
}
session.setAttribute(CSRFTOKEN, token);
}
return token;
}
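    // Illustrative usage sketch (not part of the original class; the calling
    // request-handling code is hypothetical): render getCSRFToken() into each
    // form under the parameter name from getCsrfTokenParameter(), then validate
    // it on submission:
    //
    //   String submitted = request.getParameter(service.getCsrfTokenParameter());
    //   service.compareToken(submitted);  // throws ServiceException on mismatch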
@Override
public String getAntiSamyPolicyFileLocation() {
return antiSamyPolicyFileLocation;
}
@Override
public void setAntiSamyPolicyFileLocation(String antiSamyPolicyFileLocation) {
this.antiSamyPolicyFileLocation = antiSamyPolicyFileLocation;
antiSamyPolicy = getAntiSamyPolicy(antiSamyPolicyFileLocation);
}
public boolean isXsrfProtectionEnabled() {
return xsrfProtectionEnabled;
}
public void setXsrfProtectionEnabled(boolean xsrfProtectionEnabled) {
this.xsrfProtectionEnabled = xsrfProtectionEnabled;
}
public boolean isXssProtectionEnabled() {
return xssProtectionEnabled;
}
public void setXssProtectionEnabled(boolean xssProtectionEnabled) {
this.xssProtectionEnabled = xssProtectionEnabled;
}
@Override
public String getCsrfTokenParameter() {
return CSRFTOKENPARAMETER;
}
}
| 1no label
|
common_src_main_java_org_broadleafcommerce_common_security_service_ExploitProtectionServiceImpl.java
|
1,206 |
floatIntMap = build(type, limit, smartSize, availableProcessors, new Recycler.C<FloatIntOpenHashMap>() {
@Override
public FloatIntOpenHashMap newInstance(int sizing) {
return new FloatIntOpenHashMap(size(sizing));
}
@Override
public void clear(FloatIntOpenHashMap value) {
value.clear();
}
});
| 0true
|
src_main_java_org_elasticsearch_cache_recycler_CacheRecycler.java
|
537 |
@Deprecated
public class ResourceBundleExtensionPoint {
private String[] basenameExtensions = new String[0];
public String[] getBasenameExtensions() {
return basenameExtensions;
}
public void setBasenameExtensions(String[] basenameExtensions) {
this.basenameExtensions = basenameExtensions;
}
}
| 0true
|
common_src_main_java_org_broadleafcommerce_common_util_ResourceBundleExtensionPoint.java
|
113 |
@Entity
@Inheritance(strategy = InheritanceType.JOINED)
@Table(name = "BLC_PAGE_RULE")
public class PageRuleImpl implements PageRule {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(generator= "PageRuleId")
@GenericGenerator(
name="PageRuleId",
strategy="org.broadleafcommerce.common.persistence.IdOverrideTableGenerator",
parameters = {
@Parameter(name="segment_value", value="PageRuleImpl"),
@Parameter(name="entity_name", value="org.broadleafcommerce.cms.page.domain.PageRuleImpl")
}
)
@Column(name = "PAGE_RULE_ID")
protected Long id;
@Lob
@Type(type = "org.hibernate.type.StringClobType")
@Column(name = "MATCH_RULE", length = Integer.MAX_VALUE - 1)
protected String matchRule;
/* (non-Javadoc)
* @see org.broadleafcommerce.core.offer.domain.StructuredContentRule#getId()
*/
@Override
public Long getId() {
return id;
}
/* (non-Javadoc)
* @see org.broadleafcommerce.core.offer.domain.StructuredContentRule#setId(java.lang.Long)
*/
@Override
public void setId(Long id) {
this.id = id;
}
/* (non-Javadoc)
* @see org.broadleafcommerce.core.offer.domain.StructuredContentRule#getMatchRule()
*/
@Override
public String getMatchRule() {
return matchRule;
}
/* (non-Javadoc)
* @see org.broadleafcommerce.core.offer.domain.StructuredContentRule#setMatchRule(java.lang.String)
*/
@Override
public void setMatchRule(String matchRule) {
this.matchRule = matchRule;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((id == null) ? 0 : id.hashCode());
result = prime * result + ((matchRule == null) ? 0 : matchRule.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
PageRuleImpl other = (PageRuleImpl) obj;
if (id != null && other.id != null) {
return id.equals(other.id);
}
if (matchRule == null) {
if (other.matchRule != null)
return false;
} else if (!matchRule.equals(other.matchRule))
return false;
return true;
}
@Override
public PageRule cloneEntity() {
PageRuleImpl newField = new PageRuleImpl();
newField.matchRule = matchRule;
return newField;
}
}
| 1no label
|
admin_broadleaf-contentmanagement-module_src_main_java_org_broadleafcommerce_cms_page_domain_PageRuleImpl.java
|