_id
stringlengths 2
7
| title
stringlengths 3
140
| partition
stringclasses 3
values | text
stringlengths 73
34.1k
| language
stringclasses 1
value | meta_information
dict |
---|---|---|---|---|---|
q176900
|
ResponseHeadersConfigurer.addNoCacheHeaders
|
test
|
/**
 * Populates the given header map with the standard trio of HTTP headers
 * that disable client-side caching: Pragma, Cache-Control and Expires.
 *
 * @param map destination map receiving header-name/value pairs.
 */
private static void addNoCacheHeaders(final Map<String, String> map) {
    final String noCache = "no-cache";
    map.put(HttpHeader.PRAGMA.toString(), noCache);
    map.put(HttpHeader.CACHE_CONTROL.toString(), noCache);
    map.put(HttpHeader.EXPIRES.toString(), "0");
}
|
java
|
{
"resource": ""
}
|
q176901
|
WroUtil.getPathInfoFromLocation
|
test
|
/**
 * Computes the path-info style suffix of the given location.
 * <p>
 * When the request exposes a non-null context path, the context path prefix
 * (matched case-insensitively) is stripped from the location; a location
 * that does not start with the context path is returned unchanged.
 *
 * @param request the current request, used only for its context path.
 * @param location the location to process; must not be empty.
 * @return the location with its leading segment removed as described above.
 * @throws IllegalArgumentException if location is empty or null.
 */
public static String getPathInfoFromLocation(final HttpServletRequest request, final String location) {
    if (StringUtils.isEmpty(location)) {
        throw new IllegalArgumentException("Location cannot be empty string!");
    }
    final String contextPath = request.getContextPath();
    if (contextPath != null) {
        // Both branches return, so the fallback below only runs when
        // getContextPath() is null — NOTE(review): presumably only mocks or
        // unusual containers return null here; confirm.
        if (startsWithIgnoreCase(location, contextPath)) {
            return location.substring(contextPath.length());
        } else {
            return location;
        }
    }
    // Fallback: drop the first path segment ("/segment/rest" -> "/rest"),
    // or return "" when there is only a single segment.
    final String noSlash = location.substring(1);
    final int nextSlash = noSlash.indexOf('/');
    if (nextSlash == -1) {
        return "";
    }
    return noSlash.substring(nextSlash);
}
|
java
|
{
"resource": ""
}
|
q176902
|
WroUtil.getServletPathFromLocation
|
test
|
/**
 * Computes the servlet-path part of the given location by cutting away the
 * path-info suffix produced by {@link #getPathInfoFromLocation}.
 *
 * @param request the current request.
 * @param location the location to process.
 * @return the location with its path-info portion removed.
 */
public static String getServletPathFromLocation(final HttpServletRequest request, final String location) {
    final String pathInfo = getPathInfoFromLocation(request, location);
    return location.replace(pathInfo, StringUtils.EMPTY);
}
|
java
|
{
"resource": ""
}
|
q176903
|
WroUtil.matchesUrl
|
test
|
/**
 * Decides whether the request URI matches the given path, ignoring case and
 * tolerating an optional trailing slash.
 *
 * @param request the request whose URI is inspected.
 * @param path pattern fragment matched against the end of the URI (treated
 *        as a regex fragment, exactly as in the original contract).
 * @return true when the request URI matches, false when it does not or the
 *         URI is null.
 */
public static boolean matchesUrl(final HttpServletRequest request, final String path) {
    // Compile first (as before), so an invalid path still fails fast even
    // when the URI is null.
    final Pattern pattern = Pattern.compile(".*" + path + "[/]?", Pattern.CASE_INSENSITIVE);
    final String requestUri = request.getRequestURI();
    if (requestUri == null) {
        return false;
    }
    return pattern.matcher(requestUri).matches();
}
|
java
|
{
"resource": ""
}
|
q176904
|
WroUtil.loadRegexpWithKey
|
test
|
/**
 * Loads the regular expression registered under the given key from the
 * bundled {@code regexp.properties} resource.
 *
 * @param key the property key to look up.
 * @return the pattern string, or {@code null} when the key is absent.
 * @throws WroRuntimeException when reading the property file fails.
 */
public static String loadRegexpWithKey(final String key) {
    InputStream stream = null;
    try {
        // NOTE(review): if the resource is missing, stream is null here and
        // load(null) presumably fails — confirm regexp.properties is always
        // bundled next to WroUtil.
        stream = WroUtil.class.getResourceAsStream("regexp.properties");
        final Properties props = new RegexpProperties().load(stream);
        return props.getProperty(key);
    } catch (final IOException e) {
        throw new WroRuntimeException("Could not load pattern with key: " + key + " from property file", e);
    } finally {
        // Best-effort close; never masks the primary exception.
        closeQuietly(stream);
    }
}
|
java
|
{
"resource": ""
}
|
q176905
|
WroUtil.safeCopy
|
test
|
/**
 * Copies all characters from the reader to the writer, then closes both
 * streams unconditionally, suppressing any exception raised while closing.
 *
 * @param reader the source; always closed on exit.
 * @param writer the destination; always closed on exit.
 * @throws IOException when the copy itself fails.
 */
public static void safeCopy(final Reader reader, final Writer writer)
    throws IOException {
    try {
        reader.transferTo(writer);
    } finally {
        // Quiet close, mirroring the original best-effort cleanup.
        try {
            reader.close();
        } catch (final IOException ignored) {
            // deliberately swallowed: close failure must not mask the copy result
        }
        try {
            writer.close();
        } catch (final IOException ignored) {
            // deliberately swallowed
        }
    }
}
|
java
|
{
"resource": ""
}
|
q176906
|
WroUtil.createTempFile
|
test
|
/**
 * Creates an empty file named {@code wro4j-<uuid>.<extension>} inside a
 * freshly created temporary directory.
 *
 * @param extension the file extension, without the leading dot.
 * @return the newly created (empty) file.
 * @throws WroRuntimeException wrapping any {@link IOException}.
 */
public static File createTempFile(final String extension) {
    try {
        final String fileName = String.format("wro4j-%s.%s", UUID.randomUUID().toString(), extension);
        final File file = new File(createTempDirectory(), fileName);
        // Return value deliberately ignored: the random UUID makes a
        // pre-existing file practically impossible — TODO confirm intent.
        file.createNewFile();
        return file;
    } catch (final IOException e) {
        throw WroRuntimeException.wrap(e);
    }
}
|
java
|
{
"resource": ""
}
|
q176907
|
WroUtil.cleanImageUrl
|
test
|
/**
 * Normalizes an image URL extracted from CSS: every single or double quote
 * is blanked out, then surrounding whitespace is trimmed.
 *
 * @param imageUrl the raw URL text; must not be null.
 * @return the cleaned URL.
 */
public static final String cleanImageUrl(final String imageUrl) {
    notNull(imageUrl);
    final String withoutQuotes = imageUrl.replace('\'', ' ').replace('\"', ' ');
    return withoutQuotes.trim();
}
|
java
|
{
"resource": ""
}
|
q176908
|
ServletContextAttributeHelper.setAttribute
|
test
|
/**
 * Stores the given value in the servlet context under the name derived from
 * the attribute, after validating that the value's type is acceptable for
 * that attribute.
 *
 * @param attribute the well-known attribute to set; must not be null.
 * @param object the value to store; must satisfy attribute.isValid().
 * @throws IllegalArgumentException if the value is not a valid subtype.
 */
final void setAttribute(final Attribute attribute, final Object object) {
    Validate.notNull(attribute);
    LOG.debug("setting attribute: {} with value: {}", attribute, object);
    // Type check happens before the write so an invalid value never lands
    // in the servlet context.
    Validate.isTrue(attribute.isValid(object), object + " is not of valid subType for attribute: " + attribute);
    servletContext.setAttribute(getAttributeName(attribute), object);
}
|
java
|
{
"resource": ""
}
|
q176909
|
DataUriGenerator.generateDataURI
|
test
|
/**
 * Reads the entire stream and encodes it as a {@code data:} URI whose MIME
 * type is derived from the given file name.
 * <p>
 * Fix: the stream is now closed in a {@code finally} block, so it no longer
 * leaks when reading fails.
 *
 * @param inputStream the content to encode; always closed before returning.
 * @param fileName used only to derive the MIME type.
 * @return the complete data URI string.
 * @throws IOException when reading the stream fails.
 */
public String generateDataURI(final InputStream inputStream, final String fileName)
    throws IOException {
    final StringWriter writer = new StringWriter();
    final byte[] bytes;
    try {
        bytes = IOUtils.toByteArray(inputStream);
    } finally {
        // Close even when reading throws, so the caller's stream never leaks.
        inputStream.close();
    }
    final String mimeType = getMimeType(fileName);
    // actually write
    generateDataURI(bytes, writer, mimeType);
    return writer.toString();
}
|
java
|
{
"resource": ""
}
|
q176910
|
DataUriGenerator.generateDataURI
|
test
|
/**
 * Writes a {@code data:<mime>;base64,<payload>} URI for the given bytes to
 * the writer.
 * <p>
 * Fix: uses {@link StringBuilder} instead of {@code StringBuffer} — the
 * buffer is method-local, so its synchronization was pure overhead.
 *
 * @param bytes the raw content to base64-encode.
 * @param out the destination writer; written once, not closed here.
 * @param mimeType the MIME type to embed in the URI.
 * @throws IOException when writing fails.
 */
private void generateDataURI(final byte[] bytes, final Writer out, final String mimeType)
    throws IOException {
    final StringBuilder uri = new StringBuilder();
    uri.append(DATA_URI_PREFIX);
    // add MIME type
    uri.append(mimeType);
    // output base64-encoding
    uri.append(";base64,");
    uri.append(Base64.encodeBytes(bytes));
    // single write keeps the URI atomic from the writer's point of view
    out.write(uri.toString());
}
|
java
|
{
"resource": ""
}
|
q176911
|
Context.set
|
test
|
/**
 * Binds the given context (configured with the given configuration) to the
 * current thread via a freshly generated correlation id.
 *
 * @param context the context to register; must not be null.
 * @param config the configuration applied to the context; must not be null.
 */
public static void set(final Context context, final WroConfiguration config) {
    notNull(context);
    notNull(config);
    context.setConfig(config);
    // The correlation id links this thread (thread-local) to its entry in
    // the shared context map.
    final String correlationId = generateCorrelationId();
    CORRELATION_ID.set(correlationId);
    CONTEXT_MAP.put(correlationId, context);
}
|
java
|
{
"resource": ""
}
|
q176912
|
Context.unset
|
test
|
/**
 * Detaches the current thread from its context: removes the map entry for
 * the thread's correlation id (when one exists) and clears the thread-local
 * itself.
 */
public static void unset() {
    final String id = CORRELATION_ID.get();
    if (id != null) {
        CONTEXT_MAP.remove(id);
    }
    CORRELATION_ID.remove();
}
|
java
|
{
"resource": ""
}
|
q176913
|
ResourceWatcher.check
|
test
|
/**
 * Checks whether any resource of the group identified by the cache key has
 * changed; on change, notifies the callback and invalidates the cached
 * entry for that key. Failures are logged, never propagated (best-effort
 * watcher).
 *
 * @param cacheKey identifies the group/type to inspect; must not be null.
 * @param callback notified when the group (or a resource) changed.
 */
public void check(final CacheKey cacheKey, final Callback callback) {
    notNull(cacheKey);
    LOG.debug("started");
    final StopWatch watch = new StopWatch();
    watch.start("detect changes");
    try {
        final Group group = new WroModelInspector(modelFactory.create()).getGroupByName(cacheKey.getGroupName());
        if (isGroupChanged(group.collectResourcesOfType(cacheKey.getType()), callback)) {
            callback.onGroupChanged(cacheKey);
            // Storing null evicts the stale cached value for this key.
            cacheStrategy.put(cacheKey, null);
        }
        // Clear the detector's per-run state so the next check starts fresh.
        resourceChangeDetector.reset();
    } catch (final Exception e) {
        // Best-effort: delegate to the (non-fatal) exception handler.
        onException(e);
    } finally {
        watch.stop();
        LOG.debug("resource watcher info: {}", watch.prettyPrint());
    }
}
|
java
|
{
"resource": ""
}
|
q176914
|
ResourceWatcher.onException
|
test
|
/**
 * Handles a failure during change detection: logs the message at INFO and
 * the full stack trace only at DEBUG, since the watcher is best-effort.
 *
 * @param e the failure; never rethrown.
 */
protected void onException(final Exception e) {
    // not using ERROR log intentionally, since this error is not that important
    LOG.info("Could not check for resource changes because: {}", e.getMessage());
    LOG.debug("[FAIL] detecting resource change ", e);
}
|
java
|
{
"resource": ""
}
|
q176915
|
ResourceWatcher.checkResourceChange
|
test
|
/**
 * When the given resource is detected as changed, records the change in the
 * shared flag and fires both the caller's callback and the lifecycle
 * callback.
 *
 * @param resource the resource to test.
 * @param group the owning group (its name scopes the change detection).
 * @param callback caller-provided change listener.
 * @param isChanged shared flag flipped to true on the first change.
 * @throws Exception propagated from change detection or the callbacks.
 */
private void checkResourceChange(final Resource resource, final Group group, final Callback callback,
    final AtomicBoolean isChanged)
    throws Exception {
    if (!isChanged(resource, group.getName())) {
        return;
    }
    // Flip the flag once; subsequent changed resources leave it set.
    isChanged.compareAndSet(false, true);
    callback.onResourceChanged(resource);
    lifecycleCallback.onResourceChanged(resource);
}
|
java
|
{
"resource": ""
}
|
q176916
|
ResourceChangeInfo.updateHashForGroup
|
test
|
/**
 * Records the newly computed hash; when it differs from the persisted one,
 * all remembered groups are cleared so change detection starts over.
 *
 * @param hash the latest computed hash (may be null).
 * @param groupName used only for validation and logging; must not be null.
 */
public void updateHashForGroup(final String hash, final String groupName) {
    notNull(groupName);
    this.currentHash = hash;
    if (isChangedHash()) {
        LOG.debug("Group {} has changed", groupName);
        //remove all persisted groups. Starting over..
        groups.clear();
    }
}
|
java
|
{
"resource": ""
}
|
q176917
|
Group.hasResourcesOfType
|
test
|
/**
 * Tells whether this group contains at least one resource of the given
 * type.
 *
 * @param resourceType the type to look for; must not be null.
 * @return true when a matching resource exists, false otherwise.
 */
public final boolean hasResourcesOfType(final ResourceType resourceType) {
    notNull(resourceType, "ResourceType cannot be null!");
    boolean found = false;
    for (final Resource candidate : resources) {
        if (resourceType.equals(candidate.getType())) {
            found = true;
            break;
        }
    }
    return found;
}
|
java
|
{
"resource": ""
}
|
q176918
|
NodeTypeScriptProcessor.createProcess
|
test
|
/**
 * Starts the external compiler process for the given source file, with
 * stderr merged into stdout, and attaches gobbler threads to drain both
 * streams.
 *
 * @param sourceFile the input file; must not be null.
 * @param destFile the output file passed on the command line.
 * @return the started process; caller is responsible for waiting on it.
 * @throws IOException when the process cannot be started.
 */
private Process createProcess(final File sourceFile, final File destFile)
    throws IOException {
    notNull(sourceFile);
    final String[] commandLine = getCommandLine(sourceFile.getPath(), destFile.getPath());
    LOG.debug("CommandLine arguments: {}", Arrays.asList(commandLine));
    final Process process = new ProcessBuilder(commandLine).redirectErrorStream(true).start();
    //Gobblers responsible for reading stream to avoid blocking of the process when the buffer is full.
    final StreamGobbler errorGobbler = new StreamGobbler(process.getErrorStream(), "ERROR");
    // With redirectErrorStream(true) the real output arrives on stdout.
    final StreamGobbler outputGobbler = new StreamGobbler(process.getInputStream(), "OUTPUT");
    // kick them off
    errorGobbler.start();
    outputGobbler.start();
    return process;
}
|
java
|
{
"resource": ""
}
|
q176919
|
Base64.encodeObject
|
test
|
/**
 * Serializes the given object and base64-encodes the result using the
 * default options; see the options-taking overload for details.
 *
 * @param serializableObject the object to serialize and encode.
 * @return the base64-encoded serialized form.
 * @throws java.io.IOException if serialization fails.
 */
public static String encodeObject(final java.io.Serializable serializableObject)
    throws java.io.IOException {
    return encodeObject(serializableObject, NO_OPTIONS);
}
|
java
|
{
"resource": ""
}
|
q176920
|
XmlModelFactory.processGroups
|
test
|
/**
 * Indexes every {@code <group>} element of the document by its name
 * attribute, so group-refs can later be resolved without re-scanning the
 * DOM.
 *
 * @param document the parsed model document.
 */
private void processGroups(final Document document) {
    // handle imports
    final NodeList groupNodes = document.getElementsByTagName(TAG_GROUP);
    final int total = groupNodes.getLength();
    for (int index = 0; index < total; index++) {
        final Element group = (Element) groupNodes.item(index);
        allGroupElements.put(group.getAttribute(ATTR_GROUP_NAME), group);
    }
}
|
java
|
{
"resource": ""
}
|
q176921
|
XmlModelFactory.parseGroup
|
test
|
/**
 * Parses one group element into its resources, resolving group-refs
 * recursively while guarding against cyclic definitions.
 *
 * @param element the group element to parse.
 * @return the resources of the (possibly already parsed) group.
 * @throws RecursiveGroupDefinitionException when the group is already being
 *         parsed further up the call stack (a cycle).
 */
private Collection<Resource> parseGroup(final Element element) {
    final String name = element.getAttribute(ATTR_GROUP_NAME);
    final String isAbstractAsString = element.getAttribute(ATTR_GROUP_ABSTRACT);
    final boolean isAbstractGroup = StringUtils.isNotEmpty(isAbstractAsString) && Boolean.valueOf(isAbstractAsString);
    // Seeing this name again while it is still "in process" means the
    // group (transitively) references itself.
    if (groupsInProcess.contains(name)) {
        throw new RecursiveGroupDefinitionException("Infinite Recursion detected for the group: " + name
            + ". Recursion path: " + groupsInProcess);
    }
    LOG.debug("\tadding group: {}", name);
    groupsInProcess.add(name);
    // skip if this group is already parsed
    final Group parsedGroup = new WroModelInspector(model).getGroupByName(name);
    if (parsedGroup != null) {
        // remove before returning
        // this group is parsed, remove from unparsed groups collection
        groupsInProcess.remove(name);
        return parsedGroup.getResources();
    }
    // createGroup may recurse back into parseGroup via group-refs; the name
    // must stay in groupsInProcess until it returns.
    final Group group = createGroup(element);
    // this group is parsed, remove from unparsed collection
    groupsInProcess.remove(name);
    if (!isAbstractGroup) {
        // add only non abstract groups
        model.addGroup(group);
    }
    return group.getResources();
}
|
java
|
{
"resource": ""
}
|
q176922
|
XmlModelFactory.createGroup
|
test
|
/**
 * Builds a {@link Group} from its DOM element by parsing every child
 * element into resources.
 *
 * @param element the group element.
 * @return the populated group.
 */
protected Group createGroup(final Element element) {
    final Group group = new Group(element.getAttribute(ATTR_GROUP_NAME));
    final List<Resource> resources = new ArrayList<Resource>();
    final NodeList children = element.getChildNodes();
    for (int index = 0, total = children.getLength(); index < total; index++) {
        final Node child = children.item(index);
        // Skip text/comment nodes; only element children describe resources.
        if (child instanceof Element) {
            parseResource((Element) child, resources);
        }
    }
    group.setResources(resources);
    return group;
}
|
java
|
{
"resource": ""
}
|
q176923
|
XmlModelFactory.parseResource
|
test
|
/**
 * Converts one child element of a group into resources: a group-ref element
 * contributes all resources of the referenced group, an element with a
 * recognized resource type contributes a single resource.
 *
 * @param resourceElement the element to convert.
 * @param resources collection receiving the parsed resources.
 */
private void parseResource(final Element resourceElement, final Collection<Resource> resources) {
    final String tagName = resourceElement.getTagName();
    final String uri = resourceElement.getTextContent();
    if (TAG_GROUP_REF.equals(tagName)) {
        // uri in this case is the group name
        resources.addAll(getResourcesForGroup(uri));
    }
    // NOTE(review): deliberately not an else-if — assumes a group-ref
    // element never also carries a resource type; confirm.
    if (getResourceType(resourceElement) != null) {
        final Resource resource = createResource(resourceElement);
        LOG.debug("\t\tadding resource: {}", resource);
        resources.add(resource);
    }
}
|
java
|
{
"resource": ""
}
|
q176924
|
XmlModelFactory.getResourcesForGroup
|
test
|
/**
 * Resolves a group-ref: returns the resources of an already-parsed group,
 * or parses the referenced group's raw DOM element on demand.
 *
 * @param groupName the referenced group's name.
 * @return the resources of the referenced group.
 * @throws WroRuntimeException when no group with that name exists.
 */
private Collection<Resource> getResourcesForGroup(final String groupName) {
    final Group existing = new WroModelInspector(model).getGroupByName(groupName);
    if (existing != null) {
        return existing.getResources();
    }
    // Not parsed yet: fall back to the DOM element indexed earlier.
    final Element groupElement = allGroupElements.get(groupName);
    if (groupElement == null) {
        throw new WroRuntimeException("Invalid group-ref: " + groupName);
    }
    return parseGroup(groupElement);
}
|
java
|
{
"resource": ""
}
|
q176925
|
ElkTimer.log
|
test
|
/**
 * Writes a human-readable summary of this timer to the given logger at the
 * given priority: run count, total and average CPU/wall-clock times, and
 * per-thread averages for aggregated timers. Does nothing when the logger
 * is not enabled for the priority.
 *
 * @param logger the destination logger.
 * @param priority the level to log at.
 */
public void log(Logger logger, LogLevel priority) {
    if (LoggerWrap.isEnabledFor(logger, priority)) {
        // Label carries the thread id (single-thread timer) or the number
        // of aggregated threads when more than one.
        String timerLabel;
        if (threadId != 0) {
            timerLabel = name + " (thread " + threadId + ")";
        } else if (threadCount > 1) {
            timerLabel = name + " (over " + threadCount + " threads)";
        } else {
            timerLabel = name;
        }
        if (todoFlags == RECORD_NONE) {
            LoggerWrap.log(logger, priority, "Timer " + timerLabel + " recorded "
                + measurements + " run(s), no times taken");
        } else {
            // Build two parallel slash-separated strings: column labels and
            // their values. Times are divided by 1e6 and printed as "(ms)",
            // so the fields presumably store nanoseconds — TODO confirm.
            String labels = "";
            String values = "";
            String separator;
            // CPU time is only recorded for single-thread timers (threadId != 0).
            if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) {
                labels += "CPU";
                values += totalCpuTime / 1000000;
                separator = "/";
            } else {
                separator = "";
            }
            if ((todoFlags & RECORD_WALLTIME) != 0) {
                labels += separator + "Wall";
                values += separator + totalWallTime / 1000000;
            }
            // Averages over the number of measurements.
            if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) {
                labels += "/CPU avg";
                values += "/" + (float) (totalCpuTime) / measurements
                    / 1000000;
            }
            if ((todoFlags & RECORD_WALLTIME) != 0) {
                labels += "/Wall avg";
                values += "/" + (float) (totalWallTime) / measurements
                    / 1000000;
            }
            // Per-thread averages only make sense for aggregated timers.
            if (threadCount > 1) {
                if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) {
                    labels += "/CPU per thread";
                    values += "/" + (float) (totalCpuTime) / threadCount
                        / 1000000;
                }
                if ((todoFlags & RECORD_WALLTIME) != 0) {
                    labels += "/Wall per thread";
                    values += "/" + (float) (totalWallTime) / threadCount
                        / 1000000;
                }
            }
            LoggerWrap.log(logger, priority, "Time for " + timerLabel + " for "
                + measurements + " run(s) " + labels + " (ms): "
                + values);
        }
        // Logging a running timer is legal but probably unintended; warn.
        if (isRunning) {
            logger.warn("Timer " + timerLabel
                + " logged while it was still running");
        }
    }
}
|
java
|
{
"resource": ""
}
|
q176926
|
ElkTimer.stopNamedTimer
|
test
|
/**
 * Stops the registered timer matching the given name, flags and thread id.
 * <p>
 * Fix: replaces the {@code containsKey()}+{@code get()} pair with a single
 * {@code get()} — one lookup instead of two, and no window in which the
 * entry could be removed between the check and the read.
 *
 * @param timerName the timer's name.
 * @param todoFlags the timer's recording flags.
 * @param threadId the thread the timer is bound to.
 * @return the result of {@code stop()}, or -1 when no such timer is
 *         registered.
 */
public static long stopNamedTimer(String timerName, int todoFlags,
        long threadId) {
    ElkTimer key = new ElkTimer(timerName, todoFlags, threadId);
    ElkTimer registered = registeredTimers.get(key);
    if (registered == null) {
        return -1;
    }
    return registered.stop();
}
|
java
|
{
"resource": ""
}
|
q176927
|
ElkTimer.getNamedTimer
|
test
|
/**
 * Convenience overload: returns the registered timer for the given name and
 * flags, bound to the calling thread's id.
 *
 * @param timerName the timer's name.
 * @param todoFlags the timer's recording flags.
 * @return the shared timer instance for this name/flags/thread.
 */
public static ElkTimer getNamedTimer(String timerName, int todoFlags) {
    return getNamedTimer(timerName, todoFlags, Thread.currentThread()
        .getId());
}
|
java
|
{
"resource": ""
}
|
q176928
|
ElkTimer.getNamedTimer
|
test
|
/**
 * Returns the shared timer registered under the given name, flags and
 * thread id, registering a fresh one atomically when none exists yet.
 *
 * @param timerName the timer's name.
 * @param todoFlags the timer's recording flags.
 * @param threadId the thread the timer is bound to.
 * @return the canonical timer instance for this key.
 */
public static ElkTimer getNamedTimer(String timerName, int todoFlags,
        long threadId) {
    ElkTimer fresh = new ElkTimer(timerName, todoFlags, threadId);
    // putIfAbsent keeps exactly one canonical instance per key, even under
    // concurrent registration.
    ElkTimer existing = registeredTimers.putIfAbsent(fresh, fresh);
    return existing == null ? fresh : existing;
}
|
java
|
{
"resource": ""
}
|
q176929
|
ClassExpressionSaturationFactory.printStatistics
|
test
|
/**
 * Prints the saturation statistics and, when debug logging is enabled, a
 * summary of submitted jobs and lock counts.
 * <p>
 * Fixes: braces around the single-statement {@code if} (the brace-less form
 * invited misreading), and parameterized logging for the locks line so the
 * string is not concatenated when the message is filtered.
 */
public void printStatistics() {
    ruleApplicationFactory_.getSaturationStatistics().print(LOGGER_);
    if (LOGGER_.isDebugEnabled()) {
        if (aggregatedStats_.jobsSubmittedNo > 0) {
            LOGGER_.debug(
                    "Saturation Jobs Submitted=Done+Processed: {}={}+{}",
                    aggregatedStats_.jobsSubmittedNo,
                    aggregatedStats_.jobsAlreadyDoneNo,
                    aggregatedStats_.jobsProcessedNo);
        }
        LOGGER_.debug("Locks: {}", aggregatedStats_.locks);
    }
}
|
java
|
{
"resource": ""
}
|
q176930
|
ClassExpressionSaturationFactory.wakeUpWorkers
|
test
|
/**
 * Signals all workers parked on {@code thereAreContextsToProcess_} that new
 * work may be available; a cheap no-op when no worker is waiting.
 */
private void wakeUpWorkers() {
    // Unsynchronized fast-path read of the flag; the flag is only cleared
    // under the lock below. NOTE(review): a racy miss here presumably has
    // to be tolerated by the waiters' own re-check protocol — confirm.
    if (!workersWaiting_) {
        return;
    }
    stopWorkersLock_.lock();
    try {
        workersWaiting_ = false;
        thereAreContextsToProcess_.signalAll();
    } finally {
        stopWorkersLock_.unlock();
    }
}
|
java
|
{
"resource": ""
}
|
q176931
|
ClassExpressionSaturationFactory.updateProcessedCounters
|
test
|
/**
 * Invoked by a worker after it finished a chunk of work: when this worker
 * is the last active one, publishes lower bounds for the number of
 * processed jobs and saturated contexts, waking sleeping workers when the
 * context counter advances. The snapshot/re-check sequence below is
 * order-critical — do not reorder statements.
 *
 * @param snapshotFinishedWorkers the number of workers the caller observed
 *        as finished.
 */
private void updateProcessedCounters(int snapshotFinishedWorkers) {
    if (isInterrupted()) {
        // On interrupt just release any sleeping workers and bail out.
        wakeUpWorkers();
        return;
    }
    if (countStartedWorkers_.get() > snapshotFinishedWorkers) {
        /*
         * We are not the last worker processing the saturation state, so
         * the current jobs and contexts may not be processed yet.
         */
        return;
    }
    /*
     * Otherwise we were the last worker processing the saturation state;
     * take the values for current jobs and contexts and verify that we are
     * still the last worker (thus the order is important here).
     */
    int snapshotCountJobsSubmitted = countJobsSubmittedUpper_.get();
    int snapshotCountContextNonSaturated = saturationState_
        .getContextMarkNonSaturatedCount();
    int snapshotCountStartedWorkers = countStartedWorkers_.get();
    if (snapshotCountStartedWorkers > snapshotFinishedWorkers) {
        /* no longer the last worker */
        return;
    }
    /*
     * If we arrive here, #snapshotCountJobsSubmitted and
     * #snapshotCountContextNonSaturated represents at least the number of
     * jobs processed and saturated contexts. Furthermore, since we took
     * them in this order, we know that all contexts for the processed jobs
     * were created, saturated, and counted. Now, we updated the
     * corresponding counters for the processed contexts and jobs but in the
     * reversed order to make sure that for every job considered to be
     * processed all contexts were already considered to be processed.
     */
    if (updateIfSmaller(countContextsSaturatedLower_,
        snapshotCountContextNonSaturated)) {
        /*
         * Sleeping workers can now take new inputs.
         */
        wakeUpWorkers();
    }
    updateIfSmaller(countJobsProcessedLower_, snapshotCountJobsSubmitted);
}
|
java
|
{
"resource": ""
}
|
q176932
|
ClassExpressionSaturationFactory.updateFinishedCounters
|
test
|
/**
 * Marks the contexts of processed jobs as saturated and notifies the
 * listener for every job whose saturation is now complete. Both loops are
 * optimistic retry loops; the compare-and-set on the finished-jobs counter
 * ensures each job is finalized by exactly one worker.
 *
 * @param localStatistics per-worker statistics updated for each finished job.
 * @throws InterruptedException if the listener notification is interrupted.
 */
private void updateFinishedCounters(ThisStatistics localStatistics)
    throws InterruptedException {
    int snapshotJobsProcessed = countJobsProcessedLower_.get();
    /*
     * ensure that all contexts for processed jobs are marked as saturated
     */
    for (;;) {
        int snapshotCountContextsSaturatedLower = countContextsSaturatedLower_
            .get();
        saturationState_
            .setContextsSaturated(snapshotCountContextsSaturatedLower);
        if (saturationState_
            .getContextSetSaturatedCount() < snapshotCountContextsSaturatedLower) {
            /*
             * this means that some other worker also sets contexts as
             * saturated, then it will mark the finished jobs instead
             */
            return;
        }
        /*
         * ensure that the counter for processed jobs is still up to date
         */
        int updatedSnapshotJobsProcessed = countJobsProcessedLower_.get();
        if (updatedSnapshotJobsProcessed == snapshotJobsProcessed) {
            break;
        }
        /* else refresh counters */
        snapshotJobsProcessed = updatedSnapshotJobsProcessed;
    }
    /*
     * ensure that all processed jobs are finished
     */
    for (;;) {
        int snapshotJobsFinished = countJobsFinishedUpper_.get();
        if (snapshotJobsFinished >= snapshotJobsProcessed) {
            break;
        }
        /*
         * update the finished context counter at least to the taken
         * snapshot value and mark the corresponding number of jobs as
         * processed
         */
        if (!countJobsFinishedUpper_.compareAndSet(snapshotJobsFinished,
            snapshotJobsFinished + 1)) {
            /* retry */
            continue;
        }
        // else: this worker won the CAS and owns the next queued job
        J nextJob = jobsInProgress_.poll();
        IndexedContextRoot root = nextJob.getInput();
        Context rootSaturation = saturationState_.getContext(root);
        // Sanity check: a finished job's (initialized) context must be saturated.
        if (rootSaturation.isInitialized()
            && !rootSaturation.isSaturated()) {
            LOGGER_.error("{}: context for a finished job not saturated!",
                rootSaturation);
        }
        nextJob.setOutput(rootSaturation);
        LOGGER_.trace("{}: saturation finished", root);
        localStatistics.jobsProcessedNo++;
        listener_.notifyFinished(nextJob);// can be interrupted
    }
}
|
java
|
{
"resource": ""
}
|
q176933
|
ClassExpressionSaturationFactory.updateIfSmaller
|
test
|
/**
 * Atomically raises {@code counter} to {@code value} when its current value
 * is smaller, retrying on contention.
 *
 * @param counter the counter to raise.
 * @param value the candidate new value.
 * @return {@code true} when the counter was updated, {@code false} when it
 *         was already at least {@code value}.
 */
private static boolean updateIfSmaller(AtomicInteger counter, int value) {
    while (true) {
        int current = counter.get();
        if (current >= value) {
            // Nothing to do: the counter is already large enough.
            return false;
        }
        if (counter.compareAndSet(current, value)) {
            return true;
        }
        // Lost a CAS race with another thread; re-read and retry.
    }
}
|
java
|
{
"resource": ""
}
|
q176934
|
DummyRuleVisitor.defaultVisit
|
test
|
/**
 * Fallback for every rule without a dedicated visit method: logs (at trace
 * level only) that the rule application is ignored and produces nothing.
 *
 * @param rule the rule being (not) applied.
 * @param premise the premise the rule matched.
 * @param premises the context premises.
 * @param producer the inference producer (unused here).
 * @return always {@code null}.
 */
protected <P> O defaultVisit(Rule<P> rule, P premise,
        ContextPremises premises, ClassInferenceProducer producer) {
    if (LOGGER_.isTraceEnabled()) {
        LOGGER_.trace("ignore {} by {} in {}", premise, rule, premises);
    }
    return null;
}
|
java
|
{
"resource": ""
}
|
q176935
|
ObjectPropertyTaxonomyComputationFactory.instertIntoTaxonomy
|
test
|
/**
 * Inserts the given property into the property taxonomy being built:
 * computes its equivalence class, its direct strict sub-properties (via
 * transitive reduction), and hands the result to the output processor.
 * (Method name contains a pre-existing typo — "instert" — kept because
 * callers depend on it.)
 *
 * @param property the property to place into the taxonomy.
 */
private void instertIntoTaxonomy(final IndexedObjectProperty property) {
    /*
     * @formatter:off
     *
     * Transitive reduction and taxonomy computation
     * if sub-properties of a sub-property contain this property,
     * they are equivalent
     * if a property is a strict sub-property of another strict sub-property,
     * it is not direct
     *
     * @formatter:on
     */
    final Map<IndexedObjectProperty, ElkObjectProperty> equivalent = collectEquivalent(
        property);
    if (equivalent == null) {
        // Equivalent to top.
        return;
    }
    // For each strict sub-property: its equivalence class, plus the set of
    // sub-properties proven indirect (reachable through another strict one).
    final Map<IndexedObjectProperty, Collection<? extends ElkObjectProperty>> subEquivalent = new ArrayHashMap<IndexedObjectProperty, Collection<? extends ElkObjectProperty>>();
    final Set<IndexedObjectProperty> indirect = new ArrayHashSet<IndexedObjectProperty>();
    for (final IndexedObjectProperty subProperty : property.getSaturated()
        .getSubProperties()) {
        if (equivalent.containsKey(subProperty)) {
            // subProperty is not strict
            continue;
        }
        // subProperty is strict
        final Map<IndexedObjectProperty, ElkObjectProperty> subEq = collectEquivalent(
            subProperty);
        // should not be null, because top cannot be a strict sub-property
        subEquivalent.put(subProperty, subEq.values());
        for (final IndexedObjectProperty subSubProperty : subProperty
            .getSaturated().getSubProperties()) {
            if (!subEq.containsKey(subSubProperty)) {
                // strict
                indirect.add(subSubProperty);
            }
        }
    }
    /*
     * If property is not equivalent to bottom and there are no strict sub
     * properties, add the bottom as a default sub property.
     */
    if (subEquivalent.isEmpty() && (indexedBottomProperty_ == null
        || !equivalent.containsKey(indexedBottomProperty_))) {
        outputProcessor_
            .visit(new TransitiveReductionOutputEquivalentDirectImpl<ElkObjectProperty>(
                equivalent.values(), defaultDirectSubproperties_));
        return;
    }
    // else
    // Lazily filter out indirect sub-properties: a null from the
    // transformation drops the element from the mapped collection.
    final Collection<Collection<? extends ElkObjectProperty>> direct = Operations
        .map(subEquivalent.entrySet(),
            new Operations.Transformation<Map.Entry<IndexedObjectProperty, Collection<? extends ElkObjectProperty>>, Collection<? extends ElkObjectProperty>>() {
                @Override
                public Collection<? extends ElkObjectProperty> transform(
                    final Entry<IndexedObjectProperty, Collection<? extends ElkObjectProperty>> element) {
                    if (indirect.contains(element.getKey())) {
                        return null;
                    } else {
                        return element.getValue();
                    }
                }
            });
    outputProcessor_
        .visit(new TransitiveReductionOutputEquivalentDirectImpl<ElkObjectProperty>(
            equivalent.values(), direct));
}
|
java
|
{
"resource": ""
}
|
q176936
|
AbstractReasonerState.ensureLoading
|
test
|
/**
 * Makes sure the axiom loading stage has been executed. When loading is not
 * yet finished, first completes the stage that must not run concurrently
 * with loading (depending on incremental mode), then invalidates the stages
 * that depend on the loaded input, and finally runs the loading stage.
 *
 * @throws ElkException if completing one of the stages fails.
 */
public synchronized void ensureLoading() throws ElkException {
    if (!isLoadingFinished()) {
        // Finish whichever stage may still be consuming the previous input.
        if (isIncrementalMode()) {
            if (!stageManager.incrementalAdditionStage.isCompleted()) {
                complete(stageManager.incrementalAdditionStage);
            }
        } else {
            if (!stageManager.contextInitializationStage.isCompleted()) {
                complete(stageManager.contextInitializationStage);
            }
        }
        LOGGER_.trace("Reset axiom loading");
        stageManager.inputLoadingStage.invalidateRecursive();
        // Invalidate stages at the beginnings of the dependency chains.
        stageManager.contextInitializationStage.invalidateRecursive();
        stageManager.incrementalCompletionStage.invalidateRecursive();
    }
    complete(stageManager.inputLoadingStage);
}
|
java
|
{
"resource": ""
}
|
q176937
|
AbstractReasonerState.restoreSaturation
|
test
|
/**
 * Brings the saturation up to date (after ensuring loading) and, when the
 * relevant stage actually had to run, invalidates the downstream
 * consistency-checking stage so it is recomputed.
 *
 * @throws ElkException if loading or stage completion fails.
 */
private void restoreSaturation() throws ElkException {
    ensureLoading();
    // "changed" records whether the stage was incomplete before this call,
    // i.e. whether completing it may have altered the saturation.
    final boolean changed;
    if (isIncrementalMode()) {
        changed = !stageManager.incrementalTaxonomyCleaningStage
            .isCompleted();
        complete(stageManager.incrementalTaxonomyCleaningStage);
    } else {
        changed = !stageManager.contextInitializationStage.isCompleted();
        complete(stageManager.contextInitializationStage);
    }
    if (changed) {
        stageManager.consistencyCheckingStage.invalidateRecursive();
    }
}
|
java
|
{
"resource": ""
}
|
q176938
|
AbstractReasonerState.isInconsistent
|
test
|
/**
 * Checks whether the ontology is inconsistent, restoring the consistency
 * check first; when the answer is "consistent", also logs any potential
 * incompleteness of that answer.
 * <p>
 * Fix: the consistency state is now queried once and the result reused,
 * instead of being queried twice.
 *
 * @return {@code true} when the ontology is inconsistent.
 * @throws ElkException if restoring the consistency check fails.
 */
public synchronized boolean isInconsistent() throws ElkException {
    restoreConsistencyCheck();
    final boolean inconsistent = consistencyCheckingState.isInconsistent();
    if (!inconsistent) {
        // A "consistent" verdict may be incomplete; surface the monitor.
        incompleteness_.log(incompleteness_
                .getIncompletenessMonitorForClassification());
    }
    return inconsistent;
}
|
java
|
{
"resource": ""
}
|
q176939
|
AbstractReasonerState.restoreTaxonomy
|
test
|
/**
 * Computes (or restores) the class taxonomy, after resetting statistics and
 * re-establishing consistency.
 *
 * @return the up-to-date class taxonomy.
 * @throws ElkInconsistentOntologyException when the ontology is inconsistent.
 * @throws ElkException when a reasoning stage fails.
 */
protected Taxonomy<ElkClass> restoreTaxonomy()
    throws ElkInconsistentOntologyException, ElkException {
    ruleAndConclusionStats.reset();
    // also restores saturation and cleans the taxonomy if necessary
    restoreConsistencyCheck();
    if (consistencyCheckingState.isInconsistent()) {
        throw new ElkInconsistentOntologyException();
    }
    complete(stageManager.classTaxonomyComputationStage);
    return classTaxonomyState.getTaxonomy();
}
|
java
|
{
"resource": ""
}
|
q176940
|
AbstractReasonerState.restoreInstanceTaxonomy
|
test
|
/**
 * Computes (or restores) the instance taxonomy, after resetting statistics
 * and re-establishing consistency. Mirrors {@code restoreTaxonomy()} but
 * completes the instance-taxonomy stage.
 *
 * @return the up-to-date instance taxonomy.
 * @throws ElkInconsistentOntologyException when the ontology is inconsistent.
 * @throws ElkException when a reasoning stage fails.
 */
protected InstanceTaxonomy<ElkClass, ElkNamedIndividual> restoreInstanceTaxonomy()
    throws ElkInconsistentOntologyException, ElkException {
    ruleAndConclusionStats.reset();
    // also restores saturation and cleans the taxonomy if necessary
    restoreConsistencyCheck();
    if (consistencyCheckingState.isInconsistent()) {
        throw new ElkInconsistentOntologyException();
    }
    complete(stageManager.instanceTaxonomyComputationStage);
    return instanceTaxonomyState.getTaxonomy();
}
|
java
|
{
"resource": ""
}
|
q176941
|
ConsistencyCheckingState.getEvidence
|
test
|
/**
 * Returns a proof whose inferences justify the ontology-inconsistency
 * conclusion: one inference per inconsistent individual, plus (when
 * applicable) inferences from an inconsistent top property or owl:Thing.
 *
 * @param atMostOne when true, the proof reports at most a single inference.
 * @return a lazily-evaluated proof over entailment inferences.
 */
public Proof<? extends EntailmentInference> getEvidence(
        final boolean atMostOne) {
    return new Proof<EntailmentInference>() {
        @SuppressWarnings("unchecked")
        @Override
        public Collection<OntologyInconsistencyEntailmentInference> getInferences(
                final Object conclusion) {
            // Only the ontology-inconsistency conclusion has inferences here.
            if (!OntologyInconsistencyImpl.INSTANCE.equals(conclusion)) {
                return Collections.emptyList();
            }
            // else
            final Collection<? extends IndexedIndividual> inconsistentIndividuals = getInconsistentIndividuals();
            Iterable<OntologyInconsistencyEntailmentInference> result = Operations
                    .map(inconsistentIndividuals,
                            INDIVIDUAL_TO_ENTAILMENT_INFERENCE);
            // "size" tracks the element count by hand because the iterable
            // is lazy; it is passed to Operations.getCollection below.
            int size = inconsistentIndividuals.size();
            if (isTopObjectPropertyInBottom_) {
                result = Operations.concat(Operations
                        .<OntologyInconsistencyEntailmentInference> singleton(
                                new TopObjectPropertyInBottomEntailsOntologyInconsistencyImpl(
                                        conclusionFactory_
                                                .getSubPropertyChain(
                                                        topProperty_,
                                                        bottomProperty_))),
                        result);
                size++;
            }
            if (isOwlThingInconsistent_) {
                result = Operations.concat(Operations
                        .<OntologyInconsistencyEntailmentInference> singleton(
                                new OwlThingInconsistencyEntailsOntologyInconsistencyImpl(
                                        conclusionFactory_.getContradiction(
                                                owlThing_))),
                        result);
                size++;
            }
            if (atMostOne) {
                // Short-circuit: hand back just the first inference, if any.
                final Iterator<OntologyInconsistencyEntailmentInference> iter = result
                        .iterator();
                if (!iter.hasNext()) {
                    return Collections.emptyList();
                }
                // else
                return Collections.singleton(iter.next());
            }
            // else
            return Operations.getCollection(result, size);
        }
    };
}
|
java
|
{
"resource": ""
}
|
q176942
|
AbstractReasonerStage.preExecute
|
test
|
/**
 * One-time stage initialization: captures the reasoner's current worker
 * count. Idempotent — returns false when already initialized.
 *
 * @return {@code true} when initialization happened in this call,
 *         {@code false} when the stage was already initialized.
 */
@Override
public boolean preExecute() {
    if (isInitialized_)
        return false;
    LOGGER_.trace("{}: initialized", this);
    this.workerNo = reasoner.getNumberOfWorkers();
    return isInitialized_ = true;
}
|
java
|
{
"resource": ""
}
|
q176943
|
AbstractReasonerStage.invalidateRecursive
|
test
|
/**
 * Invalidates this stage and — breadth-first — every stage reachable
 * through post-stage links, stopping along branches whose stage was already
 * invalid (invalidate() returning false prunes the traversal).
 */
public void invalidateRecursive() {
    Queue<AbstractReasonerStage> pending = new LinkedList<AbstractReasonerStage>();
    pending.add(this);
    while (!pending.isEmpty()) {
        AbstractReasonerStage stage = pending.poll();
        if (stage.invalidate()) {
            // Only freshly invalidated stages propagate further.
            pending.addAll(stage.postStages_);
        }
    }
}
|
java
|
{
"resource": ""
}
|
q176944
|
InstanceTaxonomyState.getToAdd
|
test
|
/**
 * Returns the individuals queued for addition to the taxonomy; when a
 * taxonomy exists, the queue is pruned first and a size-bounded view is
 * returned.
 *
 * @return the (possibly pruned) pending-addition individuals.
 */
Collection<IndexedIndividual> getToAdd() {
    if (taxonomy_ == null) {
        // No individual can be pruned.
        return toAdd_;
    }
    // else
    final int size = pruneToAdd();
    /*
     * since getting the size of the queue is a linear operation, use the
     * computed size
     */
    return Operations.getCollection(toAdd_, size);
}
|
java
|
{
"resource": ""
}
|
q176945
|
InstanceTaxonomyState.getToRemove
|
test
|
/**
 * Returns the individuals queued for removal from the taxonomy; without a
 * taxonomy the queue is simply discarded (nothing to remove from), with one
 * it is pruned and a size-bounded view is returned.
 *
 * @return the (possibly pruned) pending-removal individuals.
 */
Collection<IndexedIndividual> getToRemove() {
    if (taxonomy_ == null) {// TODO: Never set taxonomy_ to null !!!
        // no individuals are in taxonomy
        toRemove_.clear();
        return Collections.emptyList();
    }
    // else
    final int size = pruneToRemove();
    /*
     * since getting the size of the queue is a linear operation, use the
     * computed size
     */
    return Operations.getCollection(toRemove_, size);
}
|
java
|
{
"resource": ""
}
|
q176946
|
ElkReasoner.unsupportedOwlApiMethod
|
test
|
/**
 * Builds (and logs, via the dedicated marker) the exception reported for
 * OWL API reasoner methods that ELK does not implement.
 *
 * @param method the name of the unsupported method.
 * @return the exception to throw to the OWL API caller.
 */
private static UnsupportedOperationException unsupportedOwlApiMethod(
        String method) {
    /*
     * TODO: The method String can be used to create more specific message
     * types, but with the current large amount of unsupported methods and
     * non-persistent settings for ignoring them, we better use only one
     * message type to make it easier to ignore them.
     */
    String message = "OWL API reasoner method is not implemented: " + method
            + ".";
    LoggerWrap.log(LOGGER_, LogLevel.WARN, MARKER_UNSUPPORTED_METHOD_,
            message);
    return new UnsupportedOperationException(message);
}
|
java
|
{
"resource": ""
}
|
q176947
|
LinearProbing.remove
|
test
|
/**
 * Deletes the element at position {@code pos} of the open-addressing table
 * by back-shifting: repeatedly moves the next displaced element of the
 * probe chain into the hole, until the chain's terminating {@code null} is
 * copied in.
 *
 * @param d the probing table.
 * @param pos the position of the element to delete.
 */
static <E> void remove(E[] d, int pos) {
    for (;;) {
        int next = getMovedPosition(d, pos);
        // Copy the shiftable element (or the terminating null) into the hole.
        E moved = d[pos] = d[next];
        if (moved == null)
            return;
        // else: "next" is the new hole; continue shifting.
        pos = next;
    }
}
|
java
|
{
"resource": ""
}
|
q176948
|
LinearProbing.remove
|
test
|
/**
 * Map variant of back-shift deletion: removes the entry at {@code pos},
 * keeping the key and value arrays in lockstep while shifting the probe
 * chain.
 *
 * @param k the key table.
 * @param v the value table, parallel to {@code k}.
 * @param pos the position of the entry to delete.
 */
static <K, V> void remove(K[] k, V[] v, int pos) {
    for (;;) {
        int next = getMovedPosition(k, pos);
        // Shift key and value together so the arrays stay consistent.
        K moved = k[pos] = k[next];
        v[pos] = v[next];
        if (moved == null)
            return;
        // else: continue with the new hole.
        pos = next;
    }
}
|
java
|
{
"resource": ""
}
|
q176949
|
LinearProbing.getMovedPosition
|
test
|
/**
 * Finds the position of the next element (after {@code del}) that must be
 * shifted into the hole at {@code del} to preserve the linear-probing
 * invariant, or the position of the terminating {@code null} when the
 * chain ends.
 *
 * @param d the probing table.
 * @param del the position of the hole being filled.
 * @return the position of the element (or null) to move into the hole.
 */
static <E> int getMovedPosition(E[] d, int del) {
    int j = del;
    for (;;) {
        if (++j == d.length)
            j = 0;
        // invariant: interval ]del, j] contains only non-null elements
        // whose index is in ]del, j]
        E test = d[j];
        if (test == null)
            return j;
        int k = getIndex(test, d.length);
        // check if k is in ]del, j] (this interval can wrap over)
        if ((del < j) ? (del < k) && (k <= j) : (del < k) || (k <= j))
            // the test element should not be shifted
            continue;
        // else it should be shifted
        return j;
    }
}
|
java
|
{
"resource": ""
}
|
q176950
|
LinearProbing.contains
|
test
|
/**
 * Tests whether the open-addressing table {@code d} contains {@code o}.
 *
 * @param d the probing table.
 * @param o the element to look for.
 * @return true when the element is present.
 */
static <E> boolean contains(E[] d, Object o) {
    // getPosition lands either on the element itself or on the empty slot
    // terminating its probe sequence.
    return d[getPosition(d, o)] != null;
}
|
java
|
{
"resource": ""
}
|
q176951
|
LinearProbing.add
|
test
|
/**
 * Inserts {@code e} into the open-addressing table unless an equal element
 * is already present.
 *
 * @param d the probing table.
 * @param e the element to insert.
 * @return true when the element was inserted, false when already present.
 */
static <E> boolean add(E[] d, E e) {
    final int slot = getPosition(d, e);
    if (d[slot] != null) {
        // the element is already there
        return false;
    }
    d[slot] = e;
    return true;
}
|
java
|
{
"resource": ""
}
|
q176952
|
CachedIndexedComplexClassExpressionImpl.checkOccurrenceNumbers
|
test
|
/**
 * Sanity check on the occurrence counters: traces the current values and
 * fails fast when any counter has gone negative, which would indicate an
 * indexing bug (an un-indexing without a matching indexing).
 *
 * @throws ElkUnexpectedIndexingException when a counter is negative.
 */
public final void checkOccurrenceNumbers() {
    if (LOGGER_.isTraceEnabled())
        LOGGER_.trace(toString() + " occurences: "
            + printOccurrenceNumbers());
    if (positiveOccurrenceNo < 0 || negativeOccurrenceNo < 0)
        throw new ElkUnexpectedIndexingException(toString()
            + " has a negative occurrence: " + printOccurrenceNumbers());
}
|
java
|
{
"resource": ""
}
|
q176953
|
ClassConclusionTimer.add
|
test
|
/**
 * Accumulates the per-conclusion timing counters of the given timer into
 * this one; synchronized so concurrent workers can merge their thread-local
 * timers into a shared aggregate.
 *
 * @param timer the timer whose counters are added to this one.
 */
public synchronized void add(ClassConclusionTimer timer) {
    this.timeComposedSubsumers += timer.timeComposedSubsumers;
    this.timeDecomposedSubsumers += timer.timeDecomposedSubsumers;
    this.timeBackwardLinks += timer.timeBackwardLinks;
    this.timeForwardLinks += timer.timeForwardLinks;
    this.timeContradictions += timer.timeContradictions;
    this.timePropagations += timer.timePropagations;
    this.timeDisjointSubsumers += timer.timeDisjointSubsumers;
    this.timeContextInitializations += timer.timeContextInitializations;
    this.timeSubContextInitializations += timer.timeSubContextInitializations;
}
|
java
|
{
"resource": ""
}
|
q176954
|
RuleApplicationTimer.add
|
test
|
/**
 * Accumulates all per-rule timing counters of the given timer into this
 * one, field by field. Synchronized so worker threads can merge their
 * thread-local timers into a shared aggregate safely.
 *
 * @param timer the timer whose values are added to this timer
 */
public synchronized void add(RuleApplicationTimer timer) {
    timeOwlThingContextInitRule += timer.timeOwlThingContextInitRule;
    timeRootContextInitializationRule += timer.timeRootContextInitializationRule;
    timeDisjointSubsumerFromMemberRule += timer.timeDisjointSubsumerFromMemberRule;
    timeContradictionFromNegationRule += timer.timeContradictionFromNegationRule;
    timeObjectIntersectionFromFirstConjunctRule += timer.timeObjectIntersectionFromFirstConjunctRule;
    timeObjectIntersectionFromSecondConjunctRule += timer.timeObjectIntersectionFromSecondConjunctRule;
    timeSuperClassFromSubClassRule += timer.timeSuperClassFromSubClassRule;
    timePropagationFromExistentialFillerRule += timer.timePropagationFromExistentialFillerRule;
    timeObjectUnionFromDisjunctRule += timer.timeObjectUnionFromDisjunctRule;
    timeBackwardLinkChainFromBackwardLinkRule += timer.timeBackwardLinkChainFromBackwardLinkRule;
    timeReflexiveBackwardLinkCompositionRule += timer.timeReflexiveBackwardLinkCompositionRule;
    timeNonReflexiveBackwardLinkCompositionRule += timer.timeNonReflexiveBackwardLinkCompositionRule;
    timeSubsumerBackwardLinkRule += timer.timeSubsumerBackwardLinkRule;
    timeContradictionOverBackwardLinkRule += timer.timeContradictionOverBackwardLinkRule;
    timeContradictionPropagationRule += timer.timeContradictionPropagationRule;
    timeContradictionCompositionRule += timer.timeContradictionCompositionRule;
    timeIndexedObjectIntersectionOfDecomposition += timer.timeIndexedObjectIntersectionOfDecomposition;
    timeIndexedObjectSomeValuesFromDecomposition += timer.timeIndexedObjectSomeValuesFromDecomposition;
    timeIndexedObjectComplementOfDecomposition += timer.timeIndexedObjectComplementOfDecomposition;
    timeIndexedObjectHasSelfDecomposition += timer.timeIndexedObjectHasSelfDecomposition;
    timeContradictionFromOwlNothingRule += timer.timeContradictionFromOwlNothingRule;
    timeSubsumerPropagationRule += timer.timeSubsumerPropagationRule;
    timePropagationInitializationRule += timer.timePropagationInitializationRule;
    timeBackwardLinkFromForwardLinkRule += timer.timeBackwardLinkFromForwardLinkRule;
    timeComposedFromDecomposedSubsumerRule += timer.timeComposedFromDecomposedSubsumerRule;
    timeIndexedClassDecompositionRule += timer.timeIndexedClassDecompositionRule;
    timeIndexedClassFromDefinitionRule += timer.timeIndexedClassFromDefinitionRule;
    timeEquivalentClassFirstFromSecondRule += timer.timeEquivalentClassFirstFromSecondRule;
    timeEquivalentClassSecondFromFirstRule += timer.timeEquivalentClassSecondFromFirstRule;
}
|
java
|
{
"resource": ""
}
|
q176955
|
ArrayHashMap.putKeyValue
|
test
|
/**
 * Associates {@code value} with {@code key} in the parallel key/value
 * arrays using linear probing.
 *
 * @param keys the key slots
 * @param values the value slots, parallel to {@code keys}
 * @param key the key to store
 * @param value the value to store
 * @return the value previously mapped to {@code key}, or {@code null} if
 *         the key was not present
 */
private static <K, V> V putKeyValue(K[] keys, V[] values, K key, V value) {
    int slot = LinearProbing.getPosition(keys, key);
    if (keys[slot] != null) {
        // key already present: replace the value, hand back the old one
        V previous = values[slot];
        values[slot] = value;
        return previous;
    }
    // fresh key: occupy the free slot
    keys[slot] = key;
    values[slot] = value;
    return null;
}
|
java
|
{
"resource": ""
}
|
q176956
|
ArrayHashMap.removeEntry
|
test
|
/**
 * Removes the entry for {@code key} from the parallel key/value arrays.
 *
 * @param keys the key slots
 * @param values the value slots, parallel to {@code keys}
 * @param key the key to remove
 * @return the value that was mapped to {@code key}, or {@code null} if
 *         the key was not present
 */
private static <K, V> V removeEntry(K[] keys, V[] values, Object key) {
    int slot = LinearProbing.getPosition(keys, key);
    if (keys[slot] == null) {
        // no such key
        return null;
    }
    V removed = values[slot];
    // delegate slot compaction to the probing helper
    LinearProbing.remove(keys, values, slot);
    return removed;
}
|
java
|
{
"resource": ""
}
|
q176957
|
ArrayHashMap.enlarge
|
test
|
/**
 * Doubles the capacity of the backing arrays and re-inserts every entry.
 * A full rehash is required because slot positions depend on the array
 * length.
 *
 * @throws IllegalArgumentException
 *             if the map is already at {@code LinearProbing.MAXIMUM_CAPACITY}
 */
private void enlarge() {
    int oldCapacity = keys.length;
    if (oldCapacity == LinearProbing.MAXIMUM_CAPACITY)
        throw new IllegalArgumentException(
                "Map cannot grow beyond capacity: "
                        + LinearProbing.MAXIMUM_CAPACITY);
    K oldKeys[] = keys;
    V oldValues[] = values;
    int newCapacity = oldCapacity << 1;
    @SuppressWarnings("unchecked")
    K newKeys[] = (K[]) new Object[newCapacity];
    @SuppressWarnings("unchecked")
    V newValues[] = (V[]) new Object[newCapacity];
    // re-insert all existing entries into the larger table
    for (int i = 0; i < oldCapacity; i++) {
        K key = oldKeys[i];
        if (key != null)
            putKeyValue(newKeys, newValues, key, oldValues[i]);
    }
    this.keys = newKeys;
    this.values = newValues;
}
|
java
|
{
"resource": ""
}
|
q176958
|
ArrayHashMap.shrink
|
test
|
/**
 * Halves the capacity of the backing arrays and re-inserts every entry.
 * Does nothing if the map is already at (or below) the default initial
 * capacity, so the table never shrinks below its minimum size.
 */
private void shrink() {
    int oldCapacity = keys.length;
    if (oldCapacity <= LinearProbing.DEFAULT_INITIAL_CAPACITY)
        return;
    K oldKeys[] = keys;
    V oldValues[] = values;
    int newCapacity = oldCapacity >> 1;
    @SuppressWarnings("unchecked")
    K newKeys[] = (K[]) new Object[newCapacity];
    @SuppressWarnings("unchecked")
    V newValues[] = (V[]) new Object[newCapacity];
    // re-insert all existing entries into the smaller table
    for (int i = 0; i < oldCapacity; i++) {
        K key = oldKeys[i];
        if (key != null)
            putKeyValue(newKeys, newValues, key, oldValues[i]);
    }
    this.keys = newKeys;
    this.values = newValues;
}
|
java
|
{
"resource": ""
}
|
q176959
|
ConfigurationFactory.saveConfiguration
|
test
|
/**
 * Saves the given configuration to the given file, merging it with any
 * parameters already stored there: parameters on disk are loaded first
 * and then overridden by the parameters of {@code config}.
 * If the file cannot be loaded (e.g. it does not exist), it is simply
 * overwritten with the parameters of {@code config}.
 *
 * @param configOnDisk the file to write to (may already hold parameters)
 * @param config the configuration whose parameters take precedence
 * @throws ConfigurationException if the configuration cannot be processed
 * @throws IOException if writing the file fails
 */
public void saveConfiguration(File configOnDisk, BaseConfiguration config)
        throws ConfigurationException, IOException {
    /*
     * Unfortunately, we can't directly write the config on disk because the
     * parameters in it may be just a subset of those on disk. So we load it
     * first (alternatively one may use a singleton, which I typically try
     * to avoid). It should work reasonably well unless there're too many
     * parameters (in which case we should think of a mini key-value store).
     */
    InputStream stream = null;
    BaseConfiguration loadedConfig = null;
    Properties diskProps = new Properties();
    try {
        stream = new FileInputStream(configOnDisk);
        loadedConfig = getConfiguration(stream, "", config.getClass());
        // copy parameters
        copyParameters(loadedConfig, diskProps);
    } catch (Throwable e) {
        // best-effort: a missing/corrupt file just means nothing to merge
        LOGGER_.info("Overwriting configuration since it can't be loaded (perhaps doesn't exist?)");
    } finally {
        IOUtils.closeQuietly(stream);
    }
    // parameters of the in-memory config override those loaded from disk
    copyParameters(config, diskProps);
    // now save it to the file
    saveProperties(diskProps, configOnDisk);
}
|
java
|
{
"resource": ""
}
|
q176960
|
OreTaxonomyPrinter.printDeclarations
|
test
|
/**
 * Appends a Declaration axiom (in OWL functional-style syntax) for every
 * class in the taxonomy except {@code owl:Thing} and {@code owl:Nothing},
 * one per line, ordered by {@code CLASS_COMPARATOR}.
 *
 * @param classTaxonomy the taxonomy whose classes are declared
 * @param objectFactory factory used to create the declaration axioms
 * @param writer the output to append to
 * @throws IOException if the writer fails
 */
protected static void printDeclarations(Taxonomy<ElkClass> classTaxonomy,
        ElkObject.Factory objectFactory, Appendable writer)
        throws IOException {
    // rough capacity guess: nodes may contain several equivalent classes
    List<ElkClass> classes = new ArrayList<ElkClass>(classTaxonomy
            .getNodes().size() * 2);
    for (TaxonomyNode<ElkClass> classNode : classTaxonomy.getNodes()) {
        for (ElkClass clazz : classNode) {
            if (!clazz.getIri().equals(PredefinedElkIris.OWL_THING)
                    && !clazz.getIri()
                            .equals(PredefinedElkIris.OWL_NOTHING)) {
                classes.add(clazz);
            }
        }
    }
    Collections.sort(classes, CLASS_COMPARATOR);
    for (ElkClass clazz : classes) {
        ElkDeclarationAxiom decl = objectFactory.getDeclarationAxiom(clazz);
        OwlFunctionalStylePrinter.append(writer, decl, true);
        writer.append('\n');
    }
}
|
java
|
{
"resource": ""
}
|
q176961
|
TaxonomyNodeUtils.getAllInstanceNodes
|
test
|
/**
 * Collects the instance nodes of the given type node and of all type
 * nodes reachable from it through (transitive) direct sub-nodes, i.e.
 * all direct and indirect instances of the type.
 *
 * @param node the type node whose instances are collected
 * @return the set of all instance nodes of {@code node} and its sub-nodes
 */
public static <T extends ElkEntity, I extends ElkEntity, TN extends GenericTypeNode<T, I, TN, IN>, IN extends GenericInstanceNode<T, I, TN, IN>>
Set<? extends IN> getAllInstanceNodes(final GenericTypeNode<T, I, TN, IN> node) {
    // traverse downwards via sub-nodes, harvesting instance nodes on the way
    return TaxonomyNodeUtils.collectFromAllReachable(
            node.getDirectSubNodes(),
            node.getDirectInstanceNodes(),
            new Operations.Functor<GenericTypeNode<T, I, TN, IN>, Set<? extends GenericTypeNode<T, I, TN, IN>>>() {
                @Override
                public Set<? extends TN> apply(final GenericTypeNode<T, I, TN, IN> node) {
                    return node.getDirectSubNodes();
                }
            },
            new Operations.Functor<GenericTypeNode<T, I, TN, IN>, Set<? extends IN>>() {
                @Override
                public Set<? extends IN> apply(final GenericTypeNode<T, I, TN, IN> node) {
                    return node.getDirectInstanceNodes();
                }
            });
}
|
java
|
{
"resource": ""
}
|
q176962
|
EntryCollection.clear
|
test
|
/**
 * Removes all elements from this collection by nulling every bucket.
 * Bumps the modification counter so active iterators fail fast.
 */
@Override
public void clear() {
    modCount++; // invalidate concurrent iterators
    final E[] tab = buckets;
    int i = tab.length;
    while (--i >= 0) {
        tab[i] = null;
    }
    size = 0;
}
|
java
|
{
"resource": ""
}
|
q176963
|
HashGenerator.combineMultisetHash
|
test
|
/**
 * Combines hash codes in an order-independent (multiset) fashion by
 * summation, optionally applying the final list-hash mixing step.
 *
 * @param finalize whether to apply the final mixing via
 *            {@code combineListHash}
 * @param hashes the hash codes to combine
 * @return the combined hash code
 */
public static int combineMultisetHash(boolean finalize, int... hashes) {
    int sum = 0;
    for (int i = 0; i < hashes.length; i++) {
        sum += hashes[i]; // plain addition keeps the result order-independent
    }
    return finalize ? combineListHash(sum) : sum;
}
|
java
|
{
"resource": ""
}
|
q176964
|
HashGenerator.combineListHash
|
test
|
/**
 * Combines hash codes in an order-dependent fashion using Jenkins-style
 * one-at-a-time shift-and-xor mixing with a final avalanche step.
 *
 * @param hashes the hash codes to combine, in order
 * @return the combined hash code
 */
public static int combineListHash(int... hashes) {
    int h = 0;
    // per-element mixing: each hash perturbs the running value
    for (int i = 0; i < hashes.length; i++) {
        h += hashes[i];
        h += h << 10;
        h ^= h >> 6;
    }
    // final avalanche so trailing bits spread across the word
    h += h << 3;
    h ^= h >> 11;
    h += h << 15;
    return h;
}
|
java
|
{
"resource": ""
}
|
q176965
|
IOUtils.copy
|
test
|
/**
 * Copies all bytes from {@code input} to {@code output} through buffered
 * wrappers and returns the number of bytes copied.
 * <p>
 * NOTE: the wrappers are closed in the {@code finally} block, which also
 * closes the caller's underlying streams.
 *
 * @param input the stream to read from (closed on return)
 * @param output the stream to write to (closed on return)
 * @return the total number of bytes copied
 * @throws IOException if reading or writing fails
 */
public static int copy(InputStream input, OutputStream output)
        throws IOException {
    final byte[] chunk = new byte[BUFFER_SIZE];
    final BufferedInputStream source = new BufferedInputStream(input, BUFFER_SIZE);
    final BufferedOutputStream sink = new BufferedOutputStream(output, BUFFER_SIZE);
    int copied = 0;
    try {
        for (;;) {
            final int read = source.read(chunk, 0, BUFFER_SIZE);
            if (read == -1)
                break; // end of stream
            sink.write(chunk, 0, read);
            copied += read;
        }
        sink.flush();
    } finally {
        IOUtils.closeQuietly(source);
        IOUtils.closeQuietly(sink);
    }
    return copied;
}
|
java
|
{
"resource": ""
}
|
q176966
|
IncompletenessManager.getReasonerIncompletenessMonitor
|
test
|
/**
 * Builds a combined incompleteness monitor out of the monitor for stated
 * axioms plus any additional monitors, and wraps it so that a WARN
 * message is emitted whenever new incompleteness reasons are logged.
 *
 * @param additionalMonitors extra monitors to combine with the stated-axioms one
 * @return the combined, warning-emitting incompleteness monitor
 */
public IncompletenessMonitor getReasonerIncompletenessMonitor(
        final IncompletenessMonitor... additionalMonitors) {
    final List<IncompletenessMonitor> monitors = new ArrayList<IncompletenessMonitor>(
            additionalMonitors.length + 1);
    // the stated-axioms monitor always participates
    monitors.add(getIncompletenessDueToStatedAxiomsMonitor());
    monitors.addAll(Arrays.asList(additionalMonitors));
    return new DelegatingIncompletenessMonitor(monitors) {
        @Override
        public boolean logNewIncompletenessReasons(final Logger logger) {
            final boolean result = super.logNewIncompletenessReasons(
                    logger);
            if (result) {
                // surface a summary warning when any delegate logged details
                LoggerWrap.log(logger, LogLevel.WARN, MARKER_,
                        "Reasoning may be incomplete! See log level INFO for more details.");
            }
            return result;
        }
    };
}
|
java
|
{
"resource": ""
}
|
q176967
|
TaxonomyPrinter.processTaxomomy
|
test
|
/**
 * Writes the whole taxonomy to {@code writer}: first declaration axioms
 * for all members (except the canonical top and bottom members), then,
 * for each node, axioms relating its equivalent members and direct
 * super-members. All output is ordered by the taxonomy's key comparator
 * to keep the rendering deterministic.
 *
 * @param taxonomy the taxonomy to print
 * @param writer the output to append to
 * @throws IOException if the writer fails
 */
protected static <T extends ElkEntity> void processTaxomomy(
        final Taxonomy<T> taxonomy, final Appendable writer)
        throws IOException {
    final ElkObject.Factory factory = new ElkObjectEntityRecyclingFactory();
    // Declarations.
    final List<T> members = new ArrayList<T>(
            taxonomy.getNodes().size() * 2);
    for (final TaxonomyNode<T> node : taxonomy.getNodes()) {
        for (final T member : node) {
            // TODO: this should check whether IRIs are predefined!
            if (!member.getIri()
                    .equals(taxonomy.getTopNode().getCanonicalMember()
                            .getIri())
                    && !member.getIri().equals(taxonomy.getBottomNode()
                            .getCanonicalMember().getIri())) {
                members.add(member);
            }
        }
    }
    Collections.sort(members, taxonomy.getKeyProvider().getComparator());
    printDeclarations(members, factory, writer);
    // Relations.
    // one canonical member per node, kept sorted for deterministic output
    final TreeSet<T> canonicalMembers = new TreeSet<T>(
            taxonomy.getKeyProvider().getComparator());
    for (final TaxonomyNode<T> node : taxonomy.getNodes()) {
        canonicalMembers.add(node.getCanonicalMember());
    }
    for (final T canonicalMember : canonicalMembers) {
        final TaxonomyNode<T> node = taxonomy.getNode(canonicalMember);
        final ArrayList<T> orderedEquivalentMembers = new ArrayList<T>(
                node.size());
        for (final T member : node) {
            orderedEquivalentMembers.add(member);
        }
        Collections.sort(orderedEquivalentMembers,
                taxonomy.getKeyProvider().getComparator());
        // direct super-members, also sorted
        final TreeSet<T> orderedSuperMembers = new TreeSet<T>(
                taxonomy.getKeyProvider().getComparator());
        for (final TaxonomyNode<T> superNode : node.getDirectSuperNodes()) {
            orderedSuperMembers.add(superNode.getCanonicalMember());
        }
        printMemberAxioms(canonicalMember, orderedEquivalentMembers,
                orderedSuperMembers, taxonomy, factory, writer);
    }
}
|
java
|
{
"resource": ""
}
|
q176968
|
ConcurrentComputationWithInputs.submit
|
test
|
/**
 * Attempts to submit the given input for processing.
 *
 * @param input the job to enqueue
 * @return {@code false} if the computation has been terminated or
 *         interrupted (the input is not enqueued), {@code true} otherwise
 * @throws InterruptedException
 *             if the current thread is interrupted while waiting for
 *             space in the buffer
 */
public synchronized boolean submit(I input) throws InterruptedException {
    if (termination || isInterrupted())
        return false;
    // NOTE(review): put() may block while holding this object's monitor;
    // presumably consumers drain buffer_ without needing this lock — confirm
    buffer_.put(input);
    return true;
}
|
java
|
{
"resource": ""
}
|
q176969
|
OwlFunctionalStylePrinter.append
|
test
|
/**
 * Appends the functional-style rendering of {@code elkObject} to
 * {@code appender}. Delegates to the three-argument overload with the
 * boolean flag set to {@code false}.
 *
 * @param appender the output to append to
 * @param elkObject the object to print
 * @throws IOException if the appender fails
 */
public static void append(Appendable appender, ElkObject elkObject)
        throws IOException {
    append(appender, elkObject, false);
}
|
java
|
{
"resource": ""
}
|
q176970
|
ClassExpressionQueryState.markNotComputed
|
test
|
/**
 * Marks the query state of the given class expression as not computed,
 * detaching and clearing its taxonomy node (if any).
 *
 * @param queryClass the indexed query class expression
 * @return the state that was marked not computed, or {@code null} if the
 *         expression has no state or was not computed in the first place
 */
private QueryState markNotComputed(
        final IndexedClassExpression queryClass) {
    final QueryState state = indexed_.get(queryClass);
    if (state == null || !state.isComputed) {
        return null;
    }
    state.isComputed = false;
    if (state.node != null) {
        // drop stale node associations before clearing the reference
        removeAllRelated(queryClass, state.node);
        state.node = null;
    }
    return state;
}
|
java
|
{
"resource": ""
}
|
q176971
|
IndividualNode.addDirectTypeNode
|
test
|
/**
 * Registers the given node as a direct type of this individual node.
 * Synchronized because taxonomy construction may add types concurrently.
 *
 * @param typeNode the direct type node to add
 */
@Override
public synchronized void addDirectTypeNode(final UTN typeNode) {
    LOGGER_.trace("{}: new direct type-node {}", this, typeNode);
    directTypeNodes_.add(typeNode);
}
|
java
|
{
"resource": ""
}
|
q176972
|
AbstractMatch.checkChainMatch
|
test
|
/**
 * Validates that {@code startPos} is a legal position within the given
 * property chain: for a proper chain it must index one of its property
 * expressions; for a single property expression it must be {@code 0}.
 *
 * @param fullChain the chain (or single property expression) to check
 * @param startPos the claimed start position within the chain
 * @throws IllegalArgumentException if the position is out of range
 */
protected static void checkChainMatch(
        final ElkSubObjectPropertyExpression fullChain,
        final int startPos) {
    // verifies that start position exists in full chain
    fullChain.accept(new ElkSubObjectPropertyExpressionVisitor<Void>() {
        void fail() {
            throw new IllegalArgumentException(fullChain + ", " + startPos);
        }
        // a single property expression only has position 0
        Void defaultVisit(ElkObjectPropertyExpression expression) {
            if (startPos != 0) {
                fail();
            }
            return null;
        }
        @Override
        public Void visit(ElkObjectPropertyChain expression) {
            // a proper chain: position must index one of its members
            if (startPos < 0 || startPos >= expression
                    .getObjectPropertyExpressions().size())
                fail();
            return null;
        }
        @Override
        public Void visit(ElkObjectInverseOf expression) {
            return defaultVisit(expression);
        }
        @Override
        public Void visit(ElkObjectProperty expression) {
            return defaultVisit(expression);
        }
    });
}
|
java
|
{
"resource": ""
}
|
q176973
|
Operations.filter
|
test
|
/**
 * Returns a lazy, read-only view of {@code input} restricted to the
 * elements satisfying {@code condition}, with a caller-supplied size.
 * <p>
 * The condition must be consistent with {@code equals()}: elements that
 * are equal must either all satisfy or all violate it. The supplied
 * {@code size} is trusted, not verified. All mutating operations throw
 * {@link UnsupportedOperationException}.
 *
 * @param input the set to filter
 * @param condition the condition elements of the view must satisfy
 * @param size the number of elements of {@code input} satisfying the condition
 * @return an unmodifiable set view of the satisfying elements
 */
public static <T> Set<T> filter(final Set<? extends T> input,
        final Condition<? super T> condition, final int size) {
    return new Set<T>() {
        @Override
        public int size() {
            return size;
        }
        @Override
        public boolean isEmpty() {
            return size == 0;
        }
        @Override
        @SuppressWarnings("unchecked")
        public boolean contains(Object o) {
            if (!input.contains(o))
                return false;
            T elem = null;
            try {
                elem = (T) o;
            } catch (ClassCastException cce) {
                return false;
            }
            /*
             * here's why the condition must be consistent with equals(): we
             * check it on the passed element while we really need to check
             * it on the element which is in the underlying set (and is
             * equal to o according to equals()). However, as long as the
             * condition is consistent, the result will be the same.
             */
            return condition.holds(elem);
        }
        @Override
        public Iterator<T> iterator() {
            return filter(input, condition).iterator();
        }
        @Override
        public Object[] toArray() {
            Object[] result = new Object[size];
            int i = 0;
            for (Object o : filter(input, condition)) {
                result[i++] = o;
            }
            return result;
        }
        @Override
        public <S> S[] toArray(S[] a) {
            throw new UnsupportedOperationException();
        }
        @Override
        public boolean add(T e) {
            throw new UnsupportedOperationException();
        }
        @Override
        public boolean remove(Object o) {
            throw new UnsupportedOperationException();
        }
        @Override
        public boolean containsAll(Collection<?> c) {
            // FIX: the original returned false as soon as an element WAS
            // contained; per the Set contract, containsAll must return
            // false only when some element is MISSING.
            for (Object o : c) {
                if (!contains(o))
                    return false;
            }
            return true;
        }
        @Override
        public boolean addAll(Collection<? extends T> c) {
            throw new UnsupportedOperationException();
        }
        @Override
        public boolean retainAll(Collection<?> c) {
            throw new UnsupportedOperationException();
        }
        @Override
        public boolean removeAll(Collection<?> c) {
            throw new UnsupportedOperationException();
        }
        @Override
        public void clear() {
            throw new UnsupportedOperationException();
        }
    };
}
|
java
|
{
"resource": ""
}
|
q176974
|
Operations.map
|
test
|
/**
 * Returns a lazy set view obtained by applying {@code functor} to every
 * element of {@code input}. Membership tests go through
 * {@code functor.deapply} to map a candidate back to the input domain.
 * The view assumes the functor is injective on {@code input} (its size
 * is reported as the input's size).
 *
 * @param input the set to transform
 * @param functor the (invertible) element transformation
 * @return a set view of the transformed elements
 */
public static <I, O> Set<O> map(final Set<? extends I> input,
        final FunctorEx<I, O> functor) {
    return new AbstractSet<O>() {
        @Override
        public Iterator<O> iterator() {
            return new MapIterator<I, O>(input.iterator(), functor);
        }
        @Override
        public boolean contains(Object o) {
            // map o back to the input domain; null means "not representable"
            I element = functor.deapply(o);
            return element == null ? false : input.contains(element);
        }
        @Override
        public int size() {
            return input.size();
        }
    };
}
|
java
|
{
"resource": ""
}
|
q176975
|
ArraySlicedSet.add
|
test
|
/**
 * Adds the element to slice {@code s} of this sliced set.
 *
 * @param s the slice index (encoded as bit {@code 1 << s} in the masks)
 * @param e the element to add; must not be {@code null}
 * @return {@code true} if the slice changed, {@code false} if the element
 *         was already present in that slice
 * @throws NullPointerException if {@code e} is {@code null}
 */
public boolean add(int s, E e) {
    if (e == null)
        throw new NullPointerException();
    int mask = (1 << s);
    int oldMask = addMask(logs, data, masks, e, mask);
    int newMask = oldMask | mask;
    if (newMask == oldMask)
        return false;
    // a zero old mask means the element is new to the whole table;
    // grow when the occupancy threshold is reached
    else if (oldMask == 0
            && ++occupied == LinearProbing.getUpperSize(data.length))
        enlarge();
    sizes[s]++;
    return true;
}
|
java
|
{
"resource": ""
}
|
q176976
|
ArraySlicedSet.remove
|
test
|
/**
 * Removes the object from slice {@code s} of this sliced set.
 *
 * @param s the slice index (encoded as bit {@code 1 << s} in the masks)
 * @param o the object to remove; must not be {@code null}
 * @return {@code true} if the slice changed, {@code false} if the object
 *         was not present in that slice
 * @throws NullPointerException if {@code o} is {@code null}
 */
public boolean remove(int s, Object o) {
    if (o == null)
        throw new NullPointerException();
    int mask = 1 << s;
    int oldMask = removeMask(logs, data, masks, o, mask);
    int newMask = oldMask & ~mask;
    if (newMask == oldMask)
        return false;
    // else
    // a zero new mask means the element left its last slice;
    // shrink when occupancy drops below the threshold
    if (newMask == 0
            && --occupied == LinearProbing.getLowerSize(data.length))
        shrink();
    sizes[s]--;
    return true;
}
|
java
|
{
"resource": ""
}
|
q176977
|
ClassConclusionCounter.add
|
test
|
/**
 * Accumulates all conclusion counters of the given counter into this one.
 * Synchronized so worker threads can merge their thread-local counters
 * into a shared aggregate safely.
 *
 * @param counter the counter whose values are added to this counter
 */
public synchronized void add(ClassConclusionCounter counter) {
    this.countSubClassInclusionDecomposed += counter.countSubClassInclusionDecomposed;
    this.countSubClassInclusionComposed += counter.countSubClassInclusionComposed;
    this.countBackwardLink += counter.countBackwardLink;
    this.countForwardLink += counter.countForwardLink;
    this.countContradiction += counter.countContradiction;
    this.countPropagation += counter.countPropagation;
    this.countDisjointSubsumer += counter.countDisjointSubsumer;
    this.countContextInitialization += counter.countContextInitialization;
    this.countSubContextInitialization += counter.countSubContextInitialization;
}
|
java
|
{
"resource": ""
}
|
q176978
|
Statistics.logMemoryUsage
|
test
|
/**
 * Logs the JVM's current used/total/max heap figures, in megabytes, at
 * the given priority. The {@link Runtime} queries are skipped entirely
 * when the target level is disabled.
 *
 * @param logger the logger to write to
 * @param priority the level at which to log
 */
public static void logMemoryUsage(Logger logger, LogLevel priority) {
    if (!LoggerWrap.isEnabledFor(logger, priority)) {
        return; // avoid querying the Runtime when the message is dropped
    }
    // Getting the runtime reference from system
    final Runtime runtime = Runtime.getRuntime();
    final long used = (runtime.totalMemory() - runtime.freeMemory()) / megaBytes;
    final long total = runtime.totalMemory() / megaBytes;
    final long max = runtime.maxMemory() / megaBytes;
    LoggerWrap.log(logger, priority,
            "Memory (MB) Used/Total/Max: " + used + "/" + total + "/" + max);
}
|
java
|
{
"resource": ""
}
|
q176979
|
Reasoner.setConfigurationOptions
|
test
|
/**
 * Applies the given reasoner configuration: sets the number of worker
 * threads and whether incremental mode is allowed.
 *
 * @param config the configuration to read the parameters from
 */
public synchronized void setConfigurationOptions(
        ReasonerConfiguration config) {
    this.workerNo_ = config.getParameterAsInt(
            ReasonerConfiguration.NUM_OF_WORKING_THREADS);
    setAllowIncrementalMode(config.getParameterAsBoolean(
            ReasonerConfiguration.INCREMENTAL_MODE_ALLOWED));
}
|
java
|
{
"resource": ""
}
|
q176980
|
Reasoner.shutdown
|
test
|
/**
 * Shuts the reasoner down.
 * <p>
 * The current implementation has no resources to release and therefore
 * always succeeds immediately; {@code timeout} and {@code unit} are
 * accepted for interface compatibility but are not used.
 *
 * @param timeout ignored by this implementation
 * @param unit ignored by this implementation
 * @return always {@code true}
 * @throws InterruptedException declared for interface compatibility;
 *             never thrown by this implementation
 */
public synchronized boolean shutdown(long timeout, TimeUnit unit)
        throws InterruptedException {
    // FIX: the original's failure branch was unreachable (success was a
    // constant true); the dead code is removed, behavior is unchanged
    LOGGER_.info("ELK reasoner has shut down");
    return true;
}
|
java
|
{
"resource": ""
}
|
q176981
|
StatisticsPrinter.printHeader
|
test
|
/**
 * Prints the column header of the statistics table at DEBUG level,
 * framed by separator lines, padding the header cells with spaces.
 */
public void printHeader() {
    printSeparator();
    addPadding(' ', headerParams_);
    logger_.debug(String.format(headerFormat_, headerParams_));
    printSeparator();
}
|
java
|
{
"resource": ""
}
|
q176982
|
StatisticsPrinter.print
|
test
|
/**
 * Prints one row of the statistics table at DEBUG level, padding the
 * value cells with dots for readability.
 *
 * @param values the cell values of the row, one per column
 */
public void print(Object... values) {
    addPadding('.', values);
    logger_.debug(String.format(valuesFormat_, values));
}
|
java
|
{
"resource": ""
}
|
q176983
|
StatisticsPrinter.getString
|
test
|
/**
 * Produces a string consisting of the character {@code c} repeated
 * {@code n} times.
 *
 * @param c the character to repeat
 * @param n the number of repetitions; non-positive values yield ""
 * @return the string of {@code n} copies of {@code c}
 */
static String getString(char c, int n) {
    // presize the builder to avoid repeated internal array growth;
    // Math.max guards against a negative capacity for n < 0 (the loop
    // below still yields "" in that case, exactly like the original)
    StringBuilder sb = new StringBuilder(Math.max(n, 0));
    for (int i = 0; i < n; i++) {
        sb.append(c);
    }
    return sb.toString();
}
|
java
|
{
"resource": ""
}
|
q176984
|
RuleCounter.add
|
test
|
/**
 * Accumulates all per-rule application counters of the given counter into
 * this one, field by field. Synchronized so worker threads can merge
 * their thread-local counters into a shared aggregate safely.
 *
 * @param counter the counter whose values are added to this counter
 */
public synchronized void add(RuleCounter counter) {
    countOwlThingContextInitRule += counter.countOwlThingContextInitRule;
    countRootContextInitializationRule += counter.countRootContextInitializationRule;
    countDisjointSubsumerFromMemberRule += counter.countDisjointSubsumerFromMemberRule;
    countContradictionFromNegationRule += counter.countContradictionFromNegationRule;
    countObjectIntersectionFromFirstConjunctRule += counter.countObjectIntersectionFromFirstConjunctRule;
    countObjectIntersectionFromSecondConjunctRule += counter.countObjectIntersectionFromSecondConjunctRule;
    countSuperClassFromSubClassRule += counter.countSuperClassFromSubClassRule;
    countPropagationFromExistentialFillerRule += counter.countPropagationFromExistentialFillerRule;
    countObjectUnionFromDisjunctRule += counter.countObjectUnionFromDisjunctRule;
    countBackwardLinkChainFromBackwardLinkRule += counter.countBackwardLinkChainFromBackwardLinkRule;
    countSubsumerBackwardLinkRule += counter.countSubsumerBackwardLinkRule;
    countContradictionOverBackwardLinkRule += counter.countContradictionOverBackwardLinkRule;
    countContradictionPropagationRule += counter.countContradictionPropagationRule;
    countContradictionCompositionRule += counter.countContradictionCompositionRule;
    countNonReflexiveBackwardLinkCompositionRule += counter.countNonReflexiveBackwardLinkCompositionRule;
    countIndexedObjectIntersectionOfDecomposition += counter.countIndexedObjectIntersectionOfDecomposition;
    countIndexedObjectSomeValuesFromDecomposition += counter.countIndexedObjectSomeValuesFromDecomposition;
    countIndexedObjectComplementOfDecomposition += counter.countIndexedObjectComplementOfDecomposition;
    countIndexedObjectHasSelfDecomposition += counter.countIndexedObjectHasSelfDecomposition;
    countContradictionFromOwlNothingRule += counter.countContradictionFromOwlNothingRule;
    countSubsumerPropagationRule += counter.countSubsumerPropagationRule;
    countReflexiveBackwardLinkCompositionRule += counter.countReflexiveBackwardLinkCompositionRule;
    countPropagationInitializationRule += counter.countPropagationInitializationRule;
    countBackwardLinkFromForwardLinkRule += counter.countBackwardLinkFromForwardLinkRule;
    countComposedFromDecomposedSubsumerRule += counter.countComposedFromDecomposedSubsumerRule;
    countIndexedClassDecompositionRule += counter.countIndexedClassDecompositionRule;
    countIndexedClassFromDefinitionRule += counter.countIndexedClassFromDefinitionRule;
    countEquivalentClassFirstFromSecondRule += counter.countEquivalentClassFirstFromSecondRule;
    countEquivalentClassSecondFromFirstRule += counter.countEquivalentClassSecondFromFirstRule;
}
|
java
|
{
"resource": ""
}
|
q176985
|
XhtmlResourceMessageConverter.writeResource
|
test
|
/**
 * Recursively renders a Spring HATEOAS object as XHTML: resources become
 * list items with their links, collections of resources become nested
 * unordered lists, and plain objects are delegated to
 * {@code writeObject}. A {@code null} object renders nothing.
 *
 * @param writer the XHTML writer to render into
 * @param object the resource / collection / plain object to render
 * @throws RuntimeException wrapping any failure during rendering
 */
private void writeResource(XhtmlWriter writer, Object object) {
    if (object == null) {
        return;
    }
    try {
        if (object instanceof Resource) {
            // single wrapped resource: item with content and links
            Resource<?> resource = (Resource<?>) object;
            writer.beginListItem();
            writeResource(writer, resource.getContent());
            writer.writeLinks(resource.getLinks());
            writer.endListItem();
        } else if (object instanceof Resources) {
            Resources<?> resources = (Resources<?>) object;
            // TODO set name using EVO see HypermediaSupportBeanDefinitionRegistrar
            writer.beginListItem();
            writer.beginUnorderedList();
            Collection<?> content = resources.getContent();
            writeResource(writer, content);
            writer.endUnorderedList();
            writer.writeLinks(resources.getLinks());
            writer.endListItem();
        } else if (object instanceof ResourceSupport) {
            ResourceSupport resource = (ResourceSupport) object;
            writer.beginListItem();
            writeObject(writer, resource);
            writer.writeLinks(resource.getLinks());
            writer.endListItem();
        } else if (object instanceof Collection) {
            // recurse into each element of a plain collection
            Collection<?> collection = (Collection<?>) object;
            for (Object item : collection) {
                writeResource(writer, item);
            }
        } else { // TODO: write li for simple objects in Resources Collection
            writeObject(writer, object);
        }
    } catch (Exception ex) {
        throw new RuntimeException("failed to transform object " + object, ex);
    }
}
|
java
|
{
"resource": ""
}
|
q176986
|
SpringActionDescriptor.getActionInputParameter
|
test
|
/**
 * Looks up an action input parameter by name: first among request
 * parameters, then among path variables, and finally by interpreting the
 * name as a bean property path on the types of the annotated input
 * parameters (using the property's write method).
 *
 * @param name the parameter name or bean property path
 * @return the matching parameter, or {@code null} if none is found
 */
@Override
public ActionInputParameter getActionInputParameter(String name) {
    ActionInputParameter ret = requestParams.get(name);
    if (ret == null) {
        ret = pathVariables.get(name);
    }
    if (ret == null) {
        for (ActionInputParameter annotatedParameter : getInputParameters()) {
            // TODO create ActionInputParameter for bean property at property path
            // TODO field access in addition to bean?
            PropertyDescriptor pd = getPropertyDescriptorForPropertyPath(name,
                    annotatedParameter.getParameterType());
            if (pd != null) {
                if (pd.getWriteMethod() != null) {
                    // read the current property value from the call value, if any
                    Object callValue = annotatedParameter.getValue();
                    Object propertyValue = null;
                    if (callValue != null) {
                        BeanWrapper beanWrapper = PropertyAccessorFactory
                                .forBeanPropertyAccess(callValue);
                        propertyValue = beanWrapper.getPropertyValue(name);
                    }
                    ret = new SpringActionInputParameter(new MethodParameter(pd
                            .getWriteMethod(), 0), propertyValue);
                }
                // stop at the first input parameter exposing the property
                break;
            }
        }
    }
    return ret;
}
|
java
|
{
"resource": ""
}
|
q176987
|
SpringActionDescriptor.getPropertyDescriptorForPropertyPath
|
test
|
/**
 * Resolves a (possibly nested) bean property path such as
 * {@code "address.city"} against the given type, descending one segment
 * at a time.
 *
 * @param propertyPath the property path, segments separated per Spring's
 *            nested-property convention
 * @param propertyType the type to resolve the first segment against
 * @return the descriptor of the final path segment, or {@code null} if
 *         the (last) property does not exist
 */
PropertyDescriptor getPropertyDescriptorForPropertyPath(String propertyPath, Class<?> propertyType) {
    int pos = PropertyAccessorUtils.getFirstNestedPropertySeparatorIndex(propertyPath);
    // Handle nested properties recursively.
    if (pos > -1) {
        String nestedProperty = propertyPath.substring(0, pos);
        String nestedPath = propertyPath.substring(pos + 1);
        // NOTE(review): if an intermediate segment does not exist,
        // propertyDescriptor is null and the next line throws NPE — confirm
        // whether callers guarantee valid intermediate segments
        PropertyDescriptor propertyDescriptor = BeanUtils.getPropertyDescriptor(propertyType, nestedProperty);
        // BeanWrapperImpl nestedBw = getNestedBeanWrapper(nestedProperty);
        return getPropertyDescriptorForPropertyPath(nestedPath, propertyDescriptor.getPropertyType());
    } else {
        return BeanUtils.getPropertyDescriptor(propertyType, propertyPath);
    }
}
|
java
|
{
"resource": ""
}
|
q176988
|
SpringActionDescriptor.getRequiredParameters
|
test
|
/**
 * Collects all parameters a caller must supply: request parameters that
 * are explicitly marked required, plus every path variable (path
 * variables are always required). Request bodies are not included.
 *
 * @return a fresh map from parameter name to its descriptor
 */
@Override
public Map<String, ActionInputParameter> getRequiredParameters() {
    final Map<String, ActionInputParameter> required =
            new HashMap<String, ActionInputParameter>();
    // request parameters contribute only when explicitly required
    for (Map.Entry<String, ActionInputParameter> entry : requestParams.entrySet()) {
        if (entry.getValue().isRequired()) {
            required.put(entry.getKey(), entry.getValue());
        }
    }
    // every path variable is mandatory by definition
    for (Map.Entry<String, ActionInputParameter> entry : pathVariables.entrySet()) {
        required.put(entry.getKey(), entry.getValue());
    }
    // requestBody not supported, would have to use exploded modifier
    return required;
}
|
java
|
{
"resource": ""
}
|
q176989
|
DataType.isSingleValueType
|
test
|
/**
 * Tells whether the given class is rendered as a single value: a number,
 * boolean, string, enum, date, calendar, or currency type.
 *
 * @param clazz the class to classify
 * @return {@code true} if the class is one of the single-value types
 */
public static boolean isSingleValueType(Class<?> clazz) {
    // a direct boolean expression replaces the redundant
    // "ret = true/false; return ret" pattern of the original
    return isNumber(clazz)
            || isBoolean(clazz)
            || isString(clazz)
            || isEnum(clazz)
            || isDate(clazz)
            || isCalendar(clazz)
            || isCurrency(clazz);
}
|
java
|
{
"resource": ""
}
|
q176990
|
Affordance.addRel
|
test
|
/**
 * Adds a link relation type ("rel" link parameter) to this affordance.
 *
 * @param rel the relation type; must be non-empty
 */
public void addRel(String rel) {
    Assert.hasLength(rel);
    linkParams.add(REL.paramName, rel);
}
|
java
|
{
"resource": ""
}
|
q176991
|
Affordance.setType
|
test
|
/**
 * Sets the media type ("type" link parameter) of this affordance, or
 * removes the parameter when {@code null} is passed.
 *
 * @param mediaType the media type hint, or {@code null} to clear it
 */
public void setType(String mediaType) {
    if (mediaType == null) {
        // clearing the type removes the parameter entirely
        linkParams.remove(TYPE.paramName);
    } else {
        linkParams.set(TYPE.paramName, mediaType);
    }
}
|
java
|
{
"resource": ""
}
|
q176992
|
Affordance.addHreflang
|
test
|
/**
 * Adds an "hreflang" link parameter to this affordance.
 *
 * @param hreflang the language tag; must be non-empty
 */
public void addHreflang(String hreflang) {
    Assert.hasLength(hreflang);
    linkParams.add(HREFLANG.paramName, hreflang);
}
|
java
|
{
"resource": ""
}
|
q176993
|
Affordance.addRev
|
test
|
/**
 * Adds a reverse link relation type ("rev" link parameter) to this
 * affordance.
 *
 * @param rev the reverse relation type; must be non-empty
 */
public void addRev(String rev) {
    Assert.hasLength(rev);
    linkParams.add(REV.paramName, rev);
}
|
java
|
{
"resource": ""
}
|
q176994
|
Affordance.addLinkParam
|
test
|
/**
 * Adds one or more values for an arbitrary link parameter.
 *
 * @param paramName the link parameter name
 * @param values the values to add; must be non-empty, each value non-empty
 */
public void addLinkParam(String paramName, String... values) {
    Assert.notEmpty(values);
    for (String value : values) {
        Assert.hasLength(value);
        linkParams.add(paramName, value);
    }
}
|
java
|
{
"resource": ""
}
|
q176995
|
Affordance.expand
|
test
|
/**
 * Expands this affordance's URI template with the given arguments,
 * keeping its link parameters and action descriptors.
 *
 * @param arguments template variable values, keyed by variable name
 * @return a new affordance with the expanded (ASCII) URI
 */
@Override
public Affordance expand(Map<String, ? extends Object> arguments) {
    UriTemplate template = new UriTemplate(partialUriTemplate.asComponents()
            .toString());
    String expanded = template.expand(arguments)
            .toASCIIString();
    return new Affordance(expanded, linkParams, actionDescriptors);
}
|
java
|
{
"resource": ""
}
|
q176996
|
Affordance.getRels
|
test
|
/**
 * Returns the link relation types ("rel") of this affordance.
 *
 * @return an unmodifiable list of relation types; empty if none are set
 */
@JsonIgnore
public List<String> getRels() {
    List<String> rels = linkParams.get(REL.paramName);
    if (rels == null) {
        return Collections.<String>emptyList();
    }
    return Collections.unmodifiableList(rels);
}
|
java
|
{
"resource": ""
}
|
q176997
|
Affordance.getRevs
|
test
|
/**
 * Returns the reverse link relation types ("rev") of this affordance.
 *
 * @return an unmodifiable list of reverse relation types; empty if none
 *         are set
 */
@JsonIgnore
public List<String> getRevs() {
    final List<String> revs = linkParams.get(REV.paramName);
    return revs == null ? Collections.<String>emptyList() : Collections.unmodifiableList(revs);
}
|
java
|
{
"resource": ""
}
|
q176998
|
Affordance.hasUnsatisfiedRequiredVariables
|
test
|
/**
 * Tells whether any action descriptor of this affordance has a required
 * parameter without a value, i.e. whether the affordance cannot be
 * invoked as-is.
 *
 * @return {@code true} if at least one required parameter lacks a value
 */
@JsonIgnore
public boolean hasUnsatisfiedRequiredVariables() {
    for (ActionDescriptor actionDescriptor : actionDescriptors) {
        Map<String, ActionInputParameter> requiredParameters =
                actionDescriptor.getRequiredParameters();
        for (ActionInputParameter annotatedParameter : requiredParameters.values()) {
            if (!annotatedParameter.hasValue()) {
                // one missing required value is enough
                return true;
            }
        }
    }
    return false;
}
|
java
|
{
"resource": ""
}
|
q176999
|
SpringActionInputParameter.getValueFormatted
|
test
|
/**
 * Formats this parameter's value as a string via the Spring conversion
 * service.
 *
 * @return the formatted value, or {@code null} if no value is set
 */
public String getValueFormatted() {
    if (value == null) {
        return null;
    }
    // delegate formatting to the configured conversion service
    return (String) conversionService.convert(value, typeDescriptor,
            TypeDescriptor.valueOf(String.class));
}
|
java
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.