Columns: code (string, lengths 67 to 466k), docstring (string, lengths 1 to 13.2k)
public QueryExpression le(String propertyName,String value) { return new SimpleQueryExpression(propertyName, ComparisonOperator.LESS_THAN_OR_EQUAL,wrap(value)); }
Create a less than or equals expression @param propertyName The property name @param value The value @return The query expression
public QueryExpression in(String propertyName,String... values) { return new MultiValueQueryExpression(propertyName, ComparisonOperator.IN,wrap(values)); }
Create an in expression @param propertyName The property name @param values The values @return The query expression
public QueryExpression notIn(String propertyName,String... values) { return new MultiValueQueryExpression(propertyName, ComparisonOperator.NOT_IN,wrap(values)); }
Create a not in expression @param propertyName The property name @param values The values @return The query expression
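A brief usage sketch of these factory methods. The builder instance name qb and the property names are illustrative (they do not appear in the excerpt), and combining expressions is not shown because no and/or combinators appear above; such an expression can later be handed to the query(...) request builder shown further below.
    // Hypothetical builder instance exposing le/in/notIn as defined above.
    QueryExpression ageFilter   = qb.le("age", "65");                     // age <= 65
    QueryExpression statusIn    = qb.in("status", "ACTIVE", "PENDING");   // status IN (...)
    QueryExpression regionNotIn = qb.notIn("region", "EU", "APAC");       // region NOT IN (...)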
@Override @LogarithmicTime(amortized = true) public AddressableHeap.Handle<K, V> insert(K key, V value) { if (other != this) { throw new IllegalStateException("A heap cannot be used after a meld"); } if (key == null) { throw new NullPointerException("Null keys not permitted"); } Node<K, V> n = new Node<K, V>(this, key, value); if (comparator == null) { root = link(root, n); } else { root = linkWithComparator(root, n); } size++; return n; }
{@inheritDoc} @throws IllegalStateException if the heap has already been used in the right hand side of a meld
@Override @LogarithmicTime(amortized = true) public AddressableHeap.Handle<K, V> deleteMin() { if (size == 0) { throw new NoSuchElementException(); } // assert root.o_s == null && root.y_s == null; Handle<K, V> oldRoot = root; // cut all children, combine them and overwrite old root root = combine(cutChildren(root)); // decrease size size--; return oldRoot; }
{@inheritDoc}
@Override @LogarithmicTime(amortized = true) public void meld(MergeableAddressableHeap<K, V> other) { PairingHeap<K, V> h = (PairingHeap<K, V>) other; // check same comparator if (comparator != null) { if (h.comparator == null || !h.comparator.equals(comparator)) { throw new IllegalArgumentException("Cannot meld heaps using different comparators!"); } } else if (h.comparator != null) { throw new IllegalArgumentException("Cannot meld heaps using different comparators!"); } if (h.other != h) { throw new IllegalStateException("A heap cannot be used after a meld."); } // perform the meld size += h.size; if (comparator == null) { root = link(root, h.root); } else { root = linkWithComparator(root, h.root); } // clear other h.size = 0; h.root = null; // take ownership h.other = this; }
{@inheritDoc}
@SuppressWarnings("unchecked") private void decreaseKey(Node<K, V> n, K newKey) { int c; if (comparator == null) { c = ((Comparable<? super K>) newKey).compareTo(n.key); } else { c = comparator.compare(newKey, n.key); } if (c > 0) { throw new IllegalArgumentException("Keys can only be decreased!"); } n.key = newKey; if (c == 0 || root == n) { return; } if (n.o_s == null) { throw new IllegalArgumentException("Invalid handle!"); } // unlink from parent if (n.y_s != null) { n.y_s.o_s = n.o_s; } if (n.o_s.o_c == n) { // I am the oldest :( n.o_s.o_c = n.y_s; } else { // I have an older sibling! n.o_s.y_s = n.y_s; } n.y_s = null; n.o_s = null; // merge with root if (comparator == null) { root = link(root, n); } else { root = linkWithComparator(root, n); } }
Decrease the key of a node. @param n the node @param newKey the new key
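A small usage sketch of the addressable-heap API these methods implement. The package names follow the jheaps library layout and are an assumption; decreaseKey is reached through the handle returned by insert, which ultimately delegates to the private method above. Note from the meld contract: once a heap has been melded into another, further operations on it throw IllegalStateException.
    import org.jheaps.AddressableHeap;          // assumed package layout
    import org.jheaps.tree.PairingHeap;

    public class PairingHeapDemo {
        public static void main(String[] args) {
            AddressableHeap<Integer, String> heap = new PairingHeap<>();
            AddressableHeap.Handle<Integer, String> h = heap.insert(50, "fifty");
            heap.insert(20, "twenty");

            h.decreaseKey(10);                                  // keys may only be decreased
            // after the decrease, the element inserted with key 50 holds the minimum key
            System.out.println(heap.deleteMin().getValue());    // prints "fifty"
        }
    }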
private void delete(Node<K, V> n) { if (root == n) { deleteMin(); n.o_c = null; n.y_s = null; n.o_s = null; return; } if (n.o_s == null) { throw new IllegalArgumentException("Invalid handle!"); } // unlink from parent if (n.y_s != null) { n.y_s.o_s = n.o_s; } if (n.o_s.o_c == n) { // I am the oldest :( n.o_s.o_c = n.y_s; } else { // I have an older sibling! n.o_s.y_s = n.y_s; } n.y_s = null; n.o_s = null; // perform delete-min at tree rooted at this Node<K, V> t = combine(cutChildren(n)); // and merge with other cut tree if (comparator == null) { root = link(root, t); } else { root = linkWithComparator(root, t); } size--; }
Delete a node from the heap. @param n the node to delete
private Node<K, V> combine(Node<K, V> l) { if (l == null) { return null; } assert l.o_s == null; // left-right pass Node<K, V> pairs = null; Node<K, V> it = l, p_it; if (comparator == null) { // no comparator while (it != null) { p_it = it; it = it.y_s; if (it == null) { // append last node to pair list p_it.y_s = pairs; p_it.o_s = null; pairs = p_it; } else { Node<K, V> n_it = it.y_s; // disconnect both p_it.y_s = null; p_it.o_s = null; it.y_s = null; it.o_s = null; // link trees p_it = link(p_it, it); // append to pair list p_it.y_s = pairs; pairs = p_it; // advance it = n_it; } } } else { while (it != null) { p_it = it; it = it.y_s; if (it == null) { // append last node to pair list p_it.y_s = pairs; p_it.o_s = null; pairs = p_it; } else { Node<K, V> n_it = it.y_s; // disconnect both p_it.y_s = null; p_it.o_s = null; it.y_s = null; it.o_s = null; // link trees p_it = linkWithComparator(p_it, it); // append to pair list p_it.y_s = pairs; pairs = p_it; // advance it = n_it; } } } // second pass (reverse order - due to add first) it = pairs; Node<K, V> f = null; if (comparator == null) { while (it != null) { Node<K, V> nextIt = it.y_s; it.y_s = null; f = link(f, it); it = nextIt; } } else { while (it != null) { Node<K, V> nextIt = it.y_s; it.y_s = null; f = linkWithComparator(f, it); it = nextIt; } } return f; }
Two-pass pairing: pair up the trees in a left-to-right pass, then link the resulting pairs in reverse order to compute the new root. @param l the first tree in the list @return the root of the combined tree
private Node<K, V> cutChildren(Node<K, V> n) { Node<K, V> child = n.o_c; n.o_c = null; if (child != null) { child.o_s = null; } return child; }
Cut the children of a node and return the list. @param n the node @return the first node in the children list
public static SnorocketReasoner load(InputStream in) { SnorocketReasoner res; ObjectInputStream ois = null; try { ois = new ObjectInputStream(in); res = (SnorocketReasoner)ois.readObject(); } catch(Exception e) { log.error("Problem loading reasoner." + e); throw new RuntimeException(e); } finally { if(ois != null) { try { ois.close(); } catch(Exception e) {} } } res.no.buildTaxonomy(); return res; }
Loads a saved instance of a {@link SnorocketReasoner} from an input stream. @param in the input stream to read the serialised reasoner from @return the loaded {@link SnorocketReasoner}, with its taxonomy rebuilt
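A minimal round-trip sketch. The existence of a matching save(OutputStream) counterpart and the file name are assumptions; only load(InputStream) appears in the excerpt.
    try (java.io.InputStream in = new java.io.FileInputStream("classified-reasoner.ser")) {
        SnorocketReasoner reasoner = SnorocketReasoner.load(in);
        // load() rebuilds the taxonomy, so the reasoner is immediately ready to query
    } catch (java.io.IOException e) {
        throw new RuntimeException("Could not read saved reasoner", e);
    }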
protected Concept transformToModel(Object obj) { if(obj instanceof NamedConcept) { return (NamedConcept) obj; } else if(obj instanceof String) { return new NamedConcept((String) obj); } else if(obj instanceof au.csiro.snorocket.core.model.Conjunction) { au.csiro.snorocket.core.model.Conjunction conj = (au.csiro.snorocket.core.model.Conjunction) obj; List<Concept> conjs = new ArrayList<>(); for(AbstractConcept ac : conj.getConcepts()) { conjs.add(transformToModel(ac)); } return new Conjunction(conjs.toArray(new Concept[conjs.size()])); } else if(obj instanceof au.csiro.snorocket.core.model.Existential) { au.csiro.snorocket.core.model.Existential ex = (au.csiro.snorocket.core.model.Existential) obj; String roleId = (String) factory.lookupRoleId(ex.getRole()); Concept con = transformToModel(ex.getConcept()); return new Existential(new NamedRole(roleId), con); } else if(obj instanceof au.csiro.snorocket.core.model.Datatype) { au.csiro.snorocket.core.model.Datatype dt = (au.csiro.snorocket.core.model.Datatype) obj; String featureId = factory.lookupFeatureId(dt.getFeature()); Literal l = transformLiteralToModel(dt.getLiteral()); return new Datatype(new NamedFeature(featureId), dt.getOperator(), l); } else if(obj instanceof au.csiro.snorocket.core.model.Concept) { au.csiro.snorocket.core.model.Concept c = (au.csiro.snorocket.core.model.Concept) obj; Object id = factory.lookupConceptId(c.hashCode()); if(id instanceof String) { return new NamedConcept((String) id); } else if(id instanceof NamedConcept) { // If TOP or BOTTOM return (NamedConcept) id; } else { return transformToModel(id); } } else { throw new RuntimeException("Unexpected abstract concept " + obj.getClass()); } }
The {@link CoreFactory} can currently hold very different types of objects. These include: <ul> <li>NamedConcept: for TOP and BOTTOM</li> <li>Strings: for named concepts</li> <li>au.csiro.snorocket.core.model.Conjunction</li> <li>au.csiro.snorocket.core.model.Datatype</li> <li>au.csiro.snorocket.core.model.Existential</li> </ul> @param obj the internal object to transform @return the equivalent {@link Concept} in the canonical model
protected Literal transformLiteralToModel(AbstractLiteral al) { if(al instanceof BigIntegerLiteral) { return new au.csiro.ontology.model.BigIntegerLiteral(((BigIntegerLiteral) al).getValue()); } else if(al instanceof DateLiteral) { return new au.csiro.ontology.model.DateLiteral(((DateLiteral) al).getValue()); } else if(al instanceof DecimalLiteral) { return new au.csiro.ontology.model.DecimalLiteral(((DecimalLiteral) al).getValue()); } else if(al instanceof FloatLiteral) { return new au.csiro.ontology.model.FloatLiteral(((FloatLiteral) al).getValue()); } else if(al instanceof IntegerLiteral) { return new au.csiro.ontology.model.IntegerLiteral(((IntegerLiteral) al).getValue()); } else if(al instanceof StringLiteral) { return new au.csiro.ontology.model.StringLiteral(((StringLiteral) al).getValue()); } else { throw new RuntimeException("Unexpected abstract literal "+al); } }
Transforms a literal from the internal representation to the canonical representation. @param al the internal literal @return the equivalent canonical {@link Literal}
public Collection<Axiom> getInferredAxioms() { final Collection<Axiom> inferred = new HashSet<>(); if(!isClassified) classify(); if (!no.isTaxonomyComputed()) { log.info("Building taxonomy"); no.buildTaxonomy(); } final Map<String, Node> taxonomy = no.getTaxonomy(); final IConceptMap<Context> contextIndex = no.getContextIndex(); final IntIterator itr = contextIndex.keyIterator(); while (itr.hasNext()) { final int key = itr.next(); final String id = factory.lookupConceptId(key).toString(); if (factory.isVirtualConcept(key) || NamedConcept.BOTTOM.equals(id)) { continue; } Concept rhs = getNecessary(contextIndex, taxonomy, key); final Concept lhs = new NamedConcept(id); if (!lhs.equals(rhs) && !rhs.equals(NamedConcept.TOP_CONCEPT)) { // skip trivial axioms inferred.add(new ConceptInclusion(lhs, rhs)); } } return inferred; }
Ideally we'd return some kind of normal form axioms here. However, in the presence of GCIs this is not well defined (as far as I know - Michael). <p> Instead, we will return stated form axioms for Sufficient conditions (i.e. for INamedConcept on the RHS), and SNOMED CT DNF-based axioms for Necessary conditions. The former is just a filter over the stated axioms, the latter requires looking at the Taxonomy and inferred relationships. <p> Note that there will be <i>virtual</i> INamedConcepts that need to be skipped/expanded and redundant IExistentials that need to be filtered. @return the collection of inferred axioms
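A hedged consumption sketch, given a Set<Axiom> of stated axioms (for instance the bottles example further below). The axioms are simply printed because accessor names on ConceptInclusion are not shown in the excerpt.
    SnorocketReasoner reasoner = new SnorocketReasoner();
    reasoner.loadAxioms(statedAxioms);      // statedAxioms: Set<Axiom> built elsewhere
    reasoner.classify();                    // getInferredAxioms() also classifies lazily if needed
    for (Axiom ax : reasoner.getInferredAxioms()) {
        System.out.println(ax);             // each entry is a non-trivial ConceptInclusion
    }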
private IConceptSet filterEquivalents(final IConceptSet concepts) { int[] cArray = concepts.toArray(); boolean[] toExclude = new boolean[cArray.length]; for(int i = 0; i < cArray.length; i++) { if(toExclude[i]) continue; final IConceptSet iAncestors = IConceptSet.FACTORY.createConceptSet(getAncestors(no, cArray[i])); for(int j = i+1; j < cArray.length; j++) { if(iAncestors.contains(cArray[j])) { final IConceptSet jAncestors = IConceptSet.FACTORY.createConceptSet(getAncestors(no, cArray[j])); if(jAncestors.contains(cArray[i])) { // These concepts are equivalent, so mark the second concept as excluded toExclude[j] = true; } } } } IConceptSet res = IConceptSet.FACTORY.createConceptSet(); for(int i = 0; i < cArray.length; i++) { if(!toExclude[i]) { res.add(cArray[i]); } } return res; }
Identifies any equivalent concepts and retains only one of them. @param concepts the set of concepts to filter @return the set with a single representative kept for each group of equivalent concepts
private IConceptSet getLeaves(final IConceptSet concepts) { // Deal with any equivalent concepts. If there are equivalent concepts in the set then we only keep one of them. // Otherwise, both will get eliminated from the final set. final IConceptSet filtered = filterEquivalents(concepts); final IConceptSet leafBs = IConceptSet.FACTORY.createConceptSet(filtered); final IConceptSet set = IConceptSet.FACTORY.createConceptSet(leafBs); for (final IntIterator bItr = set.iterator(); bItr.hasNext(); ) { final int b = bItr.next(); final IConceptSet ancestors = IConceptSet.FACTORY.createConceptSet(getAncestors(no, b)); ancestors.remove(b); leafBs.removeAll(ancestors); } return leafBs; }
Given a set of concepts, computes the subset such that no member of the subset subsumes another member: result = {c | c in bs and there is no c' in bs such that c' [ c}. For example, if bs = {Animal, Dog, Cat} and Dog [ Animal, the result is {Dog, Cat}. @param concepts the set of concepts to filter @return the most specific concepts in the set
protected String printConcept(int id) { Object oid = factory.lookupConceptId(id); if(factory.isVirtualConcept(id)) { if(oid instanceof AbstractConcept) { return printAbstractConcept((AbstractConcept) oid); } else { return oid.toString(); } } else { return (String) oid; } }
Prints a concept given its internal id. Useful for debugging. @param id the internal concept id @return a string representation of the concept
private String printAbstractConcept(AbstractConcept ac) { if(ac instanceof au.csiro.snorocket.core.model.Concept) { au.csiro.snorocket.core.model.Concept c = (au.csiro.snorocket.core.model.Concept) ac; Object o = factory.lookupConceptId(c.hashCode()); if(o instanceof String) { return (String) o; } else { return printAbstractConcept((AbstractConcept) o); } } else if(ac instanceof au.csiro.snorocket.core.model.Conjunction) { au.csiro.snorocket.core.model.Conjunction c = (au.csiro.snorocket.core.model.Conjunction) ac; AbstractConcept[] acs = c.getConcepts(); StringBuilder sb = new StringBuilder(); if(acs != null && acs.length > 0) { sb.append(printAbstractConcept(acs[0])); for(int i = 1; i < acs.length; i++) { sb.append(" + "); sb.append(printAbstractConcept(acs[i])); } } return sb.toString(); } else if(ac instanceof au.csiro.snorocket.core.model.Existential) { au.csiro.snorocket.core.model.Existential e = (au.csiro.snorocket.core.model.Existential) ac; return "E"+factory.lookupRoleId(e.getRole()).toString()+"."+printAbstractConcept(e.getConcept()); } else if(ac instanceof au.csiro.snorocket.core.model.Datatype) { au.csiro.snorocket.core.model.Datatype d = (au.csiro.snorocket.core.model.Datatype) ac; return "F"+factory.lookupFeatureId(d.getFeature())+".("+d.getOperator()+", "+d.getLiteral()+")"; } else { throw new RuntimeException("Unexpected concept: " + ac); } }
Prints an abstract concept. Useful for debugging. @param ac the abstract concept @return a string representation of the abstract concept
public static String getParameterValueFromQuery(String query, String paramName) { String[] components = query.split("&"); for (String keyValuePair : components) { String[] pairComponents = keyValuePair.split("="); if (pairComponents.length == 2) { try { String key = URLDecoder.decode(pairComponents[0], "utf-8"); if (key.compareTo(paramName) == 0) { return URLDecoder.decode(pairComponents[1], "utf-8"); } } catch (UnsupportedEncodingException e) { logger.error("getParameterValueFromQuery failed with exception: " + e.getLocalizedMessage(), e); } } } return null; }
Obtains a parameter with the specified name from a query string. The query should be in the format param=value&param=value ... @param query Query in URL format. @param paramName Parameter name. @return Parameter value, or null if the parameter is not present.
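A quick illustration of the lookup; the enclosing utility class is not named in the excerpt, so the call is shown unqualified.
    String query   = "user=jo%20smith&lang=en";
    String user    = getParameterValueFromQuery(query, "user");   // "jo smith" (URL-decoded)
    String missing = getParameterValueFromQuery(query, "id");     // null, parameter not present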
public static JSONObject extractSecureJson(Response response) { try { String responseText = response.getResponseText(); if (!responseText.startsWith(SECURE_PATTERN_START) || !responseText.endsWith(SECURE_PATTERN_END)) { return null; } int startIndex = responseText.indexOf(SECURE_PATTERN_START); int endIndex = responseText.indexOf(SECURE_PATTERN_END, responseText.length() - SECURE_PATTERN_END.length() - 1); String jsonString = responseText.substring(startIndex + SECURE_PATTERN_START.length(), endIndex); return new JSONObject(jsonString); } catch (Throwable t) { logger.error("extractSecureJson failed with exception: " + t.getLocalizedMessage(), t); return null; } }
Extracts a JSON object from a server response that wraps the payload in a secure prefix and suffix. @param response Server response @return Extracted secure JSON, or null.
public static String buildRewriteDomain(String backendRoute, String subzone) throws MalformedURLException { if (backendRoute == null || backendRoute.isEmpty()) { logger.error("Backend route can't be null."); return null; } String applicationRoute = backendRoute; if (!applicationRoute.startsWith(BMSClient.HTTP_SCHEME)) { applicationRoute = String.format("%s://%s", BMSClient.HTTPS_SCHEME, applicationRoute); } else if (!applicationRoute.startsWith(BMSClient.HTTPS_SCHEME) && applicationRoute.contains(BLUEMIX_NAME)) { applicationRoute = applicationRoute.replace(BMSClient.HTTP_SCHEME, BMSClient.HTTPS_SCHEME); } URL url = new URL(applicationRoute); String host = url.getHost(); String rewriteDomain; String regionInDomain = "ng"; int port = url.getPort(); String serviceUrl = String.format("%s://%s", url.getProtocol(), host); if (port != 0) { serviceUrl += ":" + String.valueOf(port); } String[] hostElements = host.split("\\."); if (!serviceUrl.contains(STAGE1_NAME)) { // Multi-region: myApp.eu-gb.mybluemix.net // US: myApp.mybluemix.net if (hostElements.length == 4) { regionInDomain = hostElements[hostElements.length - 3]; } // this is production, because STAGE1 is not found // Multi-Region Eg: eu-gb.bluemix.net // US Eg: ng.bluemix.net rewriteDomain = String.format("%s.%s", regionInDomain, BLUEMIX_DOMAIN); } else { // Multi-region: myApp.stage1.eu-gb.mybluemix.net // US: myApp.stage1.mybluemix.net if (hostElements.length == 5) { regionInDomain = hostElements[hostElements.length - 3]; } if (subzone != null && !subzone.isEmpty()) { // Multi-region Dev subzone Eg: stage1-Dev.eu-gb.bluemix.net // US Dev subzone Eg: stage1-Dev.ng.bluemix.net rewriteDomain = String.format("%s-%s.%s.%s", STAGE1_NAME, subzone, regionInDomain, BLUEMIX_DOMAIN); } else { // Multi-region Eg: stage1.eu-gb.bluemix.net // US Eg: stage1.ng.bluemix.net rewriteDomain = String.format("%s.%s.%s", STAGE1_NAME, regionInDomain, BLUEMIX_DOMAIN); } } return rewriteDomain; }
Builds the rewrite domain from the backend route URL. @param backendRoute Backend route. @param subzone Subzone. @return Rewrite domain. @throws MalformedURLException if the backendRoute parameter has an invalid format.
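Worked examples derived from the host-name comments inside the method; the concrete values of the constants (STAGE1_NAME as "stage1", BLUEMIX_DOMAIN as "bluemix.net") are assumptions taken from those comments.
    buildRewriteDomain("https://myApp.eu-gb.mybluemix.net", null);   // "eu-gb.bluemix.net"
    buildRewriteDomain("https://myApp.mybluemix.net", null);         // "ng.bluemix.net"
    buildRewriteDomain("https://myApp.stage1.mybluemix.net", "Dev"); // "stage1-Dev.ng.bluemix.net"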
public static String concatenateUrls(String rootUrl, String path) { if (rootUrl == null || rootUrl.isEmpty()) { return path; } if (path == null || path.isEmpty()) { return rootUrl; } String finalUrl; if (rootUrl.charAt(rootUrl.length() - 1) == '/' && path.charAt(0) == '/') { finalUrl = rootUrl.substring(0, rootUrl.length() - 1) + path; } else if (rootUrl.charAt(rootUrl.length() - 1) != '/' && path.charAt(0) != '/') { finalUrl = rootUrl + "/" + path; } else { finalUrl = rootUrl + path; } return finalUrl; }
Concatenates two URLs. The function checks for trailing and preceding slashes in rootUrl and path. @param rootUrl first part of the url @param path second part of the url @return Concatenated string containing rootUrl and path.
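Behaviour sketch (with the off-by-one in the slash-trimming branch corrected above, so that only the duplicate slash is dropped):
    concatenateUrls("http://example.com/api/", "/v1/items");  // "http://example.com/api/v1/items"
    concatenateUrls("http://example.com/api", "v1/items");    // "http://example.com/api/v1/items"
    concatenateUrls(null, "/v1/items");                       // "/v1/items"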
private void init(NormalisedOntology ont) { parentTodo = ont.getTodo(); contextIndex = ont.getContextIndex(); ontologyNF1 = ont.getOntologyNF1(); ontologyNF2 = ont.getOntologyNF2(); ontologyNF3 = ont.getOntologyNF3(); ontologyNF4 = ont.getOntologyNF4(); ontologyNF5 = ont.getOntologyNF5(); reflexiveRoles = ont.getReflexiveRoles(); ontologyNF7 = ont.getOntologyNF7(); ontologyNF8 = ont.getOntologyNF8(); functionalFeatures = ont.getFunctionalFeatures(); roleClosureCache = ont.getRoleClosureCache(); factory = ont.getFactory(); affectedContexts = ont.getAffectedContexts(); }
Initialises the shared variables. @param ont the normalised ontology that provides the shared structures
public void primeQueuesIncremental( MonotonicCollection<IConjunctionQueueEntry> conceptEntries, MonotonicCollection<IRoleQueueEntry> roleEntries, MonotonicCollection<IFeatureQueueEntry> featureEntries) { if (conceptEntries != null) addToConceptQueue(conceptEntries); if (roleEntries != null) roleQueue.addAll(roleEntries); if (featureEntries != null) featureQueue.addAll(featureEntries); }
Adds queue entries for this concept based on the new axioms added in an incremental classification. @param conceptEntries the new conjunction queue entries @param roleEntries the new role queue entries @param featureEntries the new feature queue entries
public void deactivate() { active.set(false); if (!(conceptQueue.isEmpty() && roleQueue.isEmpty() && featureQueue.isEmpty())) { if (activate()) { parentTodo.add(this); } } }
Deactivates the context. If any of the queues is found to be non-empty, the context is reactivated and, if this call performed the reactivation, it is added back to the parent todo list so the remaining entries are processed.
public void processExternalEdge(final int role, final int src) { externalQueue.add(new IRoleQueueEntry() { /** * Serialisation version. */ private static final long serialVersionUID = 1L; @Override public int getR() { return role; } @Override public int getB() { return src; } }); }
Triggers the processing of an edge based on events that happened in another {@link Context}. @param role the role of the edge @param src the concept at the other end of the edge
private boolean datatypeMatches(Datatype d1, Datatype d2) { assert (d1.getFeature() == d2.getFeature()); AbstractLiteral lhsLit = d1.getLiteral(); AbstractLiteral rhsLit = d2.getLiteral(); Operator lhsOp = d1.getOperator(); Operator rhsOp = d2.getOperator(); if (rhsOp == Operator.EQUALS) { // If the rhs operator is =, then the expression will only match // if the lhs operator is also = and the literal values are the // same. if(lhsOp != Operator.EQUALS) { return false; } else { return d1.getLiteral().equals(d2.getLiteral()); } } else if (rhsOp == Operator.GREATER_THAN) { if (lhsOp == Operator.LESS_THAN || lhsOp == Operator.LESS_THAN_EQUALS) { return false; } else if (lhsOp == Operator.EQUALS) { if (compareLiterals(lhsLit, rhsLit) > 0) { return true; } else { return false; } } else if (lhsOp == Operator.GREATER_THAN) { if (compareLiterals(lhsLit, rhsLit) >= 0) { return true; } else { return false; } } else if (lhsOp == Operator.GREATER_THAN_EQUALS) { if (compareLiterals(lhsLit, rhsLit) > 0) { return true; } else { return false; } } } else if (rhsOp == Operator.GREATER_THAN_EQUALS) { if (lhsOp == Operator.LESS_THAN || lhsOp == Operator.LESS_THAN_EQUALS) { return false; } else if (lhsOp == Operator.EQUALS) { if (compareLiterals(lhsLit, rhsLit) >= 0) { return true; } else { return false; } } else if (lhsOp == Operator.GREATER_THAN) { if (compareLiterals(lhsLit, rhsLit) >= -1) { return true; } else { return false; } } else if (lhsOp == Operator.GREATER_THAN_EQUALS) { if (compareLiterals(lhsLit, rhsLit) >= 0) { return true; } else { return false; } } } else if (rhsOp == Operator.LESS_THAN) { if (lhsOp == Operator.GREATER_THAN || lhsOp == Operator.GREATER_THAN_EQUALS) { return false; } else if (lhsOp == Operator.EQUALS) { if (compareLiterals(lhsLit, rhsLit) < 0) { return true; } else { return false; } } else if (lhsOp == Operator.LESS_THAN) { if (compareLiterals(lhsLit, rhsLit) <= 0) { return true; } else { return false; } } else if (lhsOp == Operator.LESS_THAN_EQUALS) { if (compareLiterals(lhsLit, rhsLit) < 0) { return true; } else { return false; } } } else if (rhsOp == Operator.LESS_THAN_EQUALS) { if (lhsOp == Operator.GREATER_THAN || lhsOp == Operator.GREATER_THAN_EQUALS) { return false; } else if (lhsOp == Operator.EQUALS) { if (compareLiterals(lhsLit, rhsLit) <= 0) { return true; } else { return false; } } else if (lhsOp == Operator.LESS_THAN) { if (compareLiterals(lhsLit, rhsLit) <= 1) { return true; } else { return false; } } else if (lhsOp == Operator.LESS_THAN_EQUALS) { if (compareLiterals(lhsLit, rhsLit) <= 0) { return true; } else { return false; } } } return d1.getLiteral().equals(d2.getLiteral()); }
Evaluates whether a {@link Datatype} from an NF7 entry matches a {@link Datatype} from an NF8 entry. This method assumes that both literals have the same literal type. For example, with integer literals the entry feature = 7 matches the constraint feature > 5. @param d1 Data type from an NF7 entry. @param d2 Data type from an NF8 entry. @return true if d1 matches d2, false otherwise.
private void processNewEdge(int role, int b) { final RoleSet roleClosure = getRoleClosure(role); processRole(role, b); for (int s = roleClosure.first(); s >= 0; s = roleClosure.next(s + 1)) { if (s == role) continue; processRole(s, b); } }
Processes a new edge derived from the subsumption a [ role.b, where a is the concept of this context; the edge is also propagated to every super-role in the role closure of role. @param role the role @param b the target concept
private void processOntologyTracking() { boolean done; do { done = true; // Process concept queue if (!conceptQueue.isEmpty()) { do { done = false; final IConjunctionQueueEntry entry = conceptQueue.remove(); final int b = entry.getB(); if (!s.contains(b)) { final int bi = entry.getBi(); if (s.contains(bi)) { s.add(b); changed = true; processNewSubsumptionTracking(b); } } } while (!conceptQueue.isEmpty()); } // Process feature queue if (!featureQueue.isEmpty()) { do { done = false; final IFeatureQueueEntry entry = featureQueue.remove(); Datatype d = entry.getD(); // Handle functional features checkFunctionalFeatures(d); // Get right hand sides from NF8 expressions that // match d on their left hand side MonotonicCollection<NF8> entries = ontologyNF8.get(d.getFeature()); if (entries == null) continue; // Evaluate to determine the ones that match MonotonicCollection<IConjunctionQueueEntry> res = new MonotonicCollection<IConjunctionQueueEntry>(2); for (final NF8 e : entries) { Datatype d2 = e.lhsD; // If they match add a conjunction queue entry // to queueA if (datatypeMatches(d, d2)) { res.add(new IConjunctionQueueEntry() { /** * Serialisation version. */ private static final long serialVersionUID = 1L; @Override public int getBi() { return CoreFactory.TOP_CONCEPT; } @Override public int getB() { return e.rhsB; } }); } } addToConceptQueue(res); } while (!featureQueue.isEmpty()); } // Process role queue if (!roleQueue.isEmpty()) { done = false; final IRoleQueueEntry entry = roleQueue.remove(); if (!succ.lookupConcept(entry.getR()).contains(entry.getB())) { processNewEdgeTracking(entry.getR(), entry.getB()); } } if (!externalQueue.isEmpty()) { done = false; final IRoleQueueEntry entry = externalQueue.remove(); processNewEdgeTracking(entry.getR(), entry.getB()); } } while (!done); }
Processes all the queues of this context, tracking the changes required for incremental classification.
@Override public BundlePathMapping build(List<String> strPathMappings) { BundlePathMapping bundlePathMapping = new BundlePathMapping(bundle); for (JoinableResourceBundle child : ((CompositeResourceBundle) bundle).getChildBundles()) { if (!child.getInclusionPattern().isIncludeOnlyOnDebug()) { bundlePathMapping.getItemPathList().addAll(child.getItemPathList()); addFilePathMapping(bundlePathMapping, child.getItemPathList()); } if (!child.getInclusionPattern().isExcludeOnDebug()) { bundlePathMapping.getItemDebugPathList().addAll(child.getItemDebugPathList()); addFilePathMapping(bundlePathMapping, child.getItemDebugPathList()); } bundlePathMapping.getLicensesPathList().addAll(child.getLicensesPathList()); addFilePathMapping(bundlePathMapping, child.getLicensesPathList()); } return bundlePathMapping; }
/* (non-Javadoc) @see net.jawr.web.resource.bundle.mappings.BundlePathMappingBuilder#build(java .util.List)
@Override public void init() throws ServletException { try { String type = getServletConfig().getInitParameter(JawrConstant.TYPE_INIT_PARAMETER); if (JawrConstant.BINARY_TYPE.equals(type)) { requestHandler = new JawrBinaryResourceRequestHandler(getServletContext(), getServletConfig()); } else { requestHandler = new JawrRequestHandler(getServletContext(), getServletConfig()); } } catch (ServletException e) { Marker fatal = MarkerFactory.getMarker("FATAL"); LOGGER.error(fatal, "Jawr servlet with name " + getServletConfig().getServletName() + " failed to initialize properly. "); LOGGER.error(fatal, "Cause:"); LOGGER.error(fatal, e.getMessage(), e); throw e; } catch (Throwable e) { Marker fatal = MarkerFactory.getMarker("FATAL"); LOGGER.error(fatal, "Jawr servlet with name " + getServletConfig().getServletName() + " failed to initialize properly. "); LOGGER.error(fatal, "Cause: "); LOGGER.error(fatal, e.getMessage(), e); throw new ServletException(e); } }
/* (non-Javadoc) @see javax.servlet.GenericServlet#init()
@Override public int doEndTag() throws JspException { try { BinaryResourcesHandler rsHandler = null; if ((rsHandler = (BinaryResourcesHandler) pageContext.getServletContext() .getAttribute(JawrConstant.BINARY_CONTEXT_ATTRIBUTE)) == null) throw new IllegalStateException( "Binary ResourceBundlesHandler not present in servlet context. Initialization of Jawr either failed or never occurred."); JawrConfig jawrConfig = rsHandler.getConfig(); this.renderer = RendererFactory.getImgRenderer(jawrConfig, isPlainImage()); this.renderer.renderImage(getImgSrcToRender(), getAttributeMap(), pageContext.getOut()); } catch (IOException e) { throw new JspException(e); } finally { // Reset the Thread local for the Jawr context ThreadLocalJawrContext.reset(); } return (EVAL_PAGE); }
Render the IMG tag. @throws JspException if a JSP exception has occurred
public static void bottlesExample() { // Create all the concepts Concept bottle = Factory.createNamedConcept("bottle"); Concept plasticBottle = Factory.createNamedConcept("plasticBottle"); Concept glassBottle = Factory.createNamedConcept("glassBottle"); Concept purplePlasticBottle = Factory.createNamedConcept("purplePlasticBottle"); Concept plastic = Factory.createNamedConcept("plastic"); Concept tallBottle = Factory.createNamedConcept("tallBottle"); Concept wideBottle = Factory.createNamedConcept("wideBottle"); Concept wineBottle = Factory.createNamedConcept("wineBottle"); // Create all the roles Role isMadeOf = Factory.createNamedRole("isMadeOf"); // Create all the features Feature hasHeight = Factory.createNamedFeature("hasHeight"); Feature hasWidth = Factory.createNamedFeature("hasWidth"); Set<Axiom> axioms = new HashSet<Axiom>(); // This is an example of a primitive child with no roles. Axiom a0 = new ConceptInclusion(glassBottle, bottle); axioms.add(a0); // This is an example of a fully defined child with one role. In this // case two axioms are needed because the API does not support // equivalence directly. Axiom a1 = new ConceptInclusion( plasticBottle, Factory.createConjunction( bottle, Factory.createExistential(isMadeOf, plastic) ) ); Axiom a1b = new ConceptInclusion( Factory.createConjunction( bottle, Factory.createExistential(isMadeOf, plastic) ), plasticBottle ); axioms.add(a1); axioms.add(a1b); // This is an example of a primitive child with no roles Axiom a2 = new ConceptInclusion( purplePlasticBottle, plasticBottle ); axioms.add(a2); // This is an example of a fully defined child with a concrete domain Axiom a3 = new ConceptInclusion( tallBottle, Factory.createConjunction( bottle, Factory.createDatatype( hasHeight, Operator.GREATER_THAN, Factory.createIntegerLiteral(5)) ) ); Axiom a3b = new ConceptInclusion( Factory.createConjunction( bottle, Factory.createDatatype( hasHeight, Operator.GREATER_THAN, Factory.createIntegerLiteral(5)) ), tallBottle ); axioms.add(a3); axioms.add(a3b); // This is another example of a fully defined child with a concrete // domain Axiom a4 = new ConceptInclusion( wideBottle, Factory.createConjunction( bottle, Factory.createDatatype( hasWidth, Operator.GREATER_THAN, Factory.createIntegerLiteral(5)) ) ); Axiom a4b = new ConceptInclusion( Factory.createConjunction( bottle, Factory.createDatatype( hasWidth, Operator.GREATER_THAN, Factory.createIntegerLiteral(5)) ), wideBottle ); axioms.add(a4); axioms.add(a4b); // Yet another example of a fully defined child with a concrete domain Axiom a5 = new ConceptInclusion( wineBottle, Factory.createConjunction( glassBottle, Factory.createDatatype( hasWidth, Operator.EQUALS, Factory.createIntegerLiteral(2)), Factory.createDatatype( hasHeight, Operator.EQUALS, Factory.createIntegerLiteral(6)) ) ); Axiom a5b = new ConceptInclusion( Factory.createConjunction( glassBottle, Factory.createDatatype( hasWidth, Operator.EQUALS, Factory.createIntegerLiteral(2)), Factory.createDatatype( hasHeight, Operator.EQUALS, Factory.createIntegerLiteral(6)) ), wineBottle ); axioms.add(a5); axioms.add(a5b); // Create a classifier and classify the axioms IReasoner r = new SnorocketReasoner(); r.loadAxioms(axioms); r = r.classify(); // Get only the taxonomy Ontology res = r.getClassifiedOntology(); Utils.printTaxonomy(res.getTopNode(), res.getBottomNode()); }
Shows how to create the following axioms: <ol> <li>Primitive child with no roles</li> <li>Fully defined child with one or more roles</li> <li>Fully defined child with a concrete domain</li> </ol>
@Override public int compare(ResourceGenerator o1, ResourceGenerator o2) { ResolverComparator rComparator = new ResolverComparator(); return rComparator.compare(o1.getResolver(), o2.getResolver()); }
/* (non-Javadoc) @see java.util.Comparator#compare(java.lang.Object, java.lang.Object)
@Override @ConstantTime @SuppressWarnings("unchecked") public K findMin() { if (size + insertionBufferSize == 0) { throw new NoSuchElementException(); } if (insertionBufferSize == 0) { return array[0]; } else if (size == 0) { return insertionBuffer[insertionBufferMinPos]; } else { K insertionBufferMin = insertionBuffer[insertionBufferMinPos]; if (comparator == null) { if (((Comparable<? super K>) array[0]).compareTo(insertionBufferMin) <= 0) { return array[0]; } else { return insertionBufferMin; } } else { if (comparator.compare(array[0], insertionBufferMin) <= 0) { return array[0]; } else { return insertionBufferMin; } } } }
{@inheritDoc}
@Override @SuppressWarnings("unchecked") @ConstantTime(amortized = true) public void insert(K key) { if (key == null) { throw new NullPointerException("Null keys not permitted"); } // add in buffer insertionBuffer[insertionBufferSize++] = key; if (isBulkInsertionBufferFull()) { if (size + insertionBufferSize > array.length) { // first try to double size if (array.length == 0) { ensureCapacity(1); } else { ensureCapacity(2 * array.length); } // if not enough, set to requested size ensureCapacity(size + insertionBufferSize); } if (comparator == null) { bulkInsert(); } else { bulkInsertWithComparator(); } } else if (insertionBufferSize > 1) { // update minimum K insertionBufferMin = insertionBuffer[insertionBufferMinPos]; if (comparator == null) { if (((Comparable<? super K>) key).compareTo(insertionBufferMin) < 0) { insertionBufferMinPos = insertionBufferSize - 1; } } else { if (comparator.compare(key, insertionBufferMin) < 0) { insertionBufferMinPos = insertionBufferSize - 1; } } } }
{@inheritDoc}
@Override @SuppressWarnings("unchecked") @LogarithmicTime(amortized = true) public K deleteMin() { if (size + insertionBufferSize == 0) { throw new NoSuchElementException(); } // where is the minimum boolean deleteFromInsertionBuffer = false; if (size == 0) { deleteFromInsertionBuffer = true; } else if (insertionBufferSize > 0) { K arrayMin = array[0]; K insertionBufferMin = insertionBuffer[insertionBufferMinPos]; if (comparator == null) { if (((Comparable<? super K>) insertionBufferMin).compareTo(arrayMin) < 0) { deleteFromInsertionBuffer = true; } } else { if (comparator.compare(insertionBufferMin, arrayMin) < 0) { deleteFromInsertionBuffer = true; } } } K result; if (deleteFromInsertionBuffer) { result = insertionBuffer[insertionBufferMinPos]; insertionBuffer[insertionBufferMinPos] = insertionBuffer[insertionBufferSize - 1]; insertionBuffer[insertionBufferSize - 1] = null; insertionBufferSize--; insertionBufferMinPos = 0; if (comparator == null) { for (int i = 1; i < insertionBufferSize; i++) { if (((Comparable<? super K>) insertionBuffer[i]) .compareTo(insertionBuffer[insertionBufferMinPos]) < 0) { insertionBufferMinPos = i; } } } else { for (int i = 1; i < insertionBufferSize; i++) { if (comparator.compare(insertionBuffer[i], insertionBuffer[insertionBufferMinPos]) < 0) { insertionBufferMinPos = i; } } } } else { result = array[0]; size--; array[0] = array[size]; array[size] = null; if (size > 1) { if (comparator == null) { fixdown(0); } else { fixdownWithComparator(0); } } if (minCapacity <= array.length && 4 * size < array.length) { ensureCapacity(array.length / 2); } } return result; }
{@inheritDoc}
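A short usage sketch of the weak-heap variant with a bulk-insertion buffer. The package (org.jheaps.array in the jheaps library) is an assumption; the no-argument constructor is the one used by heapify below.
    BinaryArrayBulkInsertWeakHeap<Integer> heap = new BinaryArrayBulkInsertWeakHeap<>();
    heap.insert(42);
    heap.insert(7);
    heap.insert(19);
    System.out.println(heap.findMin());    // 7 (may still live in the insertion buffer)
    System.out.println(heap.deleteMin());  // 7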
@LinearTime public static <K> BinaryArrayBulkInsertWeakHeap<K> heapify(K[] array) { if (array == null) { throw new IllegalArgumentException("Array cannot be null"); } if (array.length == 0) { return new BinaryArrayBulkInsertWeakHeap<K>(); } BinaryArrayBulkInsertWeakHeap<K> h = new BinaryArrayBulkInsertWeakHeap<K>(array.length); System.arraycopy(array, 0, h.array, 0, array.length); h.size = array.length; for (int j = h.size - 1; j > 0; j--) { h.join(h.dancestor(j), j); } return h; }
Create a heap from an array of elements. The elements of the array are not destroyed. The method has linear time complexity. @param <K> the type of keys maintained by the heap @param array an array of elements @return a binary heap @throws IllegalArgumentException in case the array is null
@LinearTime public static <K> BinaryArrayBulkInsertWeakHeap<K> heapify(K[] array, Comparator<? super K> comparator) { if (array == null) { throw new IllegalArgumentException("Array cannot be null"); } if (array.length == 0) { return new BinaryArrayBulkInsertWeakHeap<K>(comparator); } BinaryArrayBulkInsertWeakHeap<K> h = new BinaryArrayBulkInsertWeakHeap<K>(comparator, array.length); System.arraycopy(array, 0, h.array, 0, array.length); h.size = array.length; for (int j = h.size - 1; j > 0; j--) { h.joinWithComparator(h.dancestor(j), j); } return h; }
Create a heap from an array of elements. The elements of the array are not destroyed. The method has linear time complexity. @param <K> the type of keys maintained by the heap @param array an array of elements @param comparator the comparator to use @return a binary heap @throws IllegalArgumentException in case the array is null
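Usage of the two heapify variants; the input array is copied, so it can be reused. Comparator.reverseOrder() requires Java 8+.
    Integer[] keys = {9, 4, 7, 1};
    BinaryArrayBulkInsertWeakHeap<Integer> h1 = BinaryArrayBulkInsertWeakHeap.heapify(keys);
    BinaryArrayBulkInsertWeakHeap<Integer> h2 =
            BinaryArrayBulkInsertWeakHeap.heapify(keys, java.util.Comparator.reverseOrder());
    System.out.println(h1.deleteMin());   // 1 (natural ordering)
    System.out.println(h2.deleteMin());   // 9 (reversed ordering)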
protected boolean isBulkInsertionBufferFull() { if (insertionBufferSize >= insertionBuffer.length) { return true; } double sizeAsDouble = size + insertionBufferSize; return Math.getExponent(sizeAsDouble) + 3 >= insertionBuffer.length; }
Check if the bulk insertion buffer is full. @return true if the bulk insertion buffer is full, false otherwise
protected void bulkInsert() { if (insertionBufferSize == 0) { return; } int right = size + insertionBufferSize - 2; int left = Math.max(size, right / 2); while (insertionBufferSize > 0) { --insertionBufferSize; array[size] = insertionBuffer[insertionBufferSize]; insertionBuffer[insertionBufferSize] = null; reverse.clear(size); ++size; } while (right > left + 1) { left = left / 2; right = right / 2; for (int j = left; j <= right; j++) { fixdown(j); } } if (left != 0) { int i = dancestor(left); fixdown(i); fixup(i); } if (right != 0) { int i = dancestor(right); fixdown(i); fixup(i); } insertionBufferMinPos = 0; }
Bulk insert from insertion buffer into the weak heap.
protected void bulkInsertWithComparator() { if (insertionBufferSize == 0) { return; } int right = size + insertionBufferSize - 2; int left = Math.max(size, right / 2); while (insertionBufferSize > 0) { --insertionBufferSize; array[size] = insertionBuffer[insertionBufferSize]; insertionBuffer[insertionBufferSize] = null; reverse.clear(size); ++size; } while (right > left + 1) { left = left / 2; right = right / 2; for (int j = left; j <= right; j++) { fixdownWithComparator(j); } } if (left != 0) { int i = dancestor(left); fixdownWithComparator(i); fixupWithComparator(i); } if (right != 0) { int i = dancestor(right); fixdownWithComparator(i); fixupWithComparator(i); } insertionBufferMinPos = 0; }
Bulk insert from insertion buffer into the weak heap.
protected void initCommonGenerators() { commonGenerators.put(new PrefixedPathResolver(MESSAGE_BUNDLE_PREFIX), ResourceBundleMessagesGenerator.class); Class<?> classPathGeneratorClass = null; Class<?> webJarsGeneratorClass = null; boolean isWebJarsLocatorPresent = ClassLoaderResourceUtils.isClassPresent(WEBJARS_LOCATOR_CLASSNAME); if (resourceType.equals(JawrConstant.JS_TYPE)) { classPathGeneratorClass = ClasspathJSGenerator.class; if (isWebJarsLocatorPresent) { webJarsGeneratorClass = WebJarsLocatorJSGenerator.class; } else { webJarsGeneratorClass = WebJarsJSGenerator.class; } } else if (resourceType.equals(JawrConstant.CSS_TYPE)) { classPathGeneratorClass = ClassPathCSSGenerator.class; if (isWebJarsLocatorPresent) { webJarsGeneratorClass = WebJarsLocatorCssGenerator.class; } else { webJarsGeneratorClass = WebJarsCssGenerator.class; } } else { classPathGeneratorClass = ClassPathBinaryResourceGenerator.class; if (isWebJarsLocatorPresent) { webJarsGeneratorClass = WebJarsLocatorBinaryResourceGenerator.class; } else { webJarsGeneratorClass = WebJarsBinaryResourceGenerator.class; } } commonGenerators.put(new PrefixedPathResolver(CLASSPATH_RESOURCE_BUNDLE_PREFIX), classPathGeneratorClass); commonGenerators.put(new PrefixedPathResolver(WEBJARS_GENERATOR_PREFIX), webJarsGeneratorClass); if (resourceType.equals(JawrConstant.JS_TYPE)) { commonGenerators.put(new PrefixedPathResolver(COMMONS_VALIDATOR_PREFIX), CommonsValidatorGenerator.class); commonGenerators.put(new PrefixedPathResolver(SKIN_SWTICHER_GENERATOR_PREFIX), SkinSwitcherJsGenerator.class); commonGenerators.put(new SuffixedPathResolver(COFEESCRIPT_GENERATOR_SUFFIX), CoffeeScriptGenerator.class); } if (resourceType.equals(JawrConstant.CSS_TYPE)) { commonGenerators.put(new PrefixedPathResolver(IE_CSS_GENERATOR_PREFIX), IECssBundleGenerator.class); commonGenerators.put(new PrefixedPathResolver(SKIN_GENERATOR_PREFIX), CssSkinGenerator.class); commonGenerators.put(new SuffixedPathResolver(LESS_GENERATOR_SUFFIX), LessCssGenerator.class); String sassGenerator = config.getProperty(SASS_GENERATOR_TYPE, SASS_GENERATOR_VAADIN); if (!sassGenerator.equals(SASS_GENERATOR_VAADIN) && !sassGenerator.equals(SASS_GENERATOR_RUBY)) { throw new BundlingProcessException("The value '" + sassGenerator + "' is not allowed for property '" + SASS_GENERATOR_TYPE + "'. Please check your configuration."); } if (sassGenerator.equals(SASS_GENERATOR_VAADIN)) { commonGenerators.put(new SuffixedPathResolver(SASS_GENERATOR_SUFFIX), SassVaadinGenerator.class); } else { commonGenerators.put(new SuffixedPathResolver(SASS_GENERATOR_SUFFIX), SassRubyGenerator.class); } } if ((resourceType.equals(JawrConstant.CSS_TYPE) || resourceType.equals(JawrConstant.BINARY_TYPE))) { commonGenerators.put(new PrefixedPathResolver(SPRITE_GENERATOR_PREFIX), SpriteGenerator.class); } }
Initialize the common generators
@Override protected BundleRenderer createRenderer(ResourceBundlesHandler rsHandler, Boolean useRandomParam) { return RendererFactory.getCssBundleRenderer(rsHandler, useRandomParam, this.media, this.alternate, this.displayAlternate, this.title); }
/* (non-Javadoc) @see net.jawr.web.taglib.AbstractResourceBundleTag#createRenderer(boolean)
@Override public void release() { super.release(); alternate = false; displayAlternate = false; title = null; media = null; }
/* (non-Javadoc) @see javax.servlet.jsp.tagext.TagSupport#release()
public static Map<Object, Object> getSupportedProperties(Object ref) { if (null == supportedMIMETypes) { synchronized (MIMETypesSupport.class) { if (null == supportedMIMETypes) { // Load the supported MIME types out of a properties file try (InputStream is = ClassLoaderResourceUtils.getResourceAsStream(MIME_PROPS_LOCATION, ref)) { supportedMIMETypes = new Properties(); supportedMIMETypes.load(is); } catch (FileNotFoundException e) { throw new BundlingProcessException( "Error retrieving " + MIME_PROPS_LOCATION + ". Please check your classloader settings"); } catch (IOException e) { throw new BundlingProcessException( "Error retrieving " + MIME_PROPS_LOCATION + ". Please check your classloader settings"); } } } } return supportedMIMETypes; }
Returns a Map object containing all the supported media extensions, paired with their MIME type. @param ref An object reference to anchor the classpath (any 'this' reference does). @return A map of supported media extensions to their MIME types.
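A usage sketch; the contents of the bundled MIME properties file are an assumption (for instance that "png" maps to "image/png").
    Map<Object, Object> mimeTypes = MIMETypesSupport.getSupportedProperties(this);
    Object pngMimeType = mimeTypes.get("png");   // expected to be "image/png" if that entry exists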
@Override @SuppressWarnings("unchecked") protected void ensureCapacity(int capacity) { checkCapacity(capacity); K[] newArray = (K[]) new Object[capacity + 1]; System.arraycopy(array, 1, newArray, 1, size); array = newArray; }
Ensure that the array representation has the necessary capacity. @param capacity the requested capacity
@Override protected int compare(Long o1, Long o2) { if (o1 < o2) { return -1; } else if (o1 > o2) { return 1; } else { return 0; } }
{@inheritDoc}
@Override protected int msd(Long a, Long b) { /* * Value equal */ if (a.longValue() == b.longValue()) { return -1; } /* * This is a fast way to compute floor(log_2(a xor b)). */ double axorb = a ^ b; return Math.getExponent(axorb); }
{@inheritDoc}
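A worked example of the msd computation above (the index of the most significant differing bit, as used by radix-style heaps):
    long a = 12L;                        // binary 1100
    long b = 10L;                        // binary 1010
    double axorb = a ^ b;                // 1100 ^ 1010 = 0110 = 6
    int msd = Math.getExponent(axorb);   // floor(log2(6)) = 2: bit 2 is the highest differing bit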
public CreateSingleRequestBuilder<T,ID> create(T item) { return new CreateSingleRequestBuilder<>(version,type,entityName,item); }
Create a request builder for a create operation for a single item @param item The item to create @return The request builder
public CreateCollectionRequestBuilder<T,ID> create(List<T> items) { return new CreateCollectionRequestBuilder<T,ID>(version,type,entityName,items); }
Create a request builder for a create operation for multiple items @param items The items to create @return The request builder
public DeleteRequestBuilder<T,ID> delete(ID... ids) { return new DeleteRequestBuilder<T, ID>(version,type,entityName,ids); }
Create a request builder for a delete operation using the specified IDs @param ids The ids to delete @return The request builder
public DeleteRequestBuilder<T,ID> delete(List<ID> ids) { return new DeleteRequestBuilder<T, ID>(version,type,entityName, (ID[]) ids.toArray(new Serializable[ids.size()])); }
Create a request builder for a delete operation using the specified IDs @param ids The ids to delete @return The request builder
public QueryRequestBuilder<T,ID> query(QueryExpression query) { return new QueryRequestBuilder<T, ID>(version,type,entityName,query.build()); }
Create a request builder for a query operation using the specified query expression @param query The query expression @return The request builder
public QueryRequestBuilder<T,ID> query() { return new QueryRequestBuilder<T, ID>(version,type,entityName); }
Create a request builder for a query operation with no query expression @return The request builder
public GetRequestBuilder<T,ID> find(ID... ids) { return new GetRequestBuilder<T, ID>(version,type,entityName,ids); }
Create a request builder for a find operation using the specified IDs @param ids The ids to find @return The request builder
public UpdateSingleRequestBuilder<T,ID> update(T item) { return new UpdateSingleRequestBuilder<>(version,type,entityName,item); }
Create a request builder for an update operation for a single item @param item The item to update @return The request builder
public UpdateCollectionRequestBuilder<T,ID> update(List<T> items) { return new UpdateCollectionRequestBuilder<T, ID>(version,type,entityName,items); }
Create a request builder for an update operation for multiple items @param items The items to update @return The request builder
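A hedged sketch tying the builder entry points together. The client instance, the Customer/Long type parameters and the qb expression builder are illustrative, and terminal methods such as an execute() call are not shown in the excerpt, so only construction is sketched.
    CreateSingleRequestBuilder<Customer, Long> createOne = client.create(new Customer(1L, "Ada"));
    GetRequestBuilder<Customer, Long>          findTwo   = client.find(1L, 2L);
    DeleteRequestBuilder<Customer, Long>       deleteTwo = client.delete(3L, 4L);
    QueryRequestBuilder<Customer, Long>        byAge     = client.query(qb.le("age", "65"));
    QueryRequestBuilder<Customer, Long>        all       = client.query();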
@Override protected BundlePathMappingBuilder createBundlePathMappingBuilder(String fileExtension, ResourceReaderHandler resourceReaderHandler, GeneratorRegistry generatorRegistry) { return new CompositeBundlePathMappingBuilder(this, fileExtension, generatorRegistry, resourceReaderHandler); }
/* (non-Javadoc) @see net.jawr.web.resource.bundle.JoinableResourceBundleImpl# createBundlePathMappingBuilder(java.lang.String, net.jawr.web.resource.handler.reader.ResourceReaderHandler, net.jawr.web.resource.bundle.generator.GeneratorRegistry)
private void initCompositeBundleMap(List<JoinableResourceBundle> bundles) { for (JoinableResourceBundle bundle : bundles) { if (bundle.isComposite()) { List<JoinableResourceBundle> childBundles = ((CompositeResourceBundle) bundle).getChildBundles(); for (JoinableResourceBundle childBundle : childBundles) { List<JoinableResourceBundle> associatedBundles = compositeResourceBundleMap .get(childBundle.getId()); if (associatedBundles == null) { associatedBundles = new ArrayList<>(); } associatedBundles.add(bundle); compositeResourceBundleMap.put(childBundle.getId(), associatedBundles); } } } }
Initialize the composite bundle map @param bundles the list of resource bundles
private List<JoinableResourceBundle> getBundlesToRebuild() { List<JoinableResourceBundle> bundlesToRebuild = new ArrayList<>(); if (config.getUseSmartBundling()) { for (JoinableResourceBundle bundle : globalBundles) { if (bundle.isDirty()) { bundlesToRebuild.add(bundle); } } for (JoinableResourceBundle bundle : contextBundles) { if (bundle.isDirty()) { bundlesToRebuild.add(bundle); } } } return bundlesToRebuild; }
Returns the bundles which need to be rebuilt @return the bundles which need to be rebuilt
private void initModulesArgs(Map<String, String> resultBundlePathMapping, List<String> args, List<JoinableResourceBundle> bundles, Map<String, JoinableResourceBundle> bundleMap, String modules, List<String> depModulesArgs, List<String> globalBundleDependencies) { // Define Jawr root module // The JAWR_ROOT_MODULE is a fake module to give a root module to the // dependency graph // This is it's only purpose. It is the root dependency for any module // This is used because Google Closure use a unique module as root for // dependency management // in advance mode args.add(JS_ARG); args.add(JAWR_ROOT_MODULE_JS); args.add(MODULE_ARG); args.add(JAWR_ROOT_MODULE_NAME + ":1:"); resultBundlePathMapping.put(JAWR_ROOT_MODULE_NAME, JAWR_ROOT_MODULE_JS); if (StringUtils.isNotEmpty(modules)) { String[] moduleSpecs = modules.split(";"); for (String moduleSpec : moduleSpecs) { int moduleNameSeparatorIdx = moduleSpec.indexOf(":"); if (moduleNameSeparatorIdx < 0) { throw new BundlingProcessException( "The property 'jawr.js.closure.modules' is not properly defined. Please check your configuration."); } // Check module name String bundleName = moduleSpec.substring(0, moduleNameSeparatorIdx); checkBundleName(bundleName, bundleMap); JoinableResourceBundle bundle = bundleMap.get(bundleName); List<String> dependencies = Arrays .asList(moduleSpec.substring(moduleNameSeparatorIdx + 1).split(MODULE_DEPENDENCIES_SEPARATOR)); dependencies.addAll(0, globalBundleDependencies); generateBundleModuleArgs(depModulesArgs, bundleMap, resultBundlePathMapping, bundle, dependencies); // Remove the bundle from the list of bundle to treat bundles.remove(bundle); } } }
Initialize the modules arguments @param resultBundlePathMapping the map for the result bundle path @param args the arguments @param bundles the list of bundles @param bundleMap the bundle map @param modules the modules @param depModulesArgs the dependency modules arguments @param globalBundleDependencies the global bundle dependencies
public IConceptSet lookupConcept(int r) { if (r >= data.length) { return IConceptSet.EMPTY_SET; } if (null == data[r]) { return IConceptSet.EMPTY_SET; } else { return new ReadonlyConceptSet(data[r]); } }
Returns the set of concepts associated to the concept in a {@link Context} by role r. @param r The role @return The set of concepts associated to the concept in the context.
public int[] getRoles() { List<Integer> roles = new ArrayList<Integer>(); for(int i = 0; i < data.length; i++) { if(data[i] != null) { roles.add(i); } } int[] res = new int[roles.size()]; for(int i = 0; i < res.length; i++) { res[i] = roles.get(i); } return res; }
Returns the roles stored in this data structure. @return an array containing the ids of the roles stored in this data structure
@Override public void contextInitialized(ServletContextEvent evt) { ServletContext sc = evt.getServletContext(); // Initialize Jawr JS servlet ServletRegistration.Dynamic sr = sc.addServlet("JavascriptServlet", "net.jawr.web.servlet.JawrServlet"); sr.setInitParameter("configLocation", "/jawr.properties"); sr.addMapping("*.js"); sr.setLoadOnStartup(0); // Initialize Jawr CSS servlet sr = sc.addServlet("CssServlet", "net.jawr.web.servlet.JawrServlet"); sr.setInitParameter("configLocation", "/jawr.properties"); sr.setInitParameter("type", JawrConstant.CSS_TYPE); sr.addMapping("*.css"); sr.setLoadOnStartup(1); // Initialize Jawr Binary servlet sr = sc.addServlet("BinaryServlet", "net.jawr.web.servlet.JawrServlet"); sr.setInitParameter("configLocation", "/jawr.properties"); sr.setInitParameter("type", JawrConstant.BINARY_TYPE); sr.addMapping("*.jpg", "*.png", "*.gif", "*.woff", "*.ttf", "*.svg", "*.eot"); sr.setLoadOnStartup(0); }
/* (non-Javadoc) @see javax.servlet.ServletContextListener#contextInitialized(javax.servlet .ServletContextEvent)
@Override public boolean writeResponseHeader(String requestedPath, HttpServletRequest request, HttpServletResponse response) throws IOException { LOGGER.debug("Illegal access to bundle : " + requestedPath + ". The hash code doesn't match the existing one."); response.sendError(HttpServletResponse.SC_NOT_FOUND); return true; }
/* (non-Javadoc) @see net.jawr.web.servlet.IllegalBundleRequestHandler#writeResponseHeader( java.lang.String, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)
@Override public InputSource resolveStylesheet(String identifier, ScssStylesheet parentStylesheet) { return resolver.resolve(parentStylesheet, identifier); }
/* (non-Javadoc) @see com.vaadin.sass.internal.ScssStylesheet#resolveStylesheet(java.lang. String, com.vaadin.sass.internal.ScssStylesheet)
public long getContentLength() { try { return getInternalResponse().body().contentLength(); } catch (NullPointerException e){ logger.error("Failed to get the response content length from " + getRequestURL() + ". Error: " + e.getMessage()); return 0; } }
This method gets the Content-Length of the response body. @return The content length of the response.
public String getResponseText() { if (this.bodyBytes == null) { return ""; } Charset charset = contentType != null ? contentType.charset(UTF_8) : UTF_8; try { return new String(this.bodyBytes, charset.name()); } catch (UnsupportedEncodingException e) { logger.warn("Failed to extract text from response body. Error: " + e.getMessage()); return null; } }
This method parses the response body as a String. If this method is called, then subsequent calls to {@link #getResponseByteStream()} or {@link #getResponseBytes()} will return null unless the {@link Request} was made using a <code>download()</code> method. @return The body of the response as a String. Empty string if there is no body.
public JSONObject getResponseJSON() { String responseText = getResponseText(); if(responseText == null || responseText.length() == 0){ return null; } try { return new JSONObject(responseText); } catch (JSONException e) { logger.warn("Failed to extract JSON from response body. Error: " + e.getMessage()); return null; } }
This method parses the response body as a JSONObject. If this method is called, then subsequent calls to {@link #getResponseByteStream()} or {@link #getResponseBytes()} will return null unless the {@link Request} was made using a <code>download()</code> method. @return The body of the response as a JSONObject.
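A consumption sketch for these accessors; the response object and logger are assumed to be in scope, and only methods documented in this class are used.
    JSONObject json = response.getResponseJSON();
    if (json == null) {
        // not JSON: fall back to the raw text (empty string when there is no body)
        logger.warn("Non-JSON body: " + response.getResponseText());
    }
    String contentType = response.getFirstHeader("Content-Type");
    long length = response.getContentLength();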
public byte[] getResponseBytes() { if (responseByteStream != null) { try { return IOUtils.toByteArray(responseByteStream); } catch (IOException e) { logger.warn("Failed to extract byte array from response body. Error: " + e.getMessage()); return null; } } return this.bodyBytes; }
This method gets the bytes of the response body. If this method is called, then subsequent calls to {@link #getResponseByteStream()} or {@link #getResponseBytes()} will return null unless the {@link Request} was made using a <code>download()</code> method. @return the bytes of the response body. Will be null if there is no body.
public Map<String, List<String>> getHeaders() { if (headers == null) { return null; } return headers.toMultimap(); }
Get the HTTP headers from the response. @return A map with all the headers, and the corresponding values for each one.
public List<String> getHeader(String name) { if (headers == null) { return null; } return headers.values(name); }
Get the header values for the given header name, if it exists. There can be more than one value for a given header name. @param name the name of the header to get @return the values of the given header name
public String getFirstHeader(String name) { List<String> headerValues = getHeader(name); if (headerValues == null || headerValues.size() == 0) { return null; } return headerValues.get(0); }
Get the first header value for the given header name, if it exists. @param name the name of the header to get @return the first value of the given header name
@Override public void meld(MergeableHeap<K> other) { BinaryTreeSoftHeap<K> h = (BinaryTreeSoftHeap<K>) other; // check same comparator if (comparator != null) { if (h.comparator == null || !h.comparator.equals(comparator)) { throw new IllegalArgumentException("Cannot meld heaps using different comparators!"); } } else if (h.comparator != null) { throw new IllegalArgumentException("Cannot meld heaps using different comparators!"); } if (rankLimit != h.rankLimit) { throw new IllegalArgumentException("Cannot meld heaps with different error rates!"); } // perform the meld mergeInto(h.rootList.head, h.rootList.tail); size += h.size; // clear other h.size = 0; h.rootList.head = null; h.rootList.tail = null; }
{@inheritDoc} @throws IllegalArgumentException if {@code other} has a different error rate
@Override public void insert(K key) { /* * Create a single element heap */ SoftHandle<K> n = new SoftHandle<K>(key); TreeNode<K> treeNode = new TreeNode<K>(n); RootListNode<K> rootListNode = new RootListNode<K>(treeNode); /* * Merge new list into old list */ mergeInto(rootListNode, rootListNode); size++; }
{@inheritDoc}
@Override public K findMin() { if (size == 0) { throw new NoSuchElementException(); } return rootList.head.suffixMin.root.cHead.key; }
{@inheritDoc}
@Override public K deleteMin() { if (size == 0) { throw new NoSuchElementException(); } // find tree with minimum RootListNode<K> minRootListNode = rootList.head.suffixMin; TreeNode<K> root = minRootListNode.root; // remove from list SoftHandle<K> result = root.cHead; root.cHead = result.next; root.cSize--; // replenish keys if needed if (root.cSize <= targetSize(root.rank) / 2) { if (root.left != null || root.right != null) { // get keys from children sift(root); updateSuffixMin(minRootListNode); } else if (root.cSize == 0) { // no children and empty list, just remove the tree RootListNode<K> minRootPrevListNode = minRootListNode.prev; if (minRootPrevListNode != null) { minRootPrevListNode.next = minRootListNode.next; } else { rootList.head = minRootListNode.next; } if (minRootListNode.next != null) { minRootListNode.next.prev = minRootPrevListNode; } else { rootList.tail = minRootPrevListNode; } minRootListNode.prev = null; minRootListNode.next = null; updateSuffixMin(minRootPrevListNode); } } result.next = null; size--; return result.key; }
{@inheritDoc}
@SuppressWarnings("unchecked") private void sift(TreeNode<K> x) { Deque<TreeNode<K>> stack = new ArrayDeque<TreeNode<K>>(); stack.push(x); while (!stack.isEmpty()) { x = stack.peek(); TreeNode<K> xLeft = x.left; TreeNode<K> xRight = x.right; // if leaf or list has enough elements, skip if (xLeft == null && xRight == null || x.cSize >= targetSize(x.rank)) { stack.pop(); continue; } // swap if needed if (xLeft == null || xRight != null && ((comparator == null && ((Comparable<? super K>) xLeft.cKey).compareTo(xRight.cKey) > 0) || (comparator != null && comparator.compare(xLeft.cKey, xRight.cKey) > 0))) { x.left = xRight; x.right = xLeft; xLeft = x.left; xRight = x.right; } // grab non-empty list from left child xLeft.cTail.next = x.cHead; x.cHead = xLeft.cHead; if (x.cTail == null) { x.cTail = xLeft.cTail; } x.cSize += xLeft.cSize; // set new corrupted key x.cKey = xLeft.cKey; // clear left child list xLeft.cKey = null; xLeft.cHead = null; xLeft.cTail = null; xLeft.cSize = 0; // recursively to left child if not a leaf if (xLeft.left != null || xLeft.right != null) { stack.push(xLeft); } else { x.left = null; } } }
Sift elements from children nodes until the current node has enough elements in its list. @param x the node
private TreeNode<K> combine(TreeNode<K> x, TreeNode<K> y) { TreeNode<K> z = new TreeNode<K>(); z.left = x; z.right = y; z.rank = x.rank + 1; sift(z); return z; }
Combine two trees into a new tree. @param x the first tree @param y the second tree @return the combined tree
@Override protected StringBuffer doPostProcessBundle(BundleProcessingStatus status, StringBuffer bundleData) throws IOException { JoinableResourceBundle bundle = status.getCurrentBundle(); Charset charset = status.getJawrConfig().getResourceCharset(); if (bundle.getLicensesPathList().isEmpty()) return bundleData; ByteArrayOutputStream baOs = new ByteArrayOutputStream(); WritableByteChannel wrChannel = Channels.newChannel(baOs); Writer writer = Channels.newWriter(wrChannel, charset.name()); try (BufferedWriter bwriter = new BufferedWriter(writer)) { for (Iterator<String> it = bundle.getLicensesPathList().iterator(); it.hasNext();) { String path = it.next(); if (LOGGER.isDebugEnabled()) LOGGER.debug("Adding license file: " + path); Reader rd = null; try { rd = status.getRsReader().getResource(bundle, path); } catch (ResourceNotFoundException e) { throw new BundlingProcessException( "Unexpected ResourceNotFoundException when reading a license file [" + path + "]"); } try (BufferedReader bRd = new BufferedReader(rd)) { // Make a buffered reader, to read line by line. String line = bRd.readLine(); // Write each line and the corresponding new line. while (line != null) { bwriter.write(line); if (((line = bRd.readLine()) != null) || it.hasNext()) bwriter.newLine(); } } } } return new StringBuffer(baOs.toString(charset.name())).append(bundleData); }
/* (non-Javadoc) @see net.jawr.web.resource.bundle.postprocess.impl.AbstractChainedResourceBundlePostProcessor#doPostProcessBundle(BundleProcessingStatus, java.lang.StringBuffer)
@Override public ResourceBundlePostProcessor buildDefaultProcessorChain() { ChainedResourceBundlePostProcessor processor = new CSSMinPostProcessor(); processor.addNextProcessor(buildLicensesProcessor()); return processor; }
/* (non-Javadoc) @see net.jawr.web.resource.bundle.factory.processor.PostProcessorChainFactory# buildDefaultProcessorChain()
@Override protected AbstractChainedResourceBundlePostProcessor buildProcessorByKey(String processorKey) { if (PostProcessFactoryConstant.LICENSE_INCLUDER.equals(processorKey)) return buildLicensesProcessor(); else if (PostProcessFactoryConstant.CSS_MINIFIER.equals(processorKey)) return new CSSMinPostProcessor(); else if (PostProcessFactoryConstant.CSS_IMPORT.equals(processorKey)) return new CSSImportPostProcessor(); else if (PostProcessFactoryConstant.CSS_CHARSET_FILTER.equals(processorKey)) return new CssCharsetFilterPostProcessor(); else if (PostProcessFactoryConstant.CSS_COMBINE_MEDIA.equals(processorKey)) return new CSSCombineMediaPostProcessor(); else if (PostProcessFactoryConstant.URL_PATH_REWRITER.equals(processorKey)) return new CSSURLPathRewriterPostProcessor(); else if (PostProcessFactoryConstant.BASE64_IMAGE_ENCODER.equals(processorKey)) return new Base64ImageEncoderPostProcessor(); else if (PostProcessFactoryConstant.YUI_COMPRESSOR.equals(processorKey)) return new YUICSSCompressor(); else if (PostProcessFactoryConstant.AUTOPREFIXER.equals(processorKey)) return new AutoPrefixerPostProcessor(); else throw new IllegalArgumentException("The supplied key [" + processorKey + "] is not bound to any ResourceBundlePostProcessor. Please check the documentation for valid keys. "); }
/* (non-Javadoc) @see net.jawr.web.resource.bundle.factory.processor.PostProcessorChainFactory# getPostProcessor(java.lang.String)
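A hedged sketch of composing a chain by hand, using only types and methods that appear in the factory code above; imports and the surrounding factory class are omitted.

// Minify CSS, inline @import statements, then append license comments.
ChainedResourceBundlePostProcessor chain = new CSSMinPostProcessor();
chain.addNextProcessor(new CSSImportPostProcessor());
chain.addNextProcessor(buildLicensesProcessor());   // the factory helper referenced above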
@Override @ConstantTime(amortized = true) public void insert(K key) { if (key == null) { throw new IllegalArgumentException("Null keys not permitted"); } if (compare(key, maxKey) > 0) { throw new IllegalArgumentException("Key is more than the maximum allowed key"); } if (compare(key, lastDeletedKey) < 0) { throw new IllegalArgumentException("Invalid key. Monotone heap."); } int b = computeBucket(key, lastDeletedKey); buckets[b].add(key); // update current minimum cache if (currentMin == null || compare(key, currentMin) < 0) { currentMin = key; currentMinBucket = b; currentMinPos = buckets[b].size() - 1; } size++; }
{@inheritDoc} @throws IllegalArgumentException if the key is null @throws IllegalArgumentException if the key is less than the minimum allowed key @throws IllegalArgumentException if the key is more than the maximum allowed key @throws IllegalArgumentException if the key is less than the last deleted key (or the minimum key allowed if no key has been deleted)
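A sketch of the monotone contract enforced above. The class name and its [minKey, maxKey] constructor are illustrative assumptions; only the behaviour of insert() and deleteMin() is taken from the code.

// Hypothetical radix heap over Integer keys restricted to the range [0, 100].
RadixHeap<Integer> heap = new RadixHeap<Integer>(0, 100);
heap.insert(13);
heap.insert(50);
heap.deleteMin();    // returns 13; the last deleted key is now 13
heap.insert(40);     // fine: 40 >= last deleted key (monotone usage)
heap.insert(7);      // would throw IllegalArgumentException (7 < last deleted key)
heap.insert(200);    // would throw IllegalArgumentException (200 > maxKey)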
@Override @LogarithmicTime(amortized = true) public K deleteMin() { if (size == 0) { throw new NoSuchElementException(); } // updated last deleted key lastDeletedKey = currentMin; if (currentMinBucket == 0) { buckets[currentMinBucket].remove(currentMinPos); // update minimum cache currentMin = null; currentMinBucket = EMPTY; currentMinPos = EMPTY; if (--size > 0) { findAndCacheMinimum(0); } } else { K newMin = null; int newMinBucket = EMPTY; int newMinPos = EMPTY; // redistribute all elements based on new lastDeletedKey int pos = 0; for (K val : buckets[currentMinBucket]) { if (pos != currentMinPos) { int b = computeBucket(val, lastDeletedKey); assert b < currentMinBucket; buckets[b].add(val); if (newMin == null || compare(val, newMin) < 0) { newMin = val; newMinBucket = b; newMinPos = buckets[b].size() - 1; } } ++pos; } buckets[currentMinBucket].clear(); // update minimum cache currentMin = newMin; currentMinBucket = newMinBucket; currentMinPos = newMinPos; if (--size > 0) { findAndCacheMinimum(currentMinBucket + 1); } } return lastDeletedKey; }
{@inheritDoc} The cost of this operation is amortized O(logC) assuming the heap contains keys in the range [0, C] or equivalently [a, a+C].
@Override public void clear() { for (List<K> bucket : buckets) { bucket.clear(); } size = 0; lastDeletedKey = minKey; currentMin = null; currentMinBucket = EMPTY; currentMinPos = EMPTY; }
{@inheritDoc}
protected int computeBucket(K key, K minKey) { return 1 + Math.min(msd(key, minKey), buckets.length - 2); }
Compute the bucket of a key based on a minimum key. @param key the key @param minKey the minimum key @return the bucket where the key should go
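A worked instance of the formula above for non-negative integer keys, assuming msd(a, b) is the index of the most significant bit in which a and b differ (and a negative value when a == b); the helper itself is defined elsewhere in the class.

// key = 13 (binary 1101), minKey = 8 (binary 1000): they first differ at bit 2,
// so msd(13, 8) == 2. With, say, 34 buckets the key goes to bucket 1 + min(2, 32) == 3.
int msd = 63 - Long.numberOfLeadingZeros(13L ^ 8L);   // == 2
int bucket = 1 + Math.min(msd, 34 - 2);               // == 3
// A key equal to minKey has no differing bit (msd < 0) and lands in bucket 0,
// which is why deleteMin() above can simply empty bucket 0 without redistributing it.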
private void findAndCacheMinimum(int firstBucket) { if (currentMin == null) { // find first non-empty bucket currentMinBucket = EMPTY; for (int i = firstBucket; i < this.buckets.length; i++) { if (!buckets[i].isEmpty()) { currentMinBucket = i; break; } } // find new minimum and its position (beware of cached values) currentMinPos = EMPTY; if (currentMinBucket >= 0) { int pos = 0; for (K val : buckets[currentMinBucket]) { if (currentMin == null || compare(val, currentMin) < 0) { currentMin = val; currentMinPos = pos; } ++pos; } } } }
Helper method for finding and caching the minimum. Assumes that the heap contains at least one element. @param firstBucket start looking for elements from this bucket
public int doStartTag() throws JspException { String string = null; if (srcExpr != null) { string = (String) ExpressionEvaluatorManager.evaluate("srcExpr", srcExpr, String.class, this, pageContext); setSrc(string); } if (typeExpr != null) { string = (String) ExpressionEvaluatorManager.evaluate("typeExpr", typeExpr, String.class, this, pageContext); setType(string); } if (asyncExpr != null) { string = (String) ExpressionEvaluatorManager.evaluate("asyncExpr", asyncExpr, String.class, this, pageContext); setAsync(string); } if (deferExpr != null) { string = (String) ExpressionEvaluatorManager.evaluate("deferExpr", deferExpr, String.class, this, pageContext); setDefer(string); } if (useRandomParamExpr != null) { string = (String) ExpressionEvaluatorManager.evaluate("useRandomParamExpr", useRandomParamExpr, String.class, this, pageContext); setUseRandomParam(string); } return super.doStartTag(); }
/* (non-Javadoc) @see net.jawr.web.taglib.AbstractResourceBundleTag#doStartTag()
@Override public void release() { super.release(); setTypeExpr(null); setSrcExpr(null); setUseRandomParamExpr(null); setAsyncExpr(null); setDeferExpr(null); }
/* (non-Javadoc) @see javax.servlet.jsp.tagext.TagSupport#release()
@Override @LogarithmicTime(amortized = true) @SuppressWarnings("unchecked") public AddressableHeap.Handle<K, V> insert(K key, V value) { if (other != this) { throw new IllegalStateException("A heap cannot be used after a meld"); } if (key == null) { throw new NullPointerException("Null keys not permitted"); } Node<K, V> n = createNode(key, value); // easy special cases if (size == 0) { root = n; size = 1; return n; } else if (size == 1) { int c; if (comparator == null) { c = ((Comparable<? super K>) key).compareTo(root.key); } else { c = comparator.compare(key, root.key); } if (c <= 0) { n.o_c = root; root.y_s = n; root = n; } else { root.o_c = n; n.y_s = root; } size = 2; return n; } if (comparator == null) { root = union(root, n); } else { root = unionWithComparator(root, n); } size++; return n; }
{@inheritDoc}
@Override @ConstantTime public AddressableHeap.Handle<K, V> findMin() { if (size == 0) { throw new NoSuchElementException(); } return root; }
{@inheritDoc}
@Override @LogarithmicTime(amortized = true) public Handle<K, V> deleteMin() { if (size == 0) { throw new NoSuchElementException(); } Node<K, V> oldRoot = root; // easy special cases if (size == 1) { root = null; size = 0; return oldRoot; } else if (size == 2) { root = root.o_c; root.o_c = null; root.y_s = null; size = 1; oldRoot.o_c = null; return oldRoot; } root = unlinkAndUnionChildren(root); size--; return oldRoot; }
{@inheritDoc}
protected void delete(Node<K, V> n) { if (n == root) { deleteMin(); return; } if (n.y_s == null) { throw new IllegalArgumentException("Invalid handle!"); } // disconnect and union children of node Node<K, V> childTree = unlinkAndUnionChildren(n); // find parent Node<K, V> p = getParent(n); // link children tree in place of node if (childTree == null) { // no children, just unlink from parent if (p.o_c == n) { if (n.y_s == p) { p.o_c = null; } else { p.o_c = n.y_s; } } else { p.o_c.y_s = p; } } else { // link children tree to parent if (p.o_c == n) { childTree.y_s = n.y_s; p.o_c = childTree; } else { p.o_c.y_s = childTree; childTree.y_s = p; } } size--; n.o_c = null; n.y_s = null; }
Delete a node from the heap. @param n the node
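A hedged usage sketch of the addressable operations backing the protected delete() above. The concrete heap class and its no-argument constructor are assumptions; handle methods such as getKey() and delete() are assumed to come from the AddressableHeap.Handle interface and to delegate to the code shown here.

SkewHeap<Integer, String> heap = new SkewHeap<Integer, String>();   // assumed class name
AddressableHeap.Handle<Integer, String> h = heap.insert(20, "a");
heap.insert(10, "b");
heap.insert(30, "c");
heap.findMin().getKey();     // 10
h.delete();                  // removes (20, "a") through its handle
heap.deleteMin().getKey();   // 10
heap.deleteMin().getKey();   // 30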
protected Node<K, V> unlinkAndUnionChildren(Node<K, V> n) { // disconnect children Node<K, V> child1 = n.o_c; if (child1 == null) { return null; } n.o_c = null; Node<K, V> child2 = child1.y_s; if (child2 == n) { child2 = null; } else { child2.y_s = null; } child1.y_s = null; if (comparator == null) { return union(child1, child2); } else { return unionWithComparator(child1, child2); } }
Unlink the two children of a node and union them forming a new tree. @param n the node @return the tree which is formed by the two children subtrees of the node
protected Node<K, V> unlinkRightChild(Node<K, V> n) { Node<K, V> left = n.o_c; if (left == null || left.y_s == n) { return null; } Node<K, V> right = left.y_s; left.y_s = n; right.y_s = null; return right; }
Unlink the right child of a node. @param n the node @return the right child after unlinking, or null if the node has no right child
@SuppressWarnings("unchecked") protected Node<K, V> union(Node<K, V> root1, Node<K, V> root2) { if (root1 == null) { return root2; } else if (root2 == null) { return root1; } Node<K, V> newRoot; Node<K, V> cur; // find initial int c = ((Comparable<? super K>) root1.key).compareTo(root2.key); if (c <= 0) { newRoot = root1; root1 = unlinkRightChild(root1); } else { newRoot = root2; root2 = unlinkRightChild(root2); } cur = newRoot; // merge while (root1 != null && root2 != null) { c = ((Comparable<? super K>) root1.key).compareTo(root2.key); if (c <= 0) { // link as left child of cur if (cur.o_c == null) { root1.y_s = cur; } else { root1.y_s = cur.o_c; } cur.o_c = root1; cur = root1; root1 = unlinkRightChild(root1); } else { // link as left child of cur if (cur.o_c == null) { root2.y_s = cur; } else { root2.y_s = cur.o_c; } cur.o_c = root2; cur = root2; root2 = unlinkRightChild(root2); } } while (root1 != null) { // link as left child of cur if (cur.o_c == null) { root1.y_s = cur; } else { root1.y_s = cur.o_c; } cur.o_c = root1; cur = root1; root1 = unlinkRightChild(root1); } while (root2 != null) { // link as left child of cur if (cur.o_c == null) { root2.y_s = cur; } else { root2.y_s = cur.o_c; } cur.o_c = root2; cur = root2; root2 = unlinkRightChild(root2); } return newRoot; }
Top-down union of two skew heaps. @param root1 the root of the first heap @param root2 the root of the second heap @return the new root of the merged heap
protected Node<K, V> unionWithComparator(Node<K, V> root1, Node<K, V> root2) { if (root1 == null) { return root2; } else if (root2 == null) { return root1; } Node<K, V> newRoot; Node<K, V> cur; // find initial int c = comparator.compare(root1.key, root2.key); if (c <= 0) { newRoot = root1; root1 = unlinkRightChild(root1); } else { newRoot = root2; root2 = unlinkRightChild(root2); } cur = newRoot; // merge while (root1 != null && root2 != null) { c = comparator.compare(root1.key, root2.key); if (c <= 0) { // link as left child of cur if (cur.o_c == null) { root1.y_s = cur; } else { root1.y_s = cur.o_c; } cur.o_c = root1; cur = root1; root1 = unlinkRightChild(root1); } else { // link as left child of cur if (cur.o_c == null) { root2.y_s = cur; } else { root2.y_s = cur.o_c; } cur.o_c = root2; cur = root2; root2 = unlinkRightChild(root2); } } while (root1 != null) { // link as left child of cur if (cur.o_c == null) { root1.y_s = cur; } else { root1.y_s = cur.o_c; } cur.o_c = root1; cur = root1; root1 = unlinkRightChild(root1); } while (root2 != null) { // link as left child of cur if (cur.o_c == null) { root2.y_s = cur; } else { root2.y_s = cur.o_c; } cur.o_c = root2; cur = root2; root2 = unlinkRightChild(root2); } return newRoot; }
Top-down union of two skew heaps with comparator. @param root1 the root of the first heap @param root2 the root of the second heap @return the new root of the merged heap
public List<String> get(String key) { if(key == null) throw new IllegalArgumentException("Null keys not allowed."); return map.get(key); }
Return the values stored for the supplied key @param key The key @return The values stored for this key, null if there are no values stored
public String getFirst(String key) { if(key == null) throw new IllegalArgumentException("Null keys not allowed."); List<String> vals = map.get(key); if(vals == null || vals.size() == 0) return null; return vals.get(0); }
Return the first value stored for the supplied key @param key The key @return the first value stored for the key, null if there are no values stored
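A short usage sketch; headers below stands for an instance of the class above, e.g. one populated from an HTTP response with {"Accept": ["text/html", "application/json"]}.

List<String> accepts = headers.get("Accept");      // ["text/html", "application/json"]
String first = headers.getFirst("Accept");         // "text/html"
String missing = headers.getFirst("X-Missing");    // null: no values stored
headers.get(null);                                 // throws IllegalArgumentException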