code
stringlengths 67
466k
| docstring
stringlengths 1
13.2k
|
---|---|
/**
 * Collapses a multiword preposition of the following form:
 * prep|advmod|dep|amod(gov, mwp0), dep(mwp0, mwp1), pobj|pcomp(mwp1, compl)
 * or pobj|pcomp(mwp0, compl)  ->  prep_mwp0_mwp1(gov, compl).
 * Only when all three parts of the pattern are found are the old relations
 * marked KILL and replaced by the collapsed relation; {@code list} is then
 * rebuilt from {@code newTypedDeps}.
 *
 * @param list         typed dependencies to work on (mutated in place)
 * @param newTypedDeps scratch collection the new dependency list is built in
 * @param str_mwp0     first part of the multiword preposition, used to name the collapsed relation
 * @param str_mwp1     second part of the multiword preposition, used to name the collapsed relation
 * @param w_mwp0       first word of the multiword preposition to look for
 * @param w_mwp1       second word of the multiword preposition to look for
 */
private static void collapseMultiWordPrep(Collection<TypedDependency> list, Collection<TypedDependency> newTypedDeps, String str_mwp0, String str_mwp1, String w_mwp0, String w_mwp1) {
  // first find the multiword preposition: dep(mwp[0], mwp[1]);
  // the two words must be adjacent in the sentence (difference of indexes == 1)
  TreeGraphNode mwp0 = null;
  TreeGraphNode mwp1 = null;
  TypedDependency dep = null;
  for (TypedDependency td : list) {
    if (td.gov().value().equalsIgnoreCase(w_mwp0) && td.dep().value().equalsIgnoreCase(w_mwp1) && Math.abs(td.gov().index() - td.dep().index()) == 1) {
      mwp0 = td.gov();
      mwp1 = td.dep();
      dep = td;
    }
  }
  // now search for prep|advmod|dep|amod(gov, mwp0)
  TreeGraphNode governor = null;
  TypedDependency prep = null;
  for (TypedDependency td1 : list) {
    if (td1.dep() == mwp0 && (td1.reln() == PREPOSITIONAL_MODIFIER || td1.reln() == ADVERBIAL_MODIFIER || td1.reln() == ADJECTIVAL_MODIFIER || td1.reln() == DEPENDENT || td1.reln() == MULTI_WORD_EXPRESSION)) {
      // we found prep|advmod|dep|amod(gov, mwp0)
      prep = td1;
      governor = prep.gov();
    }
  }
  // search for the complement: pobj|pcomp(mwp1, X) or pobj|pcomp(mwp0, X).
  // There may be more than one in weird constructions; if there are several,
  // take the one with the LOWEST index!
  TypedDependency pobj = null;
  TypedDependency newtd = null;
  for (TypedDependency td2 : list) {
    if ((td2.gov() == mwp1 || td2.gov() == mwp0) && (td2.reln() == PREPOSITIONAL_OBJECT || td2.reln() == PREPOSITIONAL_COMPLEMENT)) {
      if (pobj == null || pobj.dep().index() > td2.dep().index()) {
        pobj = td2;
        // create the new grammatical relation: prepc_ for pcomp, prep_ otherwise
        GrammaticalRelation gr;
        if (td2.reln() == PREPOSITIONAL_COMPLEMENT) {
          gr = EnglishGrammaticalRelations.getPrepC(str_mwp0 + '_' + str_mwp1);
        } else {
          gr = EnglishGrammaticalRelations.getPrep(str_mwp0 + '_' + str_mwp1);
        }
        if (governor != null) {
          newtd = new TypedDependency(gr, governor, pobj.dep());
        }
      }
    }
  }
  // only if we found all three parts: mark the old relations KILL,
  // add the new collapsed relation, and rebuild the list
  if (prep != null && dep != null && pobj != null && newtd != null) {
    if (DEBUG) {
      System.err.println("Removing " + prep + ", " + dep + ", and " + pobj);
      System.err.println(" and adding " + newtd);
    }
    prep.setReln(KILL);
    dep.setReln(KILL);
    pobj.setReln(KILL);
    newTypedDeps.add(newtd);
    // now remove typed dependencies with reln "kill"
    // and promote possible orphans
    for (TypedDependency td1 : list) {
      if (td1.reln() != KILL) {
        if (td1.gov() == mwp0 || td1.gov() == mwp1) {
          // CDM: Thought of adding this in Jan 2010, but it causes
          // conflicting relations tmod vs. pobj. Needs more thought;
          // maybe restrict pobj to first NP in PP, and allow tmod for a later
          // one?
          if (td1.reln() == TEMPORAL_MODIFIER) {
            // special case when an extra NP-TMP is buried in a PP for
            // "during the same period last year"
            td1.setGov(pobj.dep());
          } else {
            td1.setGov(governor);
          }
        }
        if (!newTypedDeps.contains(td1)) {
          newTypedDeps.add(td1);
        }
      }
    }
    list.clear();
    list.addAll(newTypedDeps);
  }
} | Collapse multiword preposition of the following format:
prep|advmod|dep|amod(gov, mwp0) dep(mwp0,mwp1) pobj|pcomp(mwp1, compl) or
pobj|pcomp(mwp0, compl) -> prep_mwp0_mwp1(gov, compl)
<p/>
@param list
List of typedDependencies to work on,
@param newTypedDeps
List of typedDependencies that we construct
@param str_mwp0
First part of the multiword preposition to construct the collapsed
preposition
@param str_mwp1
Second part of the multiword preposition to construct the
collapsed preposition
@param w_mwp0
First part of the multiword preposition that we look for
@param w_mwp1
Second part of the multiword preposition that we look for |
/**
 * Collapses a multiword preposition of the following form:
 * advmod|prt(gov, mwp[0]), prep(gov, mwp[1]), pobj|pcomp(mwp[1], compl)
 * ->  prep_mwp[0]_mwp[1](gov, compl).
 * Each known two-word preposition in MULTIWORD_PREPS is tried in turn; only
 * when all parts of the pattern are found is it collapsed and {@code list}
 * rebuilt in place.
 *
 * @param list typed dependencies to work on (mutated in place)
 */
private static void collapse2WPbis(Collection<TypedDependency> list) {
  Collection<TypedDependency> newTypedDeps = new ArrayList<TypedDependency>();
  for (String[] mwp : MULTIWORD_PREPS) {
    TreeGraphNode mwp0 = null;
    TreeGraphNode mwp1 = null;
    TreeGraphNode governor = null;
    TypedDependency prep = null;
    TypedDependency dep = null;
    TypedDependency pobj = null;
    TypedDependency newtd = null;
    // first find the first part of the multiword preposition: advmod|prt(gov, mwp[0])
    for (TypedDependency td : list) {
      if (td.dep().value().equalsIgnoreCase(mwp[0]) && (td.reln() == PHRASAL_VERB_PARTICLE || td.reln() == ADVERBIAL_MODIFIER || td.reln() == DEPENDENT || td.reln() == MULTI_WORD_EXPRESSION)) {
        // we found advmod(gov, mwp0) or prt(gov, mwp0)
        governor = td.gov();
        mwp0 = td.dep();
        dep = td;
      }
    }
    // now search for the second part: prep(gov, mwp1);
    // the two words of the mwp must be adjacent in the sentence
    // (difference of indexes == 1)
    for (TypedDependency td1 : list) {
      if (mwp0 != null && td1.dep().value().equalsIgnoreCase(mwp[1]) && td1.gov() == governor && td1.reln() == PREPOSITIONAL_MODIFIER && Math.abs(td1.dep().index() - mwp0.index()) == 1) { // we found prep(gov, mwp1)
        mwp1 = td1.dep();
        prep = td1;
      }
    }
    // search for the complement: pobj|pcomp(mwp1, X)
    for (TypedDependency td2 : list) {
      if (td2.gov() == mwp1 && td2.reln() == PREPOSITIONAL_OBJECT) {
        pobj = td2;
        // create the new prep_ relation
        GrammaticalRelation gr = EnglishGrammaticalRelations.getPrep(mwp[0] + '_' + mwp[1]);
        if (governor != null) {
          newtd = new TypedDependency(gr, governor, pobj.dep());
        }
      }
      if (td2.gov() == mwp1 && td2.reln() == PREPOSITIONAL_COMPLEMENT) {
        pobj = td2;
        // create the new prepc_ relation
        GrammaticalRelation gr = EnglishGrammaticalRelations.getPrepC(mwp[0] + '_' + mwp[1]);
        if (governor != null) {
          newtd = new TypedDependency(gr, governor, pobj.dep());
        }
      }
    }
    // only if we found the three parts: mark the old relations KILL,
    // add the new collapsed relation, and rebuild the list
    if (prep != null && pobj != null && newtd != null) {
      prep.setReln(KILL);
      dep.setReln(KILL);
      pobj.setReln(KILL);
      newTypedDeps.add(newtd);
      // now remove typed dependencies with reln "kill"
      // and promote possible orphans
      for (TypedDependency td1 : list) {
        if (td1.reln() != KILL) {
          if (td1.gov() == mwp0 || td1.gov() == mwp1) {
            td1.setGov(governor);
          }
          if (!newTypedDeps.contains(td1)) {
            newTypedDeps.add(td1);
          }
        }
      }
      list.clear();
      list.addAll(newTypedDeps);
    }
  }
} | Collapse multi-words preposition of the following format: advmod|prt(gov,
mwp[0]) prep(gov,mwp[1]) pobj|pcomp(mwp[1], compl) ->
prep_mwp[0]_mwp[1](gov, compl)
<p/>
@param list
List of typedDependencies to work on |
/**
 * Collapses a 3-word preposition in two annotation styles:
 * <br/> NP-style: prep(gov, mwp0), X(mwp0, mwp1), X(mwp1, mwp2),
 * pobj|pcomp(mwp2, compl)  ->  prep_mwp[0]_mwp[1]_mwp[2](gov, compl)
 * <br/> flat: prep(gov, mwp0), X(mwp0, mwp1), X(mwp0, mwp2),
 * pobj|pcomp(mwp0, compl)  ->  prep_mwp[0]_mwp[1]_mwp[2](gov, compl)
 *
 * @param list typed dependencies to work on (mutated in place)
 */
private static void collapse3WP(Collection<TypedDependency> list) {
  Collection<TypedDependency> newTypedDeps = new ArrayList<TypedDependency>();
  // first, loop over the prepositions for NP annotation
  for (String[] mwp : THREEWORD_PREPS) {
    TreeGraphNode mwp0 = null;
    TreeGraphNode mwp1 = null;
    TreeGraphNode mwp2 = null;
    TypedDependency dep1 = null;
    TypedDependency dep2 = null;
    // first find the first part of the 3-word preposition: dep(mwp[0], mwp[1]);
    // the two words must be adjacent in the sentence (difference of indexes == 1)
    for (TypedDependency td : list) {
      if (td.gov().value().equalsIgnoreCase(mwp[0]) && td.dep().value().equalsIgnoreCase(mwp[1]) && Math.abs(td.gov().index() - td.dep().index()) == 1) {
        mwp0 = td.gov();
        mwp1 = td.dep();
        dep1 = td;
      }
    }
    // find the second part of the 3-word preposition: dep(mwp[1], mwp[2]);
    // the two words must be adjacent in the sentence (difference of indexes == 1)
    for (TypedDependency td : list) {
      if (td.gov() == mwp1 && td.dep().value().equalsIgnoreCase(mwp[2]) && Math.abs(td.gov().index() - td.dep().index()) == 1) {
        mwp2 = td.dep();
        dep2 = td;
      }
    }
    if (dep1 != null && dep2 != null) {
      // now search for prep(gov, mwp0)
      TreeGraphNode governor = null;
      TypedDependency prep = null;
      for (TypedDependency td1 : list) {
        if (td1.dep() == mwp0 && td1.reln() == PREPOSITIONAL_MODIFIER) { // we found prep(gov, mwp0)
          prep = td1;
          governor = prep.gov();
        }
      }
      // search for the complement: pobj|pcomp(mwp2, X)
      TypedDependency pobj = null;
      TypedDependency newtd = null;
      for (TypedDependency td2 : list) {
        if (td2.gov() == mwp2 && td2.reln() == PREPOSITIONAL_OBJECT) {
          pobj = td2;
          // create the new prep_ relation
          GrammaticalRelation gr = EnglishGrammaticalRelations.getPrep(mwp[0] + '_' + mwp[1] + '_' + mwp[2]);
          if (governor != null) {
            newtd = new TypedDependency(gr, governor, pobj.dep());
          }
        }
        if (td2.gov() == mwp2 && td2.reln() == PREPOSITIONAL_COMPLEMENT) {
          pobj = td2;
          // create the new prepc_ relation
          GrammaticalRelation gr = EnglishGrammaticalRelations.getPrepC(mwp[0] + '_' + mwp[1] + '_' + mwp[2]);
          if (governor != null) {
            newtd = new TypedDependency(gr, governor, pobj.dep());
          }
        }
      }
      // only if we found the governor and complement parts: mark the old
      // relations KILL, add the new collapsed relation, and rebuild the list
      if (prep != null && pobj != null && newtd != null) {
        prep.setReln(KILL);
        dep1.setReln(KILL);
        dep2.setReln(KILL);
        pobj.setReln(KILL);
        newTypedDeps.add(newtd);
        // now remove typed dependencies with reln "kill"
        // and promote possible orphans
        for (TypedDependency td1 : list) {
          if (td1.reln() != KILL) {
            if (td1.gov() == mwp0 || td1.gov() == mwp1 || td1.gov() == mwp2) {
              td1.setGov(governor);
            }
            if (!newTypedDeps.contains(td1)) {
              newTypedDeps.add(td1);
            }
          }
        }
        list.clear();
        list.addAll(newTypedDeps);
      }
    }
  }
  // second, loop again looking at flat annotation
  for (String[] mwp : THREEWORD_PREPS) {
    TreeGraphNode mwp0 = null;
    TreeGraphNode mwp1 = null;
    TreeGraphNode mwp2 = null;
    TypedDependency dep1 = null;
    TypedDependency dep2 = null;
    // first find the first part of the 3-word preposition: dep(mwp[0], mwp[1]);
    // the two words must be adjacent in the sentence (difference of indexes == 1)
    for (TypedDependency td : list) {
      if (td.gov().value().equalsIgnoreCase(mwp[0]) && td.dep().value().equalsIgnoreCase(mwp[1]) && Math.abs(td.gov().index() - td.dep().index()) == 1) {
        mwp0 = td.gov();
        mwp1 = td.dep();
        dep1 = td;
      }
    }
    // find the second part of the 3-word preposition: dep(mwp[0], mwp[2]);
    // the two words must be one word apart in the sentence (difference of indexes == 2)
    for (TypedDependency td : list) {
      if (td.gov() == mwp0 && td.dep().value().equalsIgnoreCase(mwp[2]) && Math.abs(td.gov().index() - td.dep().index()) == 2) {
        mwp2 = td.dep();
        dep2 = td;
      }
    }
    if (dep1 != null && dep2 != null) {
      // now search for prep(gov, mwp0)
      TreeGraphNode governor = null;
      TypedDependency prep = null;
      for (TypedDependency td1 : list) {
        if (td1.dep() == mwp0 && td1.reln() == PREPOSITIONAL_MODIFIER) { // we found prep(gov, mwp0)
          prep = td1;
          governor = prep.gov();
        }
      }
      // search for the complement: pobj|pcomp(mwp0, X)
      TypedDependency pobj = null;
      TypedDependency newtd = null;
      for (TypedDependency td2 : list) {
        if (td2.gov() == mwp0 && td2.reln() == PREPOSITIONAL_OBJECT) {
          pobj = td2;
          // create the new prep_ relation
          GrammaticalRelation gr = EnglishGrammaticalRelations.getPrep(mwp[0] + '_' + mwp[1] + '_' + mwp[2]);
          if (governor != null) {
            newtd = new TypedDependency(gr, governor, pobj.dep());
          }
        }
        if (td2.gov() == mwp0 && td2.reln() == PREPOSITIONAL_COMPLEMENT) {
          pobj = td2;
          // create the new prepc_ relation
          GrammaticalRelation gr = EnglishGrammaticalRelations.getPrepC(mwp[0] + '_' + mwp[1] + '_' + mwp[2]);
          if (governor != null) {
            newtd = new TypedDependency(gr, governor, pobj.dep());
          }
        }
      }
      // only if we found the governor and complement parts: mark the old
      // relations KILL, add the new collapsed relation, and rebuild the list
      if (prep != null && pobj != null && newtd != null) {
        prep.setReln(KILL);
        dep1.setReln(KILL);
        dep2.setReln(KILL);
        pobj.setReln(KILL);
        newTypedDeps.add(newtd);
        // now remove typed dependencies with reln "kill"
        // and promote possible orphans
        for (TypedDependency td1 : list) {
          if (td1.reln() != KILL) {
            if (td1.gov() == mwp0 || td1.gov() == mwp1 || td1.gov() == mwp2) {
              td1.setGov(governor);
            }
            if (!newTypedDeps.contains(td1)) {
              newTypedDeps.add(td1);
            }
          }
        }
        list.clear();
        list.addAll(newTypedDeps);
      }
    }
  }
} | Collapse 3-word preposition of the following format: <br/>
This will be the case when the preposition is analyzed as a NP <br/>
prep(gov, mwp0) <br/>
X(mwp0,mwp1) <br/>
X(mwp1,mwp2) <br/>
pobj|pcomp(mwp2, compl) <br/>
-> prep_mwp[0]_mwp[1]_mwp[2](gov, compl)
<p/>
It also takes flat annotation into account: <br/>
prep(gov,mwp0) <br/>
X(mwp0,mwp1) <br/>
X(mwp0,mwp2) <br/>
pobj|pcomp(mwp0, compl) <br/>
-> prep_mwp[0]_mwp[1]_mwp[2](gov, compl)
<p/>
@param list
List of typedDependencies to work on |
/**
 * Gets rid of multiwords in conjunctions to avoid them creating disconnected
 * constituents: e.g., for "bread-1 as-2 well-3 as-4 cheese-5" we end up with
 * conj_and(bread, cheese), and the leftover dep(well-3, as-2) and
 * dep(well-3, as-4) cannot be attached to the graph, so they are erased.
 *
 * @param list typed dependencies to clean up (mutated in place)
 */
private static void eraseMultiConj(Collection<TypedDependency> list) {
  // Pass 1: for every coordination relation cc(gov, x), mark as KILL every
  // dependent-like relation governed by x.
  for (TypedDependency ccDep : list) {
    if (ccDep.reln() != COORDINATION) {
      continue;
    }
    TreeGraphNode conjWord = ccDep.dep();
    for (TypedDependency candidate : list) {
      boolean killable = candidate.reln() == DEPENDENT
          || candidate.reln() == MULTI_WORD_EXPRESSION
          || candidate.reln() == COORDINATION
          || candidate.reln() == ADVERBIAL_MODIFIER
          || candidate.reln() == NEGATION_MODIFIER
          || candidate.reln() == AUX_MODIFIER;
      if (killable && candidate.gov().equals(conjWord)) {
        candidate.setReln(KILL);
      }
    }
  }
  // Pass 2: physically remove everything marked KILL.
  Iterator<TypedDependency> it = list.iterator();
  while (it.hasNext()) {
    TypedDependency td = it.next();
    if (td.reln() == KILL) {
      if (DEBUG) {
        System.err.println("Removing rest of multiword conj: " + td);
      }
      it.remove();
    }
  }
} | This method gets rid of multiwords in conjunctions to avoid having them
creating disconnected constituents e.g.,
"bread-1 as-2 well-3 as-4 cheese-5" will be turned into conj_and(bread,
cheese) and then dep(well-3, as-2) and dep(well-3, as-4) cannot be attached
to the graph; these dependencies are therefore erased
@param list List of words to get rid of multiword conjunctions from |
/**
 * Removes duplicate relations left behind when collapsing stranded
 * prepositions. E.g., for "What does CPR stand for?" we get dep(stand, what),
 * and after collapsing we also get prep_for(stand, what); the plain dep is
 * redundant and is deleted.
 *
 * @param list typed dependencies to check through (mutated in place)
 */
private static void removeDep(Collection<TypedDependency> list) {
  // All collapsed prep_/prepc_ relations currently registered.
  Set<GrammaticalRelation> prepRels = new HashSet<GrammaticalRelation>(EnglishGrammaticalRelations.getPreps());
  prepRels.addAll(EnglishGrammaticalRelations.getPrepsC());
  // A dep(gov, d) that mirrors a prep_x(gov, d) is redundant: mark it KILL.
  for (TypedDependency collapsed : list) {
    if (!prepRels.contains(collapsed.reln())) {
      continue;
    }
    TreeGraphNode g = collapsed.gov();
    TreeGraphNode d = collapsed.dep();
    for (TypedDependency other : list) {
      if (other.reln() == DEPENDENT && other.gov() == g && other.dep() == d) {
        other.setReln(KILL);
      }
    }
  }
  // Sweep out everything marked KILL.
  Iterator<TypedDependency> it = list.iterator();
  while (it.hasNext()) {
    TypedDependency td = it.next();
    if (td.reln() == KILL) {
      if (DEBUG) {
        System.err.println("Removing duplicate relation: " + td);
      }
      it.remove();
    }
  }
} | Remove duplicate relations: it can happen when collapsing stranded
prepositions. E.g., "What does CPR stand for?" we get dep(stand, what), and
after collapsing we also get prep_for(stand, what).
@param list A list of typed dependencies to check through |
/**
 * Accepts a String that is a punctuation word, and rejects everything else.
 * A token counts as punctuation if any of the Chinese punctuation filters
 * accepts it.
 *
 * @return whether this is a punctuation word
 */
@Override
public boolean isPunctuationWord(String str) {
  if (chineseCommaAcceptFilter().accept(str)) {
    return true;
  }
  if (chineseEndSentenceAcceptFilter().accept(str)) {
    return true;
  }
  if (chineseDouHaoAcceptFilter().accept(str)) {
    return true;
  }
  if (chineseQuoteMarkAcceptFilter().accept(str)) {
    return true;
  }
  if (chineseParenthesisAcceptFilter().accept(str)) {
    return true;
  }
  if (chineseColonAcceptFilter().accept(str)) {
    return true;
  }
  if (chineseDashAcceptFilter().accept(str)) {
    return true;
  }
  return chineseOtherAcceptFilter().accept(str);
} | Accepts a String that is a punctuation
word, and rejects everything else.
If one can't tell for sure (as for ' in the Penn Treebank), it
makes the best guess that it can.
@return Whether this is a punctuation word |
/**
 * Use this API to fetch the appfwglobal_auditnslogpolicy_binding resources.
 */
public static appfwglobal_auditnslogpolicy_binding[] get(nitro_service service) throws Exception{
  appfwglobal_auditnslogpolicy_binding resource = new appfwglobal_auditnslogpolicy_binding();
  return (appfwglobal_auditnslogpolicy_binding[]) resource.get_resources(service);
} | Use this API to fetch a appfwglobal_auditnslogpolicy_binding resources. |
/**
 * Use this API to fetch a filtered set of appfwglobal_auditnslogpolicy_binding
 * resources. The filter string should be in JSON format,
 * e.g. "port:80,servicetype:HTTP".
 */
public static appfwglobal_auditnslogpolicy_binding[] get_filtered(nitro_service service, String filter) throws Exception{
  appfwglobal_auditnslogpolicy_binding resource = new appfwglobal_auditnslogpolicy_binding();
  options opt = new options();
  opt.set_filter(filter);
  return (appfwglobal_auditnslogpolicy_binding[]) resource.getfiltered(service, opt);
} | Use this API to fetch filtered set of appfwglobal_auditnslogpolicy_binding resources.
filter string should be in JSON format.eg: "port:80,servicetype:HTTP". |
/**
 * Use this API to update lbsipparameters. Only the updatable SIP parameters
 * are copied onto the request object.
 */
public static base_response update(nitro_service client, lbsipparameters resource) throws Exception {
  lbsipparameters request = new lbsipparameters();
  request.rnatsrcport = resource.rnatsrcport;
  request.rnatdstport = resource.rnatdstport;
  request.retrydur = resource.retrydur;
  request.addrportvip = resource.addrportvip;
  request.sip503ratethreshold = resource.sip503ratethreshold;
  return request.update_resource(client);
} | Use this API to update lbsipparameters. |
/**
 * Use this API to unset the properties of lbsipparameters resource.
 * Properties that need to be unset are specified in the args array.
 * NOTE(review): the 'resource' parameter is unused here — presumably because
 * lbsipparameters is a global entity with no identifying fields; confirm.
 */
public static base_response unset(nitro_service client, lbsipparameters resource, String[] args) throws Exception{
  lbsipparameters request = new lbsipparameters();
  return request.unset_resource(client, args);
} | Use this API to unset the properties of lbsipparameters resource.
Properties that need to be unset are specified in args array. |
/**
 * Use this API to fetch all the lbsipparameters resources configured on the
 * netscaler. Returns the first (and presumably only) entry of the array
 * returned by the service.
 */
public static lbsipparameters get(nitro_service service) throws Exception{
  lbsipparameters request = new lbsipparameters();
  lbsipparameters[] results = (lbsipparameters[]) request.get_resources(service);
  return results[0];
} | Use this API to fetch all the lbsipparameters resources that are configured on netscaler. |
/**
 * Use this API to add appfwjsoncontenttype.
 */
public static base_response add(nitro_service client, appfwjsoncontenttype resource) throws Exception {
  appfwjsoncontenttype request = new appfwjsoncontenttype();
  request.jsoncontenttypevalue = resource.jsoncontenttypevalue;
  request.isregex = resource.isregex;
  return request.add_resource(client);
} | Use this API to add appfwjsoncontenttype. |
/**
 * Use this API to add appfwjsoncontenttype resources in bulk.
 * Returns null when there is nothing to add.
 */
public static base_responses add(nitro_service client, appfwjsoncontenttype resources[]) throws Exception {
  if (resources == null || resources.length == 0) {
    return null;
  }
  appfwjsoncontenttype[] requests = new appfwjsoncontenttype[resources.length];
  for (int i = 0; i < resources.length; i++) {
    appfwjsoncontenttype request = new appfwjsoncontenttype();
    request.jsoncontenttypevalue = resources[i].jsoncontenttypevalue;
    request.isregex = resources[i].isregex;
    requests[i] = request;
  }
  return add_bulk_request(client, requests);
} | Use this API to add appfwjsoncontenttype resources. |
/**
 * Use this API to delete the appfwjsoncontenttype of the given name.
 */
public static base_response delete(nitro_service client, String jsoncontenttypevalue) throws Exception {
  appfwjsoncontenttype request = new appfwjsoncontenttype();
  request.jsoncontenttypevalue = jsoncontenttypevalue;
  return request.delete_resource(client);
} | Use this API to delete appfwjsoncontenttype of given name. |
/**
 * Use this API to delete appfwjsoncontenttype resources of the given names.
 * Returns null when there is nothing to delete.
 */
public static base_responses delete(nitro_service client, String jsoncontenttypevalue[]) throws Exception {
  if (jsoncontenttypevalue == null || jsoncontenttypevalue.length == 0) {
    return null;
  }
  appfwjsoncontenttype[] requests = new appfwjsoncontenttype[jsoncontenttypevalue.length];
  for (int i = 0; i < jsoncontenttypevalue.length; i++) {
    appfwjsoncontenttype request = new appfwjsoncontenttype();
    request.jsoncontenttypevalue = jsoncontenttypevalue[i];
    requests[i] = request;
  }
  return delete_bulk_request(client, requests);
} | Use this API to delete appfwjsoncontenttype resources of given names. |
/**
 * Use this API to fetch all the appfwjsoncontenttype resources configured on
 * the netscaler, using the supplied options.
 */
public static appfwjsoncontenttype[] get(nitro_service service, options option) throws Exception{
  appfwjsoncontenttype request = new appfwjsoncontenttype();
  return (appfwjsoncontenttype[]) request.get_resources(service, option);
} | Use this API to fetch all the appfwjsoncontenttype resources that are configured on netscaler. |
/**
 * Use this API to fetch the appfwjsoncontenttype resource of the given name.
 */
public static appfwjsoncontenttype get(nitro_service service, String jsoncontenttypevalue) throws Exception{
  appfwjsoncontenttype request = new appfwjsoncontenttype();
  request.set_jsoncontenttypevalue(jsoncontenttypevalue);
  return (appfwjsoncontenttype) request.get_resource(service);
} | Use this API to fetch appfwjsoncontenttype resource of given name . |
/**
 * Use this API to fetch appfwjsoncontenttype resources of the given names.
 * Returns null when no names are supplied.
 */
public static appfwjsoncontenttype[] get(nitro_service service, String jsoncontenttypevalue[]) throws Exception{
  if (jsoncontenttypevalue == null || jsoncontenttypevalue.length == 0) {
    return null;
  }
  appfwjsoncontenttype[] results = new appfwjsoncontenttype[jsoncontenttypevalue.length];
  for (int i = 0; i < jsoncontenttypevalue.length; i++) {
    appfwjsoncontenttype request = new appfwjsoncontenttype();
    request.set_jsoncontenttypevalue(jsoncontenttypevalue[i]);
    results[i] = (appfwjsoncontenttype) request.get_resource(service);
  }
  return results;
} | Use this API to fetch appfwjsoncontenttype resources of given names . |
/**
 * Use this API to fetch a filtered set of appfwjsoncontenttype resources.
 * The filter string should be in JSON format, e.g. "port:80,servicetype:HTTP".
 */
public static appfwjsoncontenttype[] get_filtered(nitro_service service, String filter) throws Exception{
  appfwjsoncontenttype request = new appfwjsoncontenttype();
  options opt = new options();
  opt.set_filter(filter);
  return (appfwjsoncontenttype[]) request.getfiltered(service, opt);
} | Use this API to fetch filtered set of appfwjsoncontenttype resources.
filter string should be in JSON format.eg: "port:80,servicetype:HTTP". |
/**
 * Sets categories which, if it comes to last-resort processing (i.e. none of
 * the rules matched), will be avoided as heads. In last-resort processing, the
 * leftmost or rightmost constituent not in this set is matched, falling back
 * to the left/rightmost constituent if necessary.
 *
 * @param categoriesToAvoid list of constituent types to avoid
 */
protected void setCategoriesToAvoid(String[] categoriesToAvoid) {
  // Build the two last-resort rules up front: each is the avoid list
  // prefixed with the appropriate search directive.
  int n = categoriesToAvoid.length;
  defaultLeftRule = new String[n + 1];
  defaultRightRule = new String[n + 1];
  defaultLeftRule[0] = "leftexcept";
  defaultRightRule[0] = "rightexcept";
  System.arraycopy(categoriesToAvoid, 0, defaultLeftRule, 1, n);
  System.arraycopy(categoriesToAvoid, 0, defaultRightRule, 1, n);
} | Set categories which, if it comes to last resort processing (i.e. none of
the rules matched), will be avoided as heads. In last resort processing,
it will attempt to match the leftmost or rightmost constituent not in this
set but will fall back to the left or rightmost constituent if necessary.
@param categoriesToAvoid list of constituent types to avoid |
/**
 * Determines which daughter of the current parse tree is the head.
 *
 * @param t the parse tree to examine the daughters of; if this is a leaf,
 *          null is returned
 * @param parent the parent of t
 * @return the daughter parse tree that is the head of t, or null for leaves
 */
public Tree determineHead(Tree t, Tree parent) {
  if (nonTerminalInfo == null) {
    throw new RuntimeException("Classes derived from AbstractCollinsHeadFinder must" + " create and fill HashMap nonTerminalInfo.");
  }
  if (t == null || t.isLeaf()) {
    return null;
  }
  if (DEBUG) {
    System.err.println("determineHead for " + t.value());
  }
  Tree[] kids = t.children();
  Tree theHead;
  // first check if subclass found explicitly marked head
  if ((theHead = findMarkedHead(t)) != null) {
    if (DEBUG) {
      System.err.println("Find marked head method returned " +
          theHead.label() + " as head of " + t.label());
    }
    return theHead;
  }
  // if the node is a unary, then that kid must be the head
  // it used to special case preterminal and ROOT/TOP case
  // but that seemed bad (especially hardcoding string "ROOT")
  if (kids.length == 1) {
    if (DEBUG) {
      System.err.println("Only one child determines " +
          kids[0].label() + " as head of " + t.label());
    }
    return kids[0];
  }
  // otherwise consult the head-percolation rules
  return determineNonTrivialHead(t, parent);
} | Determine which daughter of the current parse tree is the head.
@param t The parse tree to examine the daughters of.
If this is a leaf, <code>null</code> is returned
@param parent The parent of t
@return The daughter parse tree that is the head of <code>t</code>.
Returns null for leaf nodes.
@see Tree#percolateHeads(HeadFinder)
for a routine to call this and spread heads throughout a tree |
/**
 * Called by determineHead; may be overridden in subclasses if special
 * treatment is necessary for particular categories. Looks up the head rules
 * for this node's basic category and tries them in order; the last rule is
 * treated as a "last resort" that can fall back to a default daughter.
 */
protected Tree determineNonTrivialHead(Tree t, Tree parent) {
  Tree theHead = null;
  String motherCat = tlp.basicCategory(t.label().value());
  if (DEBUG) {
    System.err.println("Looking for head of " + t.label() +
        "; value is |" + t.label().value() + "|, " +
        " baseCat is |" + motherCat + '|');
  }
  // We know we have nonterminals underneath
  // (a bit of a Penn Treebank assumption, but).
  // Look at label.
  // a total special case....
  // first look for POS tag at end
  // this appears to be redundant in the Collins case since the rule already would do that
  // Tree lastDtr = t.lastChild();
  // if (tlp.basicCategory(lastDtr.label().value()).equals("POS")) {
  //   theHead = lastDtr;
  // } else {
  String[][] how = nonTerminalInfo.get(motherCat);
  if (how == null) {
    // no rule for this category: fall back to the default rule if one exists
    if (DEBUG) {
      System.err.println("Warning: No rule found for " + motherCat +
          " (first char: " + motherCat.charAt(0) + ')');
      System.err.println("Known nonterms are: " + nonTerminalInfo.keySet());
    }
    if (defaultRule != null) {
      if (DEBUG) {
        System.err.println("  Using defaultRule");
      }
      return traverseLocate(t.children(), defaultRule, true);
    } else {
      return null;
    }
  }
  for (int i = 0; i < how.length; i++) {
    // only the final rule may fall back to a default daughter
    boolean lastResort = (i == how.length - 1);
    theHead = traverseLocate(t.children(), how[i], lastResort);
    if (theHead != null) {
      break;
    }
  }
  if (DEBUG) {
    // NOTE(review): theHead could still be null here if the rule array for
    // motherCat were empty — confirm that never happens in practice.
    System.err.println("  Chose " + theHead.label());
  }
  return theHead;
} | Called by determineHead and may be overridden in subclasses
if special treatment is necessary for particular categories. |
/**
 * Attempts to locate the head daughter from among daughterTrees using one
 * head-percolation rule. how[0] is the search directive; how[1..] are the
 * categories to search for. If nothing matches and lastResort is true, the
 * default rule (built from categoriesToAvoid) is tried, ultimately falling
 * back to the left- or rightmost daughter; otherwise null is returned.
 */
protected Tree traverseLocate(Tree[] daughterTrees, String[] how, boolean lastResort) {
  int headIdx = 0;
  String childCat;
  boolean found = false;
  if (how[0].equals("left")) {
    // try each category in rule order, scanning daughters left-to-right
    twoloop:
    for (int i = 1; i < how.length; i++) {
      for (headIdx = 0; headIdx < daughterTrees.length; headIdx++) {
        childCat = tlp.basicCategory(daughterTrees[headIdx].label().value());
        if (how[i].equals(childCat)) {
          found = true;
          break twoloop;
        }
      }
    }
  } else if (how[0].equals("leftdis")) {
    // scan daughters left-to-right, accepting any listed category
    twoloop:
    for (headIdx = 0; headIdx < daughterTrees.length; headIdx++) {
      childCat = tlp.basicCategory(daughterTrees[headIdx].label().value());
      for (int i = 1; i < how.length; i++) {
        if (how[i].equals(childCat)) {
          found = true;
          break twoloop;
        }
      }
    }
  } else if (how[0].equals("right")) {
    // from right: try each category in rule order, scanning right-to-left
    twoloop:
    for (int i = 1; i < how.length; i++) {
      for (headIdx = daughterTrees.length - 1; headIdx >= 0; headIdx--) {
        childCat = tlp.basicCategory(daughterTrees[headIdx].label().value());
        if (how[i].equals(childCat)) {
          found = true;
          break twoloop;
        }
      }
    }
  } else if (how[0].equals("rightdis")) {
    // from right, but search for any listed category, not in turn
    twoloop:
    for (headIdx = daughterTrees.length - 1; headIdx >= 0; headIdx--) {
      childCat = tlp.basicCategory(daughterTrees[headIdx].label().value());
      for (int i = 1; i < how.length; i++) {
        if (how[i].equals(childCat)) {
          found = true;
          break twoloop;
        }
      }
    }
  } else if (how[0].equals("leftexcept")) {
    // leftmost daughter whose category is NOT listed in the rule
    for (headIdx = 0; headIdx < daughterTrees.length; headIdx++) {
      childCat = tlp.basicCategory(daughterTrees[headIdx].label().value());
      found = true;
      for (int i = 1; i < how.length; i++) {
        if (how[i].equals(childCat)) {
          found = false;
        }
      }
      if (found) {
        break;
      }
    }
  } else if (how[0].equals("rightexcept")) {
    // rightmost daughter whose category is NOT listed in the rule
    for (headIdx = daughterTrees.length - 1; headIdx >= 0; headIdx--) {
      childCat = tlp.basicCategory(daughterTrees[headIdx].label().value());
      found = true;
      for (int i = 1; i < how.length; i++) {
        if (how[i].equals(childCat)) {
          found = false;
        }
      }
      if (found) {
        break;
      }
    }
  } else {
    throw new RuntimeException("ERROR: invalid direction type " + how[0] + " to nonTerminalInfo map in AbstractCollinsHeadFinder.");
  }
  // what happens if our rule didn't match anything
  if (!found) {
    if (lastResort) {
      // use the default rule to try to match anything except categoriesToAvoid
      // if that doesn't match, we'll return the left or rightmost child (by
      // setting headIdx). We want to be careful to ensure that postOperationFix
      // runs exactly once.
      String[] rule;
      if (how[0].startsWith("left")) {
        headIdx = 0;
        rule = defaultLeftRule;
      } else {
        headIdx = daughterTrees.length - 1;
        rule = defaultRightRule;
      }
      Tree child = traverseLocate(daughterTrees, rule, false);
      if (child != null) {
        return child;
      }
    } else {
      // if we're not the last resort, we can return null to let the next rule try to match
      return null;
    }
  }
  headIdx = postOperationFix(headIdx, daughterTrees);
  return daughterTrees[headIdx];
} | Attempt to locate head daughter tree from among daughters.
Go through daughterTrees looking for things from a set found by
looking up the motherkey specifier in a hash map, and if
you do not find one, take leftmost or rightmost thing iff
lastResort is true, otherwise return <code>null</code>. |
/**
 * Use this API to add policydataset.
 */
public static base_response add(nitro_service client, policydataset resource) throws Exception {
  policydataset request = new policydataset();
  request.name = resource.name;
  request.type = resource.type;
  request.indextype = resource.indextype;
  return request.add_resource(client);
} | Use this API to add policydataset. |
/**
 * Use this API to fetch all the policydataset resources configured on the
 * netscaler.
 */
public static policydataset[] get(nitro_service service) throws Exception{
  policydataset request = new policydataset();
  return (policydataset[]) request.get_resources(service);
} | Use this API to fetch all the policydataset resources that are configured on netscaler. |
/**
 * Use this API to fetch the policydataset resource of the given name.
 */
public static policydataset get(nitro_service service, String name) throws Exception{
  policydataset request = new policydataset();
  request.set_name(name);
  return (policydataset) request.get_resource(service);
} | Use this API to fetch policydataset resource of given name . |
/**
 * Use this API to fetch a filtered set of policydataset resources.
 * The filter string should be in JSON format, e.g. "port:80,servicetype:HTTP".
 */
public static policydataset[] get_filtered(nitro_service service, String filter) throws Exception{
  policydataset request = new policydataset();
  options opt = new options();
  opt.set_filter(filter);
  return (policydataset[]) request.getfiltered(service, opt);
} | Use this API to fetch filtered set of policydataset resources.
filter string should be in JSON format.eg: "port:80,servicetype:HTTP". |
/**
 * Checks whether the given prefix is registered as single-position for this
 * field (i.e. listed in the field's single-position attribute).
 *
 * @param fieldInfo the field info
 * @param prefix the prefix
 * @return true if the prefix is a single-position prefix
 * @throws IOException if the field info or its single-position attribute is missing
 */
public static boolean isSinglePositionPrefix(FieldInfo fieldInfo,
    String prefix) throws IOException {
  // Guard clauses replace the original nested if/else.
  if (fieldInfo == null) {
    throw new IOException("no fieldInfo");
  }
  String info = fieldInfo.getAttribute(
      MtasCodecPostingsFormat.MTAS_FIELDINFO_ATTRIBUTE_PREFIX_SINGLE_POSITION);
  if (info == null) {
    throw new IOException("no "
        + MtasCodecPostingsFormat.MTAS_FIELDINFO_ATTRIBUTE_PREFIX_SINGLE_POSITION);
  }
  String[] singlePositionPrefixes = info.split(Pattern.quote(MtasToken.DELIMITER));
  return Arrays.asList(singlePositionPrefixes).contains(prefix);
} | Checks if is single position prefix.
@param fieldInfo
the field info
@param prefix
the prefix
@return true, if is single position prefix
@throws IOException
Signals that an I/O exception has occurred. |
/**
 * Extracts the value part of a term: everything after the first delimiter,
 * with NUL characters stripped. Returns null when the term has no delimiter
 * or the part after the delimiter is empty.
 *
 * @param term the term
 * @return the value, or null
 */
public static String termValue(String term) {
  int sep = term.indexOf(MtasToken.DELIMITER);
  if (sep < 0) {
    return null;
  }
  String value = term.substring(sep + MtasToken.DELIMITER.length());
  if (value.isEmpty()) {
    return null;
  }
  return value.replace("\u0000", "");
} | Term value.
@param term
the term
@return the string |
/**
 * Extracts the prefix part of a term: everything before the first delimiter
 * (or the whole term when no delimiter is present), with NUL characters
 * stripped.
 *
 * @param term the term
 * @return the prefix
 */
public static String termPrefix(String term) {
  int sep = term.indexOf(MtasToken.DELIMITER);
  String prefix = (sep >= 0) ? term.substring(0, sep) : term;
  return prefix.replace("\u0000", "");
} | Term prefix.
@param term
the term
@return the string |
/**
 * Collects statistics for a field: precomputes a SpanWeight per registered
 * span query and delegates the actual collection to CodecCollector. Does
 * nothing when fieldStats is null.
 *
 * @param field the field
 * @param searcher the searcher
 * @param rawReader the raw reader
 * @param fullDocList the full doc list
 * @param fullDocSet the full doc set
 * @param fieldStats the field stats (may be null)
 * @throws IOException if an I/O error occurs
 */
public static void collectField(String field, IndexSearcher searcher,
    IndexReader rawReader, ArrayList<Integer> fullDocList,
    ArrayList<Integer> fullDocSet, ComponentField fieldStats, Status status)
    throws IllegalAccessException, IllegalArgumentException,
    InvocationTargetException, IOException {
  if (fieldStats == null) {
    return;
  }
  IndexReader reader = searcher.getIndexReader();
  // Precompute one SpanWeight per registered span query.
  HashMap<MtasSpanQuery, SpanWeight> spansQueryWeight = new HashMap<>();
  if (!fieldStats.spanQueryList.isEmpty()) {
    final float boost = 0;
    for (MtasSpanQuery sq : fieldStats.spanQueryList) {
      MtasSpanQuery rewritten = (MtasSpanQuery) sq.rewrite(reader);
      spansQueryWeight.put(sq, rewritten.createWeight(searcher, false, boost));
    }
  }
  // Delegate the actual collection work to the codec collector.
  CodecCollector.collectField(field, searcher, reader, rawReader,
      fullDocList, fullDocSet, fieldStats, spansQueryWeight, status);
} | Collect field.
@param field
the field
@param searcher
the searcher
@param rawReader
the raw reader
@param fullDocList
the full doc list
@param fullDocSet
the full doc set
@param fieldStats
the field stats
@throws IllegalAccessException
the illegal access exception
@throws IllegalArgumentException
the illegal argument exception
@throws InvocationTargetException
the invocation target exception
@throws IOException
Signals that an I/O exception has occurred. |
/**
 * Collects collection information over the given document set by
 * delegating to CodecCollector.
 *
 * @param reader the index reader
 * @param fullDocSet the full doc set
 * @param collectionInfo the collection component to fill; no-op when null
 * @throws IOException Signals that an I/O exception has occurred.
 */
public static void collectCollection(IndexReader reader,
    List<Integer> fullDocSet, ComponentCollection collectionInfo)
    throws IOException {
  // Nothing to collect without collection info.
  if (collectionInfo == null) {
    return;
  }
  CodecCollector.collectCollection(reader, fullDocSet, collectionInfo);
} | Collect collection.
@param reader
the reader
@param fullDocSet
the full doc set
@param collectionInfo
the collection info
@throws IOException
Signals that an I/O exception has occurred. |
/**
 * Parses a stats type specification into the sorted set of stats items
 * to compute. Recognized plain types are added directly, the "all"
 * keyword expands to every known type, and function items must be
 * written with parentheses. When nothing is requested, a default of
 * sum/n/mean is used.
 *
 * @param statsType the stats type specification (may be null)
 * @return the sorted set of stats items
 * @throws IOException on an unknown stats type or a function item
 *           written without parentheses
 */
static SortedSet<String> createStatsItems(String statsType)
    throws IOException {
  SortedSet<String> statsItems = new TreeSet<>();
  SortedSet<String> functionItems = new TreeSet<>();
  if (statsType != null) {
    Matcher m = fpStatsItems.matcher(statsType.trim());
    while (m.find()) {
      String tmpStatsItem = m.group(2).trim();
      if (STATS_TYPES.contains(tmpStatsItem)) {
        statsItems.add(tmpStatsItem);
      } else if (tmpStatsItem.equals(STATS_TYPE_ALL)) {
        // "all" expands to every known stats type
        statsItems.addAll(STATS_TYPES);
      } else if (STATS_FUNCTIONS.contains(tmpStatsItem)) {
        if (m.group(3) == null) {
          throw new IOException("'" + tmpStatsItem + "' should be called as '"
              + tmpStatsItem + "()' with an optional argument");
        } else {
          // keep the full function expression, not just its name
          functionItems.add(m.group(1).trim());
        }
      } else {
        throw new IOException("unknown statsType '" + tmpStatsItem + "'");
      }
    }
  }
  // fall back to a sensible default when nothing was requested
  if (statsItems.isEmpty() && functionItems.isEmpty()) {
    statsItems.add(STATS_TYPE_SUM);
    statsItems.add(STATS_TYPE_N);
    statsItems.add(STATS_TYPE_MEAN);
  }
  // addAll of an empty set is a no-op, so no guard is needed
  statsItems.addAll(functionItems);
  return statsItems;
} | Creates the stats items.
@param statsType
the stats type
@return the sorted set
@throws IOException
Signals that an I/O exception has occurred. |
/**
 * Determines the minimal stats level (basic/advanced/full) needed to
 * compute the requested stats items and sort type. Any full-level item
 * or function item immediately forces the full level; advanced-level
 * items raise the level to advanced.
 *
 * <p>Fix: the original compared String constants with {@code ==}/{@code !=},
 * which only works by reference identity; {@code equals} is used instead.
 *
 * @param statsItems the stats items
 * @param sortType the sort type (may be null)
 * @param functionParser the function parser (currently unused, kept for
 *          interface compatibility)
 * @return the stats type level
 */
static String createStatsType(Set<String> statsItems, String sortType,
    MtasFunctionParserFunction functionParser) {
  String statsType = STATS_BASIC;
  for (String statsItem : statsItems) {
    if (STATS_FULL_TYPES.contains(statsItem)) {
      statsType = STATS_FULL;
      break;
    } else if (STATS_ADVANCED_TYPES.contains(statsItem)) {
      statsType = STATS_ADVANCED;
    } else if (!STATS_ADVANCED.equals(statsType)
        && STATS_BASIC_TYPES.contains(statsItem)) {
      statsType = STATS_BASIC;
    } else {
      // function items always require the full stats level
      Matcher m = fpStatsFunctionItems.matcher(statsItem.trim());
      if (m.find() && STATS_FUNCTIONS.contains(m.group(2).trim())) {
        statsType = STATS_FULL;
        break;
      }
    }
  }
  if (sortType != null && STATS_TYPES.contains(sortType)) {
    if (STATS_FULL_TYPES.contains(sortType)) {
      statsType = STATS_FULL;
    } else if (STATS_ADVANCED_TYPES.contains(sortType)
        && !STATS_FULL.equals(statsType)) {
      // advanced sort may raise the level, but never lowers full
      statsType = STATS_ADVANCED;
    }
  }
  return statsType;
} | Creates the stats type.
@param statsItems
the stats items
@param sortType
the sort type
@param functionParser
the function parser
@return the string |
/**
 * Appends the part of the specified list between <code>from</code>
 * (inclusive) and <code>to</code> (inclusive) to the receiver.
 *
 * @param other the list to be added to the receiver.
 * @param from the index of the first element to be appended (inclusive).
 * @param to the index of the last element to be appended (inclusive).
 * @exception IndexOutOfBoundsException index is out of range
 *   (<tt>other.size()&gt;0 &amp;&amp; (from&lt;0 || from&gt;to || to&gt;=other.size())</tt>).
 */
public void addAllOfFromTo(AbstractIntList other, int from, int to) {
  // appending == inserting at the current end of the receiver
  beforeInsertAllOfFromTo(size,other,from,to);
} | Appends the part of the specified list between <code>from</code> (inclusive) and <code>to</code> (inclusive) to the receiver.
@param other the list to be added to the receiver.
@param from the index of the first element to be appended (inclusive).
@param to the index of the last element to be appended (inclusive).
@exception IndexOutOfBoundsException index is out of range (<tt>other.size()>0 && (from<0 || from>to || to>=other.size())</tt>). |
/**
 * Inserts <tt>length</tt> dummy elements before the specified position,
 * shifting the element currently at that position (if any) and any
 * subsequent elements to the right. Sets the new size to size()+length;
 * the inserted slots hold unspecified values until overwritten.
 *
 * @param index index before which to insert dummy elements (must be in [0,size]).
 * @param length number of dummy elements to be inserted.
 * @throws IndexOutOfBoundsException if <tt>index &lt; 0 || index &gt; size()</tt>.
 */
protected void beforeInsertDummies(int index, int length) {
  if (index > size || index < 0)
    throw new IndexOutOfBoundsException("Index: "+index+", Size: "+size);
  if (length > 0) {
    ensureCapacity(size + length);
    setSizeRaw(size + length);
    // size was already bumped, so size-1 is the new last index; this
    // shifts the old tail [index, oldSize-1] right by 'length' slots
    replaceFromToWithFrom(index+length,size-1,this,index);
  }
} | Inserts <tt>length</tt> dummy elements before the specified position into the receiver.
Shifts the element currently at that position (if any) and
any subsequent elements to the right.
<b>This method must set the new size to be <tt>size()+length</tt>.
@param index index before which to insert dummy elements (must be in [0,size])..
@param length number of dummy elements to be inserted.
@throws IndexOutOfBoundsException if <tt>index < 0 || index > size()</tt>. |
/**
 * Returns a new list which is a concatenation of <code>times</code>
 * copies of the receiver.
 *
 * @param times the number of times the receiver shall be copied.
 * @return the concatenated list
 */
public AbstractIntList times(int times) {
  // Pre-size the result for all copies, then append the receiver repeatedly.
  AbstractIntList result = new IntArrayList(times*size());
  for (int rep = 0; rep < times; rep++) {
    result.addAllOfFromTo(this, 0, size() - 1);
  }
  return result;
} | Returns a list which is a concatenation of <code>times</code> times the receiver.
@param times the number of times the receiver shall be copied. |
/**
 * Fetches all appfwwsdl resources configured on the NetScaler and
 * returns the first one.
 *
 * @param service the nitro service to query
 * @return the first configured appfwwsdl resource, or null when none exist
 * @throws Exception on communication or API errors
 */
public static appfwwsdl get(nitro_service service) throws Exception{
  appfwwsdl obj = new appfwwsdl();
  appfwwsdl[] response = (appfwwsdl[])obj.get_resources(service);
  // guard against a missing/empty result instead of failing with an
  // NullPointerException / ArrayIndexOutOfBoundsException
  if (response == null || response.length == 0) {
    return null;
  }
  return response[0];
} | Use this API to fetch all the appfwwsdl resources that are configured on netscaler. |
/**
 * Fetches the appfwwsdl resource with the given name.
 *
 * @param service the nitro service to query
 * @param name name of the resource to fetch
 * @return the matching appfwwsdl resource
 * @throws Exception on communication or API errors
 */
public static appfwwsdl get(nitro_service service, String name) throws Exception{
  appfwwsdl obj = new appfwwsdl();
  obj.set_name(name);
  appfwwsdl response = (appfwwsdl) obj.get_resource(service);
  return response;
} | Use this API to fetch appfwwsdl resource of given name . |
/**
 * Fetches the lbvserver_servicegroup_binding resources bound to the
 * lbvserver with the given name.
 *
 * @param service the nitro service to query
 * @param name name of the lbvserver
 * @return the bound servicegroup resources
 * @throws Exception on communication or API errors
 */
public static lbvserver_servicegroup_binding[] get(nitro_service service, String name) throws Exception{
  lbvserver_servicegroup_binding obj = new lbvserver_servicegroup_binding();
  obj.set_name(name);
  lbvserver_servicegroup_binding response[] = (lbvserver_servicegroup_binding[]) obj.get_resources(service);
  return response;
} | Use this API to fetch lbvserver_servicegroup_binding resources of given name . |
/**
 * Kills the given systemsession on the NetScaler. Only the sid and the
 * "all" flag of the passed resource are used to identify what to kill.
 *
 * @param client the nitro service client
 * @param resource session descriptor (sid / all)
 * @return the response of the "kill" operation
 * @throws Exception on communication or API errors
 */
public static base_response kill(nitro_service client, systemsession resource) throws Exception {
  systemsession killresource = new systemsession();
  killresource.sid = resource.sid;
  killresource.all = resource.all;
  return killresource.perform_operation(client,"kill");
} | Use this API to kill systemsession. |
/**
 * Kills multiple systemsession resources in one bulk request. Returns
 * null when the input array is null or empty.
 *
 * @param client the nitro service client
 * @param resources session descriptors (sid / all each)
 * @return the bulk responses, or null for empty input
 * @throws Exception on communication or API errors
 */
public static base_responses kill(nitro_service client, systemsession resources[]) throws Exception {
  base_responses result = null;
  if (resources != null && resources.length > 0) {
    // copy only the identifying fields into fresh request objects
    systemsession killresources[] = new systemsession[resources.length];
    for (int i=0;i<resources.length;i++){
      killresources[i] = new systemsession();
      killresources[i].sid = resources[i].sid;
      killresources[i].all = resources[i].all;
    }
    result = perform_operation_bulk_request(client, killresources,"kill");
  }
  return result;
} | Use this API to kill systemsession resources. |
/**
 * Fetches all systemsession resources configured on the NetScaler.
 *
 * @param client is of type nitro_service; see param name
 * @return the configured systemsession resources
 * @throws Exception on communication or API errors
 */
public static systemsession[] get(nitro_service service) throws Exception{
  systemsession obj = new systemsession();
  systemsession[] response = (systemsession[])obj.get_resources(service);
  return response;
} | Use this API to fetch all the systemsession resources that are configured on netscaler. |
/**
 * Fetches the systemsession resource with the given session id.
 *
 * @param service the nitro service to query
 * @param sid session id of the resource to fetch
 * @return the matching systemsession resource
 * @throws Exception on communication or API errors
 */
public static systemsession get(nitro_service service, Long sid) throws Exception{
  systemsession obj = new systemsession();
  obj.set_sid(sid);
  systemsession response = (systemsession) obj.get_resource(service);
  return response;
} | Use this API to fetch systemsession resource of given name . |
/**
 * Fetches the systemsession resources with the given session ids,
 * one API call per id. Returns null when the id array is null or empty.
 *
 * @param service the nitro service to query
 * @param sid session ids of the resources to fetch
 * @return the matching resources in the same order, or null
 * @throws Exception on communication or API errors
 */
public static systemsession[] get(nitro_service service, Long sid[]) throws Exception{
  if (sid !=null && sid.length>0) {
    systemsession response[] = new systemsession[sid.length];
    systemsession obj[] = new systemsession[sid.length];
    for (int i=0;i<sid.length;i++) {
      obj[i] = new systemsession();
      obj[i].set_sid(sid[i]);
      response[i] = (systemsession) obj[i].get_resource(service);
    }
    return response;
  }
  return null;
} | Use this API to fetch systemsession resources of given names . |
/**
 * Fetches the systemsession resources matching the given filter.
 *
 * @param service the nitro service to query
 * @param filter filter expression, e.g. "port:80,servicetype:HTTP"
 * @return the matching systemsession resources
 * @throws Exception on communication or API errors
 */
public static systemsession[] get_filtered(nitro_service service, String filter) throws Exception{
  systemsession obj = new systemsession();
  options option = new options();
  option.set_filter(filter);
  systemsession[] response = (systemsession[]) obj.getfiltered(service, option);
  return response;
} | Use this API to fetch filtered set of systemsession resources.
filter string should be in JSON format, e.g. "port:80,servicetype:HTTP". |
/**
 * Updates the given snmpalarm on the NetScaler. Copies the settable
 * fields into a fresh request object before performing the update.
 *
 * @param client the nitro service client
 * @param resource alarm settings to apply (identified by trapname)
 * @return the response of the update operation
 * @throws Exception on communication or API errors
 */
public static base_response update(nitro_service client, snmpalarm resource) throws Exception {
  snmpalarm updateresource = new snmpalarm();
  updateresource.trapname = resource.trapname;
  updateresource.thresholdvalue = resource.thresholdvalue;
  updateresource.normalvalue = resource.normalvalue;
  updateresource.time = resource.time;
  updateresource.state = resource.state;
  updateresource.severity = resource.severity;
  updateresource.logging = resource.logging;
  return updateresource.update_resource(client);
} | Use this API to update snmpalarm. |
/**
 * Updates multiple snmpalarm resources in one bulk request. Returns
 * null when the input array is null or empty.
 *
 * @param client the nitro service client
 * @param resources alarm settings to apply (identified by trapname each)
 * @return the bulk responses, or null for empty input
 * @throws Exception on communication or API errors
 */
public static base_responses update(nitro_service client, snmpalarm resources[]) throws Exception {
  base_responses result = null;
  if (resources != null && resources.length > 0) {
    // copy only the settable fields into fresh request objects
    snmpalarm updateresources[] = new snmpalarm[resources.length];
    for (int i=0;i<resources.length;i++){
      updateresources[i] = new snmpalarm();
      updateresources[i].trapname = resources[i].trapname;
      updateresources[i].thresholdvalue = resources[i].thresholdvalue;
      updateresources[i].normalvalue = resources[i].normalvalue;
      updateresources[i].time = resources[i].time;
      updateresources[i].state = resources[i].state;
      updateresources[i].severity = resources[i].severity;
      updateresources[i].logging = resources[i].logging;
    }
    result = update_bulk_request(client, updateresources);
  }
  return result;
} | Use this API to update snmpalarm resources. |
/**
 * Unsets properties of the given snmpalarm; the properties to unset
 * are named in the args array.
 *
 * @param client the nitro service client
 * @param resource alarm identified by trapname
 * @param args names of the properties to unset
 * @return the response of the unset operation
 * @throws Exception on communication or API errors
 */
public static base_response unset(nitro_service client, snmpalarm resource, String[] args) throws Exception{
  snmpalarm unsetresource = new snmpalarm();
  unsetresource.trapname = resource.trapname;
  return unsetresource.unset_resource(client,args);
} | Use this API to unset the properties of snmpalarm resource.
Properties that need to be unset are specified in args array. |
/**
 * Unsets properties of multiple snmpalarm resources in one bulk
 * request. Returns null when the trapname array is null or empty.
 *
 * @param client the nitro service client
 * @param trapname names of the alarms to modify
 * @param args names of the properties to unset
 * @return the bulk responses, or null for empty input
 * @throws Exception on communication or API errors
 */
public static base_responses unset(nitro_service client, String trapname[], String args[]) throws Exception {
  base_responses result = null;
  if (trapname != null && trapname.length > 0) {
    snmpalarm unsetresources[] = new snmpalarm[trapname.length];
    for (int i=0;i<trapname.length;i++){
      unsetresources[i] = new snmpalarm();
      unsetresources[i].trapname = trapname[i];
    }
    result = unset_bulk_request(client, unsetresources,args);
  }
  return result;
} | Use this API to unset the properties of snmpalarm resources.
Properties that need to be unset are specified in args array. |
/**
 * Enables the snmpalarm with the given trap name.
 *
 * @param client the nitro service client
 * @param trapname name of the alarm to enable
 * @return the response of the "enable" operation
 * @throws Exception on communication or API errors
 */
public static base_response enable(nitro_service client, String trapname) throws Exception {
  snmpalarm enableresource = new snmpalarm();
  enableresource.trapname = trapname;
  return enableresource.perform_operation(client,"enable");
} | Use this API to enable snmpalarm of given name. |
/**
 * Enables multiple snmpalarm resources in one bulk request. Returns
 * null when the trapname array is null or empty.
 *
 * @param client the nitro service client
 * @param trapname names of the alarms to enable
 * @return the bulk responses, or null for empty input
 * @throws Exception on communication or API errors
 */
public static base_responses enable(nitro_service client, String trapname[]) throws Exception {
  base_responses result = null;
  if (trapname != null && trapname.length > 0) {
    snmpalarm enableresources[] = new snmpalarm[trapname.length];
    for (int i=0;i<trapname.length;i++){
      enableresources[i] = new snmpalarm();
      enableresources[i].trapname = trapname[i];
    }
    result = perform_operation_bulk_request(client, enableresources,"enable");
  }
  return result;
} | Use this API to enable snmpalarm resources of given names. |
/**
 * Disables the snmpalarm with the given trap name.
 *
 * @param client the nitro service client
 * @param trapname name of the alarm to disable
 * @return the response of the "disable" operation
 * @throws Exception on communication or API errors
 */
public static base_response disable(nitro_service client, String trapname) throws Exception {
  snmpalarm disableresource = new snmpalarm();
  disableresource.trapname = trapname;
  return disableresource.perform_operation(client,"disable");
} | Use this API to disable snmpalarm of given name. |
/**
 * Disables multiple snmpalarm resources in one bulk request. Returns
 * null when the trapname array is null or empty.
 *
 * @param client the nitro service client
 * @param trapname names of the alarms to disable
 * @return the bulk responses, or null for empty input
 * @throws Exception on communication or API errors
 */
public static base_responses disable(nitro_service client, String trapname[]) throws Exception {
  base_responses result = null;
  if (trapname != null && trapname.length > 0) {
    snmpalarm disableresources[] = new snmpalarm[trapname.length];
    for (int i=0;i<trapname.length;i++){
      disableresources[i] = new snmpalarm();
      disableresources[i].trapname = trapname[i];
    }
    result = perform_operation_bulk_request(client, disableresources,"disable");
  }
  return result;
} | Use this API to disable snmpalarm resources of given names. |
/**
 * Fetches all snmpalarm resources configured on the NetScaler.
 *
 * @param service the nitro service to query
 * @return the configured snmpalarm resources
 * @throws Exception on communication or API errors
 */
public static snmpalarm[] get(nitro_service service) throws Exception{
  snmpalarm obj = new snmpalarm();
  snmpalarm[] response = (snmpalarm[])obj.get_resources(service);
  return response;
} | Use this API to fetch all the snmpalarm resources that are configured on netscaler. |
/**
 * Fetches the snmpalarm resource with the given trap name.
 *
 * @param service the nitro service to query
 * @param trapname name of the alarm to fetch
 * @return the matching snmpalarm resource
 * @throws Exception on communication or API errors
 */
public static snmpalarm get(nitro_service service, String trapname) throws Exception{
  snmpalarm obj = new snmpalarm();
  obj.set_trapname(trapname);
  snmpalarm response = (snmpalarm) obj.get_resource(service);
  return response;
} | Use this API to fetch snmpalarm resource of given name . |
/**
 * Fetches the snmpalarm resources with the given trap names, one API
 * call per name. Returns null when the name array is null or empty.
 *
 * @param service the nitro service to query
 * @param trapname names of the alarms to fetch
 * @return the matching resources in the same order, or null
 * @throws Exception on communication or API errors
 */
public static snmpalarm[] get(nitro_service service, String trapname[]) throws Exception{
  if (trapname !=null && trapname.length>0) {
    snmpalarm response[] = new snmpalarm[trapname.length];
    snmpalarm obj[] = new snmpalarm[trapname.length];
    for (int i=0;i<trapname.length;i++) {
      obj[i] = new snmpalarm();
      obj[i].set_trapname(trapname[i]);
      response[i] = (snmpalarm) obj[i].get_resource(service);
    }
    return response;
  }
  return null;
} | Use this API to fetch snmpalarm resources of given names . |
/**
 * Fetches the snmpalarm resources matching the given filter.
 *
 * @param service the nitro service to query
 * @param filter filter expression, e.g. "port:80,servicetype:HTTP"
 * @return the matching snmpalarm resources
 * @throws Exception on communication or API errors
 */
public static snmpalarm[] get_filtered(nitro_service service, String filter) throws Exception{
  snmpalarm obj = new snmpalarm();
  options option = new options();
  option.set_filter(filter);
  snmpalarm[] response = (snmpalarm[]) obj.getfiltered(service, option);
  return response;
} | Use this API to fetch filtered set of snmpalarm resources.
filter string should be in JSON format, e.g. "port:80,servicetype:HTTP". |
/**
 * Fetches the dnsglobal_binding resource (a singleton binding).
 *
 * @param service the nitro service to query
 * @return the dnsglobal_binding resource
 * @throws Exception on communication or API errors
 */
public static dnsglobal_binding get(nitro_service service) throws Exception{
  dnsglobal_binding obj = new dnsglobal_binding();
  dnsglobal_binding response = (dnsglobal_binding) obj.get_resource(service);
  return response;
} | Use this API to fetch a dnsglobal_binding resource . |
/**
 * Creates the given sslfipskey on the NetScaler.
 *
 * @param client the nitro service client
 * @param resource key settings (name, modulus, exponent)
 * @return the response of the "create" operation
 * @throws Exception on communication or API errors
 */
public static base_response create(nitro_service client, sslfipskey resource) throws Exception {
  sslfipskey createresource = new sslfipskey();
  createresource.fipskeyname = resource.fipskeyname;
  createresource.modulus = resource.modulus;
  createresource.exponent = resource.exponent;
  return createresource.perform_operation(client,"create");
} | Use this API to create sslfipskey. |
/**
 * Creates multiple sslfipskey resources in one bulk request. Returns
 * null when the input array is null or empty.
 *
 * @param client the nitro service client
 * @param resources key settings (name, modulus, exponent each)
 * @return the bulk responses, or null for empty input
 * @throws Exception on communication or API errors
 */
public static base_responses create(nitro_service client, sslfipskey resources[]) throws Exception {
  base_responses result = null;
  if (resources != null && resources.length > 0) {
    sslfipskey createresources[] = new sslfipskey[resources.length];
    for (int i=0;i<resources.length;i++){
      createresources[i] = new sslfipskey();
      createresources[i].fipskeyname = resources[i].fipskeyname;
      createresources[i].modulus = resources[i].modulus;
      createresources[i].exponent = resources[i].exponent;
    }
    result = perform_operation_bulk_request(client, createresources,"create");
  }
  return result;
} | Use this API to create sslfipskey resources. |
/**
 * Deletes the sslfipskey with the given name.
 *
 * @param client the nitro service client
 * @param fipskeyname name of the key to delete
 * @return the response of the delete operation
 * @throws Exception on communication or API errors
 */
public static base_response delete(nitro_service client, String fipskeyname) throws Exception {
  sslfipskey deleteresource = new sslfipskey();
  deleteresource.fipskeyname = fipskeyname;
  return deleteresource.delete_resource(client);
} | Use this API to delete sslfipskey of given name. |
/**
 * Deletes multiple sslfipskey resources in one bulk request. Returns
 * null when the name array is null or empty.
 *
 * @param client the nitro service client
 * @param fipskeyname names of the keys to delete
 * @return the bulk responses, or null for empty input
 * @throws Exception on communication or API errors
 */
public static base_responses delete(nitro_service client, String fipskeyname[]) throws Exception {
  base_responses result = null;
  if (fipskeyname != null && fipskeyname.length > 0) {
    sslfipskey deleteresources[] = new sslfipskey[fipskeyname.length];
    for (int i=0;i<fipskeyname.length;i++){
      deleteresources[i] = new sslfipskey();
      deleteresources[i].fipskeyname = fipskeyname[i];
    }
    result = delete_bulk_request(client, deleteresources);
  }
  return result;
} | Use this API to delete sslfipskey resources of given names. |
/**
 * Imports the given sslfipskey into the NetScaler.
 *
 * @param client the nitro service client
 * @param resource import settings (name, key, inform, wrapkeyname, iv, exponent)
 * @return the response of the "Import" operation
 * @throws Exception on communication or API errors
 */
public static base_response Import(nitro_service client, sslfipskey resource) throws Exception {
  sslfipskey Importresource = new sslfipskey();
  Importresource.fipskeyname = resource.fipskeyname;
  Importresource.key = resource.key;
  Importresource.inform = resource.inform;
  Importresource.wrapkeyname = resource.wrapkeyname;
  Importresource.iv = resource.iv;
  Importresource.exponent = resource.exponent;
  return Importresource.perform_operation(client,"Import");
} | Use this API to Import sslfipskey. |
/**
 * Imports multiple sslfipskey resources in one bulk request. Returns
 * null when the input array is null or empty.
 *
 * @param client the nitro service client
 * @param resources import settings for each key
 * @return the bulk responses, or null for empty input
 * @throws Exception on communication or API errors
 */
public static base_responses Import(nitro_service client, sslfipskey resources[]) throws Exception {
  base_responses result = null;
  if (resources != null && resources.length > 0) {
    sslfipskey Importresources[] = new sslfipskey[resources.length];
    for (int i=0;i<resources.length;i++){
      Importresources[i] = new sslfipskey();
      Importresources[i].fipskeyname = resources[i].fipskeyname;
      Importresources[i].key = resources[i].key;
      Importresources[i].inform = resources[i].inform;
      Importresources[i].wrapkeyname = resources[i].wrapkeyname;
      Importresources[i].iv = resources[i].iv;
      Importresources[i].exponent = resources[i].exponent;
    }
    result = perform_operation_bulk_request(client, Importresources,"Import");
  }
  return result;
} | Use this API to Import sslfipskey resources. |
/**
 * Exports the given sslfipskey from the NetScaler.
 *
 * @param client the nitro service client
 * @param resource export settings (name, key file)
 * @return the response of the "export" operation
 * @throws Exception on communication or API errors
 */
public static base_response export(nitro_service client, sslfipskey resource) throws Exception {
  sslfipskey exportresource = new sslfipskey();
  exportresource.fipskeyname = resource.fipskeyname;
  exportresource.key = resource.key;
  return exportresource.perform_operation(client,"export");
} | Use this API to export sslfipskey. |
/**
 * Exports multiple sslfipskey resources in one bulk request. Returns
 * null when the input array is null or empty.
 *
 * @param client the nitro service client
 * @param resources export settings for each key
 * @return the bulk responses, or null for empty input
 * @throws Exception on communication or API errors
 */
public static base_responses export(nitro_service client, sslfipskey resources[]) throws Exception {
  base_responses result = null;
  if (resources != null && resources.length > 0) {
    sslfipskey exportresources[] = new sslfipskey[resources.length];
    for (int i=0;i<resources.length;i++){
      exportresources[i] = new sslfipskey();
      exportresources[i].fipskeyname = resources[i].fipskeyname;
      exportresources[i].key = resources[i].key;
    }
    result = perform_operation_bulk_request(client, exportresources,"export");
  }
  return result;
} | Use this API to export sslfipskey resources. |
/**
 * Fetches all sslfipskey resources configured on the NetScaler.
 *
 * @param service the nitro service to query
 * @return the configured sslfipskey resources
 * @throws Exception on communication or API errors
 */
public static sslfipskey[] get(nitro_service service) throws Exception{
  sslfipskey obj = new sslfipskey();
  sslfipskey[] response = (sslfipskey[])obj.get_resources(service);
  return response;
} | Use this API to fetch all the sslfipskey resources that are configured on netscaler. |
/**
 * Fetches the sslfipskey resource with the given name.
 *
 * @param service the nitro service to query
 * @param fipskeyname name of the key to fetch
 * @return the matching sslfipskey resource
 * @throws Exception on communication or API errors
 */
public static sslfipskey get(nitro_service service, String fipskeyname) throws Exception{
  sslfipskey obj = new sslfipskey();
  obj.set_fipskeyname(fipskeyname);
  sslfipskey response = (sslfipskey) obj.get_resource(service);
  return response;
} | Use this API to fetch sslfipskey resource of given name . |
/**
 * Fetches the sslfipskey resources with the given names, one API call
 * per name. Returns null when the name array is null or empty.
 *
 * @param service the nitro service to query
 * @param fipskeyname names of the keys to fetch
 * @return the matching resources in the same order, or null
 * @throws Exception on communication or API errors
 */
public static sslfipskey[] get(nitro_service service, String fipskeyname[]) throws Exception{
  if (fipskeyname !=null && fipskeyname.length>0) {
    sslfipskey response[] = new sslfipskey[fipskeyname.length];
    sslfipskey obj[] = new sslfipskey[fipskeyname.length];
    for (int i=0;i<fipskeyname.length;i++) {
      obj[i] = new sslfipskey();
      obj[i].set_fipskeyname(fipskeyname[i]);
      response[i] = (sslfipskey) obj[i].get_resource(service);
    }
    return response;
  }
  return null;
} | Use this API to fetch sslfipskey resources of given names . |
/**
 * Fetches the sslfipskey resources matching the given filter.
 *
 * @param service the nitro service to query
 * @param filter filter expression, e.g. "port:80,servicetype:HTTP"
 * @return the matching sslfipskey resources
 * @throws Exception on communication or API errors
 */
public static sslfipskey[] get_filtered(nitro_service service, String filter) throws Exception{
  sslfipskey obj = new sslfipskey();
  options option = new options();
  option.set_filter(filter);
  sslfipskey[] response = (sslfipskey[]) obj.getfiltered(service, option);
  return response;
} | Use this API to fetch filtered set of sslfipskey resources.
filter string should be in JSON format, e.g. "port:80,servicetype:HTTP". |
/**
 * Fetches the aaauser_vpntrafficpolicy_binding resources bound to the
 * given user name.
 *
 * @param service the nitro service to query
 * @param username name of the user
 * @return the bound vpntrafficpolicy resources
 * @throws Exception on communication or API errors
 */
public static aaauser_vpntrafficpolicy_binding[] get(nitro_service service, String username) throws Exception{
  aaauser_vpntrafficpolicy_binding obj = new aaauser_vpntrafficpolicy_binding();
  obj.set_username(username);
  aaauser_vpntrafficpolicy_binding response[] = (aaauser_vpntrafficpolicy_binding[]) obj.get_resources(service);
  return response;
} | Use this API to fetch aaauser_vpntrafficpolicy_binding resources of given name . |
/**
 * Fetches the statistics of all gslbdomain_stats resources configured
 * on the NetScaler.
 *
 * @param service the nitro service to query
 * @return the statistics resources
 * @throws Exception on communication or API errors
 */
public static gslbdomain_stats[] get(nitro_service service) throws Exception{
  gslbdomain_stats obj = new gslbdomain_stats();
  gslbdomain_stats[] response = (gslbdomain_stats[])obj.stat_resources(service);
  return response;
} | Use this API to fetch the statistics of all gslbdomain_stats resources that are configured on netscaler. |
/**
 * Fetches the statistics of the gslbdomain_stats resource with the
 * given name.
 *
 * @param service the nitro service to query
 * @param name name of the domain
 * @return the statistics resource
 * @throws Exception on communication or API errors
 */
public static gslbdomain_stats get(nitro_service service, String name) throws Exception{
  gslbdomain_stats obj = new gslbdomain_stats();
  obj.set_name(name);
  gslbdomain_stats response = (gslbdomain_stats) obj.stat_resource(service);
  return response;
} | Use this API to fetch statistics of gslbdomain_stats resource of given name . |
/**
 * Sets the evaluation command; pass null (or a blank string) to skip
 * evaluation via the command line. The stored command is trimmed and
 * empty strings are normalized to null before the command object is
 * (re)built.
 *
 * @param evalCmd the command line, or null to disable
 */
public void setEvalCmd(String evalCmd)
{
  // Normalize: trim and map empty strings to null ("no evaluation").
  String normalized = evalCmd;
  if (normalized != null) {
    normalized = normalized.trim();
    if (normalized.length() == 0) {
      normalized = null;
    }
  }
  this.cmdStr = normalized;
  cmd = getCmd(cmdStr);
} | Set the evaluation command (set to null to skip evaluation using command line)
@param evalCmd |
/**
 * Runs the wrapped java.util.TimerTask once. A canceled task is removed
 * from the scheduler without running; a one-shot task (no periodic
 * schedule strategy) is canceled after it has run, while periodic tasks
 * stay scheduled. Any Throwable thrown by the task is logged and
 * swallowed so the timer thread is never killed by a failing task.
 */
public void runTask() {
  if (isCanceled()) {
    // canceled before execution: just deregister from the scheduler
    scheduler.cancel(this.getData().getTaskID());
  }
  else {
    try{
      taskData.getJavaUtilTimerTask().run();
      if(taskData.getPeriodicScheduleStrategy() == null) {
        // one-shot task: remove it once it has run
        scheduler.cancel(this.getData().getTaskID());
      }
      else {
        if (logger.isDebugEnabled()) {
          logger.debug("task data has a periodic schedule strategy, not cancelling the task");
        }
      }
    } catch(Throwable e) {
      // deliberately swallow: a failing task must not kill the timer
      logger.error(e.getMessage(),e);
    }
  }
} | /*
(non-Javadoc)
@see TimerTask#runTask() |
/**
 * Finds a good learning rate to start with. Since eta = 1/(lambda*(t0+t)),
 * a good t0 is derived from the best eta found on a sample: phase 1 grows
 * eta geometrically (factor 2) while the objective improves over the
 * starting objective; phase 2 restarts from seta and shrinks eta,
 * counting down roughly the first 10 improving candidates and keeping
 * the best one.
 *
 * NOTE(review): if no eta ever improves on the starting objective,
 * totest never reaches 0 and phase 2 keeps halving eta indefinitely —
 * confirm termination for pathological functions.
 *
 * @param function the stochastic objective function
 * @param initial the initial weight vector
 * @param sampleSize size of the sample used to estimate the objective
 * @param seta the starting eta
 * @return the chosen eta (divided by factor as implicit regularization)
 */
public double tune(AbstractStochasticCachingDiffUpdateFunction function, double[] initial, int sampleSize, double seta)
{
  Timing timer = new Timing();
  int[] sample = function.getSample(sampleSize);
  // objective at the starting point; candidates must beat this
  double sobj = getObjective(function, initial, 1, sample);
  double besteta = 1;
  double bestobj = sobj;
  double eta = seta;
  int totest = 10;
  double factor = 2;
  boolean phase2 = false;
  while (totest > 0 || !phase2)
  {
    double obj = tryEta(function, initial, sample, eta);
    boolean okay = (obj < sobj);
    sayln("  Trying eta=" + eta + "  obj=" + obj + ((okay)? "(possible)":"(too large)"));
    if (okay)
    {
      totest -= 1;
      if (obj < bestobj) {
        bestobj = obj;
        besteta = eta;
      }
    }
    if (! phase2)
    {
      if (okay) {
        // phase 1: grow eta while it still improves the objective
        eta = eta * factor;
      } else {
        // too large: switch to phase 2 and restart from seta
        phase2 = true;
        eta = seta;
      }
    }
    if (phase2) {
      // phase 2: shrink eta geometrically
      eta = eta / factor;
    }
  }
  // take it on the safe side (implicit regularization)
  besteta /= factor;
  // determine t
  t0 = (int) (1 / (besteta * lambda));
  sayln("  Taking eta=" + besteta + " t0=" + t0);
  sayln("  Tuning completed in: " + Timing.toSecondsString(timer.report()) + " s");
  return besteta;
} | Finds a good learning rate to start with.
} | Finds a good learning rate to start with.
eta = 1/(lambda*(t0+t)) - we find good t0
@param function
@param initial
@param sampleSize
@param seta |
/**
 * Returns the sum of squares of the components of {@code w} — i.e. the
 * squared L2 norm (no square root is taken).
 *
 * @param w the weight vector
 * @return the squared L2 norm of w
 */
private static double getNorm(double[] w)
{
  double sumOfSquares = 0;
  for (double component : w) {
    sumOfSquares += component * component;
  }
  return sumOfSquares;
} | really this is the square of the L2 norm.... |
/**
 * Imports the given appfwsignatures object into the NetScaler.
 *
 * @param client the nitro service client
 * @param resource import settings (src, name, xslt, comment, overwrite, merge, sha1)
 * @return the response of the "Import" operation
 * @throws Exception on communication or API errors
 */
public static base_response Import(nitro_service client, appfwsignatures resource) throws Exception {
  appfwsignatures Importresource = new appfwsignatures();
  Importresource.src = resource.src;
  Importresource.name = resource.name;
  Importresource.xslt = resource.xslt;
  Importresource.comment = resource.comment;
  Importresource.overwrite = resource.overwrite;
  Importresource.merge = resource.merge;
  Importresource.sha1 = resource.sha1;
  return Importresource.perform_operation(client,"Import");
} | Use this API to Import appfwsignatures. |
/**
 * Changes the given appfwsignatures object. Note that the underlying
 * NITRO operation is named "update" even though this method is called
 * change.
 *
 * @param client the nitro service client
 * @param resource settings to change (name, mergedefault)
 * @return the response of the operation
 * @throws Exception on communication or API errors
 */
public static base_response change(nitro_service client, appfwsignatures resource) throws Exception {
  appfwsignatures updateresource = new appfwsignatures();
  updateresource.name = resource.name;
  updateresource.mergedefault = resource.mergedefault;
  return updateresource.perform_operation(client,"update");
} | Use this API to change appfwsignatures. |
/**
 * Fetches all appfwsignatures resources configured on the NetScaler and
 * returns the first one.
 *
 * @param service the nitro service to query
 * @return the first configured appfwsignatures resource, or null when none exist
 * @throws Exception on communication or API errors
 */
public static appfwsignatures get(nitro_service service) throws Exception{
  appfwsignatures obj = new appfwsignatures();
  appfwsignatures[] response = (appfwsignatures[])obj.get_resources(service);
  // guard against a missing/empty result instead of failing with a
  // NullPointerException / ArrayIndexOutOfBoundsException
  if (response == null || response.length == 0) {
    return null;
  }
  return response[0];
} | Use this API to fetch all the appfwsignatures resources that are configured on netscaler. |
/**
 * Fetches the appfwsignatures resource with the given name.
 *
 * @param service the nitro service to query
 * @param name name of the resource to fetch
 * @return the matching appfwsignatures resource
 * @throws Exception on communication or API errors
 */
public static appfwsignatures get(nitro_service service, String name) throws Exception{
  appfwsignatures obj = new appfwsignatures();
  obj.set_name(name);
  appfwsignatures response = (appfwsignatures) obj.get_resource(service);
  return response;
} | Use this API to fetch appfwsignatures resource of given name . |
/**
 * Adds a single position to the token, lazily creating the position
 * container on first use.
 *
 * @param position the position to add
 */
final public void addPosition(int position) {
  if (tokenPosition != null) {
    tokenPosition.add(position);
  } else {
    // first position: create the container
    tokenPosition = new MtasPosition(position);
  }
} | Adds the position.
@param position the position |
/**
 * Adds a range of positions [start, end] to the token. The first range
 * is stored as a compact range; subsequent ranges are expanded into an
 * explicit array of positions before being merged in.
 *
 * @param start the first position of the range (inclusive)
 * @param end the last position of the range (inclusive)
 */
final public void addPositionRange(int start, int end) {
  if (tokenPosition == null) {
    tokenPosition = new MtasPosition(start, end);
  } else {
    // expand the range into individual positions so they can be merged
    int[] positions = new int[end - start + 1];
    for (int i = start; i <= end; i++) {
      positions[i - start] = i;
    }
    tokenPosition.add(positions);
  }
} | Adds the position range.
@param start the start
@param end the end |
/**
 * Adds multiple positions to the token; null or empty input is ignored.
 * The position container is lazily created on first use.
 *
 * @param positions the positions to add
 */
final public void addPositions(int[] positions) {
  // Nothing to do for null or empty input.
  if (positions == null || positions.length == 0) {
    return;
  }
  if (tokenPosition == null) {
    tokenPosition = new MtasPosition(positions);
  } else {
    tokenPosition.add(positions);
  }
} | Adds the positions.
@param positions the positions |
/**
 * Adds multiple positions given as a set, by converting to a primitive
 * int array and delegating to {@link #addPositions(int[])}.
 *
 * @param list the set of positions to add
 */
final public void addPositions(Set<Integer> list) {
  int[] positions = ArrayUtils
      .toPrimitive(list.toArray(new Integer[list.size()]));
  addPositions(positions);
} | Adds the positions.
@param list the list |
/**
 * Checks whether the token's position is of the given type.
 *
 * @param type the position type to check for
 * @return false when no position is registered, otherwise the result
 *         of the position's own type check
 */
final public Boolean checkPositionType(String type) {
  // Without a registered position, no type can match.
  if (tokenPosition == null) {
    return false;
  }
  return tokenPosition.checkType(type);
} | Check position type.
/**
 * Checks whether a real offset should be provided: one must be stored,
 * reporting must be enabled, and it must differ from the regular offset
 * (an identical real offset is redundant).
 *
 * @return true when a distinct real offset is available
 */
final public Boolean checkRealOffset() {
  // No real offset stored, or reporting disabled: nothing to provide.
  if (tokenRealOffset == null || !provideRealOffset) {
    return false;
  }
  // Without a regular offset, the real offset is always informative.
  if (tokenOffset == null) {
    return true;
  }
  // Only provide it when it differs from the regular offset.
  return tokenOffset.getStart() != tokenRealOffset.getStart()
      || tokenOffset.getEnd() != tokenRealOffset.getEnd();
} | Check real offset.
@return the boolean |
/**
 * Sets the token offset. Incomplete offsets (null start or end) are
 * silently ignored; an inverted range is rejected.
 *
 * @param start the start offset
 * @param end the end offset
 * @throws IllegalArgumentException when start &gt; end
 */
final public void setOffset(Integer start, Integer end) {
  // Silently ignore incomplete offsets.
  if (start == null || end == null) {
    return;
  }
  if (start > end) {
    throw new IllegalArgumentException("Start offset after end offset");
  }
  tokenOffset = new MtasOffset(start, end);
} | Sets the offset.
@param start the start
@param end the end |
/**
 * Adds an offset to the token. The first offset is simply set;
 * incomplete offsets (null start or end) are silently ignored, and an
 * inverted range is rejected.
 *
 * @param start the start offset
 * @param end the end offset
 * @throws IllegalArgumentException when start &gt; end
 */
final public void addOffset(Integer start, Integer end) {
  // First offset: just set it.
  if (tokenOffset == null) {
    setOffset(start, end);
    return;
  }
  // Silently ignore incomplete offsets.
  if (start == null || end == null) {
    return;
  }
  if (start > end) {
    throw new IllegalArgumentException("Start offset after end offset");
  }
  tokenOffset.add(start, end);
} | Adds the offset.
@param start the start
@param end the end |
/**
 * Sets the token's real offset. Incomplete offsets (null start or end)
 * are silently ignored; an inverted range is rejected.
 *
 * @param start the start offset
 * @param end the end offset
 * @throws IllegalArgumentException when start &gt; end
 */
final public void setRealOffset(Integer start, Integer end) {
  // Silently ignore incomplete offsets.
  if (start == null || end == null) {
    return;
  }
  if (start > end) {
    throw new IllegalArgumentException(
        "Start real offset after end real offset");
  }
  tokenRealOffset = new MtasOffset(start, end);
} | Sets the real offset.
@param start the start
@param end the end |
/**
 * Extracts the prefix (the part before the first delimiter) from a
 * value, with NUL padding characters removed. A value without a
 * delimiter is entirely its own prefix.
 *
 * <p>Fixes: removed the always-true {@code list != null} check
 * (String.split never returns null) and replaced regex-based
 * {@code replaceAll} with the literal {@code replace}.
 *
 * NOTE(review): String.split interprets DELIMITER as a regex; this is
 * only safe while DELIMITER contains no regex metacharacters — confirm.
 *
 * @param value the value (may be null)
 * @return the prefix, or null for null input or a delimiter-only value
 */
public static String getPrefixFromValue(String value) {
  if (value == null) {
    return null;
  }
  if (value.contains(DELIMITER)) {
    String[] list = value.split(DELIMITER);
    // split can return an empty array when the value consists
    // solely of delimiters
    if (list.length > 0) {
      return list[0].replace("\u0000", "");
    }
    return null;
  }
  return value.replace("\u0000", "");
} | Gets the prefix from value.
@param value the value
@return the prefix from value |
/**
 * Extracts the postfix from a value using the pre/postfix pattern;
 * returns the empty string when the pattern does not match.
 *
 * @param value the value
 * @return the postfix (group 2 of the pattern), or "" when absent
 */
public static String getPostfixFromValue(String value) {
  String postfix = "";
  Matcher m = patternPrePostFix.matcher(value);
  if (m.find()) {
    postfix = m.group(2);
  }
  return postfix;
} | Gets the postfix from value.
@param value the value
@return the postfix from value |
/**
 * Extracts the postfix from an encoded term by walking its UTF-8 bytes
 * directly: the first loop skips whole code points up to and including
 * the 0x01 delimiter byte (end of the prefix); the second loop copies
 * code points until a 0x00 terminator byte or the end of the term.
 * Returns "" on any invalid UTF-8 lead byte.
 *
 * NOTE(review): the scratch array is sized term.offset + term.length and
 * indexed by the absolute position i, so slots before 'start' stay
 * unused — wasteful but safe since i &lt; length always holds.
 *
 * @param term the encoded term bytes
 * @return the postfix as a UTF-8 string, or "" when absent/invalid
 */
public static String getPostfixFromValue(BytesRef term) {
  int i = term.offset;
  int length = term.offset + term.length;
  byte[] postfix = new byte[length];
  // phase 1: skip code points until the 0x01 delimiter (end of prefix)
  while (i < length) {
    if ((term.bytes[i] & 0b10000000) == 0b00000000) {
      // single-byte (ASCII) code point
      if (term.bytes[i] == 0b00000001) {
        // delimiter found: the postfix starts right after it
        i++;
        break;
      } else {
        i++;
      }
    } else if ((term.bytes[i] & 0b11100000) == 0b11000000) {
      // 2-byte UTF-8 sequence
      i += 2;
    } else if ((term.bytes[i] & 0b11110000) == 0b11100000) {
      // 3-byte UTF-8 sequence
      i += 3;
    } else if ((term.bytes[i] & 0b11111000) == 0b11110000) {
      // 4-byte UTF-8 sequence
      i += 4;
    } else if ((term.bytes[i] & 0b11111100) == 0b11111000) {
      // 5-byte sequence (not valid UTF-8, but tolerated here)
      i += 5;
    } else if ((term.bytes[i] & 0b11111110) == 0b11111100) {
      // 6-byte sequence (not valid UTF-8, but tolerated here)
      i += 6;
    } else {
      // invalid lead byte
      return "";
    }
  }
  int start = i;
  // phase 2: copy code points until a 0x00 terminator or end of term
  while (i < length) {
    if ((term.bytes[i] & 0b10000000) == 0b00000000) {
      if (term.bytes[i] == 0b00000000) {
        // NUL terminator: postfix ends here
        break;
      }
      postfix[i] = term.bytes[i];
      i++;
    } else if ((term.bytes[i] & 0b11100000) == 0b11000000) {
      postfix[i] = term.bytes[i];
      postfix[i + 1] = term.bytes[i + 1];
      i += 2;
    } else if ((term.bytes[i] & 0b11110000) == 0b11100000) {
      postfix[i] = term.bytes[i];
      postfix[i + 1] = term.bytes[i + 1];
      postfix[i + 2] = term.bytes[i + 2];
      i += 3;
    } else if ((term.bytes[i] & 0b11111000) == 0b11110000) {
      postfix[i] = term.bytes[i];
      postfix[i + 1] = term.bytes[i + 1];
      postfix[i + 2] = term.bytes[i + 2];
      postfix[i + 3] = term.bytes[i + 3];
      i += 4;
    } else if ((term.bytes[i] & 0b11111100) == 0b11111000) {
      postfix[i] = term.bytes[i];
      postfix[i + 1] = term.bytes[i + 1];
      postfix[i + 2] = term.bytes[i + 2];
      postfix[i + 3] = term.bytes[i + 3];
      postfix[i + 4] = term.bytes[i + 4];
      i += 5;
    } else if ((term.bytes[i] & 0b11111110) == 0b11111100) {
      postfix[i] = term.bytes[i];
      postfix[i + 1] = term.bytes[i + 1];
      postfix[i + 2] = term.bytes[i + 2];
      postfix[i + 3] = term.bytes[i + 3];
      postfix[i + 4] = term.bytes[i + 4];
      postfix[i + 5] = term.bytes[i + 5];
      i += 6;
    } else {
      // invalid lead byte
      return "";
    }
  }
  return new String(Arrays.copyOfRange(postfix, start, i),
      StandardCharsets.UTF_8);
} | Gets the postfix from value.
@param term the term
@return the postfix from value |
/**
 * Builds a map from each value to an automaton matching the encoded
 * term "prefix DELIMITER value" followed by optional NUL padding.
 * When filter is true, regex metacharacters in the value are escaped
 * first so the value is treated literally.
 *
 * @param prefix the term prefix
 * @param valueList the values to build automata for; null yields an empty map
 * @param filter whether to escape regex metacharacters in the values
 * @return the map from (possibly escaped) value to its automaton
 */
public static Map<String, Automaton> createAutomatonMap(String prefix,
    List<String> valueList, Boolean filter) {
  HashMap<String, Automaton> automatonMap = new HashMap<>();
  if (valueList != null) {
    for (String item : valueList) {
      if (filter) {
        // escape regex metacharacters so the value matches literally;
        // note: the escaped form is also used as the map key
        item = item.replaceAll("([\\\"\\)\\(\\<\\>\\.\\@\\#\\]\\[\\{\\}])",
            "\\\\$1");
      }
      // "\u0000*" allows trailing NUL padding in the encoded term
      automatonMap.put(item,
          new RegExp(prefix + MtasToken.DELIMITER + item + "\u0000*")
              .toAutomaton());
    }
  }
  return automatonMap;
} | Creates the automaton map.
@param prefix the prefix
@param valueList the value list
@param filter the filter
@return the map |
/**
 * Compiles each automaton in the map into its byte-level runtime form.
 *
 * @param automatonMap the automaton map; null yields an empty result
 * @return a map with the same keys and compiled ByteRunAutomaton values
 */
public static Map<String, ByteRunAutomaton> byteRunAutomatonMap(
    Map<String, Automaton> automatonMap) {
  Map<String, ByteRunAutomaton> compiled = new HashMap<>();
  if (automatonMap == null) {
    return compiled;
  }
  for (Entry<String, Automaton> item : automatonMap.entrySet()) {
    compiled.put(item.getKey(), new ByteRunAutomaton(item.getValue()));
  }
  return compiled;
} | Byte run automaton map.
@param automatonMap the automaton map
@return the map |
/**
 * Compiles the automata from the map into a list of CompiledAutomaton
 * batches, optionally intersected with a regexp automaton over the
 * encoded term form. Automata are unioned in batches of (at most) 500;
 * when determinization of a batch is too complex, the batch size is
 * halved and retried until it succeeds or a single automaton still
 * fails, in which case an IOException is thrown.
 *
 * NOTE(review): when a batch is shrunk, only the first sub-batch of the
 * current window is compiled before the outer loop advances by the full
 * step of 500 — confirm the remaining keys of a shrunk window are not
 * skipped.
 *
 * @param prefix the term prefix used in the regexp
 * @param regexp the regexp to intersect with, or null for no filtering
 * @param automatonMap the automaton map
 * @return the list of compiled automata
 * @throws IOException Signals that an I/O exception has occurred.
 */
public static List<CompiledAutomaton> createAutomata(String prefix,
    String regexp, Map<String, Automaton> automatonMap) throws IOException {
  List<CompiledAutomaton> list = new ArrayList<>();
  Automaton automatonRegexp = null;
  if (regexp != null) {
    // "\u0000*" allows trailing NUL padding in the encoded term
    RegExp re = new RegExp(prefix + MtasToken.DELIMITER + regexp + "\u0000*");
    automatonRegexp = re.toAutomaton();
  }
  int step = 500;
  List<String> keyList = new ArrayList<>(automatonMap.keySet());
  for (int i = 0; i < keyList.size(); i += step) {
    int localStep = step;
    boolean success = false;
    CompiledAutomaton compiledAutomaton = null;
    while (!success) {
      success = true;
      int next = Math.min(keyList.size(), i + localStep);
      List<Automaton> listAutomaton = new ArrayList<>();
      for (int j = i; j < next; j++) {
        listAutomaton.add(automatonMap.get(keyList.get(j)));
      }
      Automaton automatonList = Operations.union(listAutomaton);
      Automaton automaton;
      if (automatonRegexp != null) {
        automaton = Operations.intersection(automatonList, automatonRegexp);
      } else {
        automaton = automatonList;
      }
      try {
        compiledAutomaton = new CompiledAutomaton(automaton);
      } catch (TooComplexToDeterminizeException e) {
        log.debug(e);
        // retry with a smaller batch; give up when a single automaton
        // is already too complex
        success = false;
        if (localStep > 1) {
          localStep /= 2;
        } else {
          throw new IOException("TooComplexToDeterminizeException");
        }
      }
    }
    list.add(compiledAutomaton);
  }
  return list;
} | Creates the automata.
@param prefix the prefix
@param regexp the regexp
@param automatonMap the automaton map
@return the list
@throws IOException Signals that an I/O exception has occurred. |
/**
 * Fetches the authenticationtacacspolicy_authenticationvserver_binding
 * resources bound to the policy with the given name.
 *
 * @param service the nitro service to query
 * @param name name of the policy
 * @return the bound authenticationvserver resources
 * @throws Exception on communication or API errors
 */
public static authenticationtacacspolicy_authenticationvserver_binding[] get(nitro_service service, String name) throws Exception{
  authenticationtacacspolicy_authenticationvserver_binding obj = new authenticationtacacspolicy_authenticationvserver_binding();
  obj.set_name(name);
  authenticationtacacspolicy_authenticationvserver_binding response[] = (authenticationtacacspolicy_authenticationvserver_binding[]) obj.get_resources(service);
  return response;
} | Use this API to fetch authenticationtacacspolicy_authenticationvserver_binding resources of given name . |
@Override
public void add(MtasDataItem<T1, T2> newItem) throws IOException {
if (newItem instanceof MtasDataItemAdvanced) {
MtasDataItemAdvanced<T1, T2> newTypedItem = (MtasDataItemAdvanced<T1, T2>) newItem;
valueSum = operations.add11(valueSum, newTypedItem.valueSum);
valueSumOfLogs = operations.add22(valueSumOfLogs,
newTypedItem.valueSumOfLogs);
valueSumOfSquares = operations.add11(valueSumOfSquares,
newTypedItem.valueSumOfSquares);
valueMin = operations.min11(valueMin, newTypedItem.valueMin);
valueMax = operations.max11(valueMax, newTypedItem.valueMax);
valueN += newTypedItem.valueN;
recomputeComparableSortValue = true;
} else {
throw new IOException("can only add MtasDataItemAdvanced");
}
} | /*
(non-Javadoc)
@see mtas.codec.util.DataCollector.MtasDataItem#add(mtas.codec.util.
DataCollector.MtasDataItem) |
@Override
public Map<String, Object> rewrite(boolean showDebugInfo) throws IOException {
Map<String, Object> response = new HashMap<>();
for (String statsItem : getStatsItems()) {
if (statsItem.equals(CodecUtil.STATS_TYPE_SUM)) {
response.put(statsItem, valueSum);
} else if (statsItem.equals(CodecUtil.STATS_TYPE_N)) {
response.put(statsItem, valueN);
} else if (statsItem.equals(CodecUtil.STATS_TYPE_MAX)) {
response.put(statsItem, valueMax);
} else if (statsItem.equals(CodecUtil.STATS_TYPE_MIN)) {
response.put(statsItem, valueMin);
} else if (statsItem.equals(CodecUtil.STATS_TYPE_SUMSQ)) {
response.put(statsItem, valueSumOfSquares);
} else if (statsItem.equals(CodecUtil.STATS_TYPE_SUMOFLOGS)) {
response.put(statsItem, valueSumOfLogs);
} else if (statsItem.equals(CodecUtil.STATS_TYPE_MEAN)) {
response.put(statsItem, getValue(statsItem));
} else if (statsItem.equals(CodecUtil.STATS_TYPE_GEOMETRICMEAN)) {
response.put(statsItem, getValue(statsItem));
} else if (statsItem.equals(CodecUtil.STATS_TYPE_STANDARDDEVIATION)) {
response.put(statsItem, getValue(statsItem));
} else if (statsItem.equals(CodecUtil.STATS_TYPE_VARIANCE)) {
response.put(statsItem, getValue(statsItem));
} else if (statsItem.equals(CodecUtil.STATS_TYPE_POPULATIONVARIANCE)) {
response.put(statsItem, getValue(statsItem));
} else if (statsItem.equals(CodecUtil.STATS_TYPE_QUADRATICMEAN)) {
response.put(statsItem, getValue(statsItem));
} else {
response.put(statsItem, null);
}
}
if (errorNumber > 0) {
Map<String, Object> errorResponse = new HashMap<>();
for (Entry<String, Integer> entry : getErrorList().entrySet()) {
errorResponse.put(entry.getKey(), entry.getValue());
}
response.put("errorNumber", errorNumber);
response.put("errorList", errorResponse);
}
if (showDebugInfo) {
response.put("sourceNumber", sourceNumber);
response.put("stats", "advanced");
}
return response;
} | /*
(non-Javadoc)
@see mtas.codec.util.DataCollector.MtasDataItem#rewrite() |
Subsets and Splits