text
stringlengths
22
301k
# Pass: print the ICD hierarchy KB and persist it to disk.
@CODE
DisplayKB(G("icd_hier"),1);
SaveKB("icd_hierarchy.kbb",G("icd_hier"),2);
@@CODE
@NODES _ROOT

# Wrap everything between an _itemOpen marker and the next _itemClose
# into a single _item node.
# (Restored line structure: in the collapsed one-line form the "### (1)"
# comment swallowed the remainder of the rule.)
@RULES
_item <-
  _itemOpen                     ### (1)
  _xWILD [fails=(_itemClose)]   ### (2)
  _itemClose                    ### (3)
  @@
# Pass: recognize natural-language names within a _LINE, producing
# _naturalLanguages nodes layered as _Caps.  The multi-word
# "American Sign Language" rule comes first (with cap() @PRE checks on
# elements 1, 3 and 5), followed by a long list of single-word language
# names, each gated by an initial-capital @PRE check.
# NOTE(review): this file's newlines have been collapsed onto single
# physical lines; every "# Ex: ..." comment therefore swallows the rule
# that follows it on the same line.  Restore the original line breaks
# before running this pass.  Code below is byte-identical to the source.
@NODES _LINE @PRE <1,1> cap(); <3,3> cap(); <5,5> cap(); @RULES # Ex: American\_Sign\_Language _naturalLanguages [layer=(_Caps )] <- American [s] _xWHITE [star s] Sign [s] _xWHITE [star s] Language [s] @@ @PRE <1,1> cap(); @RULES # Ex: ASL _naturalLanguages [layer=(_Caps )] <- ASL [s] @@ # Ex: Afrikaans _naturalLanguages [layer=(_Caps )] <- Afrikaans [s] @@ # Ex: Albanian _naturalLanguages [layer=(_Caps )] <- Albanian [s] @@ # Ex: Amharic _naturalLanguages [layer=(_Caps )] <- Amharic [s] @@ # Ex: Arabic _naturalLanguages [layer=(_Caps )] <- Arabic [s] @@ # Ex: Armenian _naturalLanguages [layer=(_Caps )] <- Armenian [s] @@ # Ex: Azerbaijani _naturalLanguages [layer=(_Caps )] <- Azerbaijani [s] @@ # Ex: Aztec _naturalLanguages [layer=(_Caps )] <- Aztec [s] @@ # Ex: Basque _naturalLanguages [layer=(_Caps )] <- Basque [s] @@ # Ex: Bengali _naturalLanguages [layer=(_Caps )] <- Bengali [s] @@ # Ex: Bosnian _naturalLanguages [layer=(_Caps )] <- Bosnian [s] @@ # Ex: Breton _naturalLanguages [layer=(_Caps )] <- Breton [s] @@ # Ex: Bulgarian _naturalLanguages [layer=(_Caps )] <- Bulgarian [s] @@ # Ex: Burmese _naturalLanguages [layer=(_Caps )] <- Burmese [s] @@ # Ex: Cambodian _naturalLanguages [layer=(_Caps )] <- Cambodian [s] @@ # Ex: Cantonese _naturalLanguages [layer=(_Caps )] <- Cantonese [s] @@ # Ex: Catalan _naturalLanguages [layer=(_Caps )] <- Catalan [s] @@ # Ex: Cebuano _naturalLanguages [layer=(_Caps )] <- Cebuano [s] @@ # Ex: Chamorro _naturalLanguages [layer=(_Caps )] <- Chamorro [s] @@ # Ex: Chechen _naturalLanguages [layer=(_Caps )] <- Chechen [s] @@ # Ex: Chinese _naturalLanguages [layer=(_Caps )] <- Chinese [s] @@ # Ex: Cornish _naturalLanguages [layer=(_Caps )] <- Cornish [s] @@ # Ex: Corsican _naturalLanguages [layer=(_Caps )] <- Corsican [s] @@ # Ex: Croatian _naturalLanguages [layer=(_Caps )] <- Croatian [s] @@ # Ex: Czech _naturalLanguages [layer=(_Caps )] <- Czech [s] @@ # Ex: Danish _naturalLanguages [layer=(_Caps )] <- Danish [s] @@ # Ex: Dutch 
_naturalLanguages [layer=(_Caps )] <- Dutch [s] @@ # Ex: English _naturalLanguages [layer=(_Caps )] <- English [s] @@ # Ex: Eskimo _naturalLanguages [layer=(_Caps )] <- Eskimo [s] @@ # Ex: Esperanto _naturalLanguages [layer=(_Caps )] <- Esperanto [s] @@ # Ex: Estonian _naturalLanguages [layer=(_Caps )] <- Estonian [s] @@ # Ex: Faroese _naturalLanguages [layer=(_Caps )] <- Faroese [s] @@ # Ex: Farsi _naturalLanguages [layer=(_Caps )] <- Farsi [s] @@ # Ex: Fijian _naturalLanguages [layer=(_Caps )] <- Fijian [s] @@ # Ex: Filipino _naturalLanguages [layer=(_Caps )] <- Filipino [s] @@ # Ex: Finnish _naturalLanguages [layer=(_Caps )] <- Finnish [s] @@ # Ex: Flemish _naturalLanguages [layer=(_Caps )] <- Flemish [s] @@ # Ex: French _naturalLanguages [layer=(_Caps )] <- French [s] @@ # Ex: Frisian _naturalLanguages [layer=(_Caps )] <- Frisian [s] @@ # Ex: Fulani _naturalLanguages [layer=(_Caps )] <- Fulani [s] @@ # Ex: Galician _naturalLanguages [layer=(_Caps )] <- Galician [s] @@ # Ex: Georgian _naturalLanguages [layer=(_Caps )] <- Georgian [s] @@ # Ex: German _naturalLanguages [layer=(_Caps )] <- German [s] @@ # Ex: Greek _naturalLanguages [layer=(_Caps )] <- Greek [s] @@ # Ex: Greenlandic _naturalLanguages [layer=(_Caps )] <- Greenlandic [s] @@ # Ex: Gujarati _naturalLanguages [layer=(_Caps )] <- Gujarati [s] @@ # Ex: Hausa _naturalLanguages [layer=(_Caps )] <- Hausa [s] @@ # Ex: Hawaiian _naturalLanguages [layer=(_Caps )] <- Hawaiian [s] @@ # Ex: Hebrew _naturalLanguages [layer=(_Caps )] <- Hebrew [s] @@ # Ex: Hellenic _naturalLanguages [layer=(_Caps )] <- Hellenic [s] @@ # Ex: Hindi _naturalLanguages [layer=(_Caps )] <- Hindi [s] @@ # Ex: Hmong _naturalLanguages [layer=(_Caps )] <- Hmong [s] @@ # Ex: Hokkien _naturalLanguages [layer=(_Caps )] <- Hokkien [s] @@ # Ex: Hungarian _naturalLanguages [layer=(_Caps )] <- Hungarian [s] @@ # Ex: Icelandic _naturalLanguages [layer=(_Caps )] <- Icelandic [s] @@ # Ex: Ilokano _naturalLanguages [layer=(_Caps )] <- Ilokano [s] @@ # 
Ex: Indonesian _naturalLanguages [layer=(_Caps )] <- Indonesian [s] @@ # Ex: Irish _naturalLanguages [layer=(_Caps )] <- Irish [s] @@ # Ex: Italian _naturalLanguages [layer=(_Caps )] <- Italian [s] @@ # Ex: Japanese _naturalLanguages [layer=(_Caps )] <- Japanese [s] @@ # Ex: Khmer _naturalLanguages [layer=(_Caps )] <- Khmer [s] @@ # Ex: Korean _naturalLanguages [layer=(_Caps )] <- Korean [s] @@ # Ex: Kuna _naturalLanguages [layer=(_Caps )] <- Kuna [s] @@ # Ex: Kurdish _naturalLanguages [layer=(_Caps )] <- Kurdish [s] @@ # Ex: Lao _naturalLanguages [layer=(_Caps )] <- Lao [s] @@ # Ex: Latin _naturalLanguages [layer=(_Caps )] <- Latin [s] @@ # Ex: Latvian _naturalLanguages [layer=(_Caps )] <- Latvian [s] @@ # Ex: Lithuanian _naturalLanguages [layer=(_Caps )] <- Lithuanian [s] @@ # Ex: Macedonian _naturalLanguages [layer=(_Caps )] <- Macedonian [s] @@ # Ex: Magyar _naturalLanguages [layer=(_Caps )] <- Magyar [s] @@ # Ex: Malagasy _naturalLanguages [layer=(_Caps )] <- Malagasy [s] @@ # Ex: Malay _naturalLanguages [layer=(_Caps )] <- Malay [s] @@ # Ex: Maltese _naturalLanguages [layer=(_Caps )] <- Maltese [s] @@ # Ex: Mandarin _naturalLanguages [layer=(_Caps )] <- Mandarin [s] @@ # Ex: Mayan _naturalLanguages [layer=(_Caps )] <- Mayan [s] @@ # Ex: Mongolian _naturalLanguages [layer=(_Caps )] <- Mongolian [s] @@ # Ex: Norwegian _naturalLanguages [layer=(_Caps )] <- Norwegian [s] @@ # Ex: Pashto _naturalLanguages [layer=(_Caps )] <- Pashto [s] @@ # Ex: Persian _naturalLanguages [layer=(_Caps )] <- Persian [s] @@ # Ex: Polish _naturalLanguages [layer=(_Caps )] <- Polish [s] @@ # Ex: Portuguese _naturalLanguages [layer=(_Caps )] <- Portuguese [s] @@ # Ex: Quechua _naturalLanguages [layer=(_Caps )] <- Quechua [s] @@ # Ex: Romanian _naturalLanguages [layer=(_Caps )] <- Romanian [s] @@ # Ex: Russian _naturalLanguages [layer=(_Caps )] <- Russian [s] @@ # Ex: Serbian _naturalLanguages [layer=(_Caps )] <- Serbian [s] @@ # Ex: Sinhalese _naturalLanguages [layer=(_Caps )] <- 
Sinhalese [s] @@ # Ex: Slovak _naturalLanguages [layer=(_Caps )] <- Slovak [s] @@ # Ex: Slovenian _naturalLanguages [layer=(_Caps )] <- Slovenian [s] @@ # Ex: Spanish _naturalLanguages [layer=(_Caps )] <- Spanish [s] @@ # Ex: Sundanese _naturalLanguages [layer=(_Caps )] <- Sundanese [s] @@ # Ex: Swahili _naturalLanguages [layer=(_Caps )] <- Swahili [s] @@ # Ex: Swedish _naturalLanguages [layer=(_Caps )] <- Swedish [s] @@ # Ex: Tagalog _naturalLanguages [layer=(_Caps )] <- Tagalog [s] @@ # Ex: Tahitian _naturalLanguages [layer=(_Caps )] <- Tahitian [s] @@ # Ex: Tajik _naturalLanguages [layer=(_Caps )] <- Tajik [s] @@ # Ex: Tamil _naturalLanguages [layer=(_Caps )] <- Tamil [s] @@ # Ex: Telegu _naturalLanguages [layer=(_Caps )] <- Telegu [s] @@ # Ex: Thai _naturalLanguages [layer=(_Caps )] <- Thai [s] @@ # Ex: Tibetan _naturalLanguages [layer=(_Caps )] <- Tibetan [s] @@ # Ex: Turkish _naturalLanguages [layer=(_Caps )] <- Turkish [s] @@ # Ex: Ukrainian _naturalLanguages [layer=(_Caps )] <- Ukrainian [s] @@ # Ex: Urdu _naturalLanguages [layer=(_Caps )] <- Urdu [s] @@ # Ex: Uyghur _naturalLanguages [layer=(_Caps )] <- Uyghur [s] @@ # Ex: Uzbek _naturalLanguages [layer=(_Caps )] <- Uzbek [s] @@ # Ex: Vietnamese _naturalLanguages [layer=(_Caps )] <- Vietnamese [s] @@ # Ex: Welsh _naturalLanguages [layer=(_Caps )] <- Welsh [s] @@ # Ex: Xhosa _naturalLanguages [layer=(_Caps )] <- Xhosa [s] @@ # Ex: Yao _naturalLanguages [layer=(_Caps )] <- Yao [s] @@ # Ex: Yiddish _naturalLanguages [layer=(_Caps )] <- Yiddish [s] @@ # Ex: Yoruba _naturalLanguages [layer=(_Caps )] <- Yoruba [s] @@ # Ex: Zapotec _naturalLanguages [layer=(_Caps )] <- Zapotec [s] @@ # Ex: Zulu _naturalLanguages [layer=(_Caps )] <- Zulu [s] @@
# Pass: write per-line token-count statistics to dump.txt.
# (Restored line structure: in the collapsed one-line form the inline
# "#" comments swallowed the @POST/@RULES code that followed them.)
@CODE
fileout("dump.txt");
prlit("dump.txt","\n");
prlit("dump.txt","ALP NUM PUN WHT BLOBS INDENT WALP WCAP WALL\n");
prlit("dump.txt","-------------------------------------------\n");
@@CODE

@NODES _ROOT

# Print out the total count of vars in each line.
@POST
  "dump.txt" << rightjustifynum(N("nalpha",1), 3) << " ";
  "dump.txt" << rightjustifynum(N("nnum",1), 3) << " ";
  "dump.txt" << rightjustifynum(N("npunct",1), 3) << " ";
  "dump.txt" << rightjustifynum(N("nwhite",1), 3) << " ";
  "dump.txt" << rightjustifynum(N("nblobs",1), 3) << " ";
  "dump.txt" << rightjustifynum(N("nindent",1),3) << " ";
  "dump.txt" << rightjustifynum(N("walpha",1), 3) << " ";
  "dump.txt" << rightjustifynum(N("wcap",1), 3) << " ";
  "dump.txt" << rightjustifynum(N("wallcaps",1),3) << "\n";
  # noop();
@RULES
_xNIL <- _LINE @@

# Blank lines are reported literally.
@POST
  prlit("dump.txt", "blank line\n");
@RULES
_xNIL <- _BLANKLINE @@
@PATH _ROOT _textZone _LINE

# Remove whitespace tokens inside the line.
# (Restored line structure: "### (1)" swallowed the "@@" terminators in
# the collapsed one-line form.)
@POST
  excise(1,1);
  noop();
@RULES
_xNIL <- _xWHITE [s]  ### (1)
  @@

# Flag the node three levels up when a _language node is seen, and log.
@POST
  X("up",3) = 1;
  "found.txt" << "this\n";
@RULES
_xNIL <- _language [s]  ### (1)
  @@
@NODES _FOOTER

# Log and excise structural/markup nodes found inside the footer zone.
# (Restored line structure from the collapsed one-line form.)
@POST
  "zap.txt" << N("$text") << " - 2\n";
  excise(1,1);
@RULES
_xNIL <- _xWILD [one match=(_beginEnum _endEnum _itemMarker _COMMENT _spacing)]  ### (1)
  @@
# Pass: noun-phrase formation inside a segment (_seg).  Matches
# det/quan/num + adj* + alpha(+alpha) sequences, renames the segment to
# _np, retags the trailing alphas as _noun/_adj/_verb based on their POS
# variables, and propagates POS vars via pncopyvars/clearpos.
# NOTE(review): newlines have been collapsed onto single physical lines;
# inline "#" comments swallow the code that follows them on the same
# line.  Restore the original line breaks before running this pass.
# Code below is byte-identical to the source.
@CODE L("hello") = 0; @@CODE #@PATH _ROOT _TEXTZONE _sent _seg @NODES _seg # Assume we're here because this is an np. @CHECK if (!N(2) && !N(3) && !N(4)) fail(); # Handle the raw alpha alpha elsewhere. if (!N("noun",6)) # 01/12/05 AM. fail(); @POST if (N("verb",6)) X("last verb") = 1; # Last is possibly a verb. if (numbersagree(N(2),N(6)) ) X("noun agree") = 1; X("vconj") = vconj(N(6)); xrename("_np"); pncopyvars(N(6),X()); clearpos(X(),1,1); # Zero out token info. # Heur: assign pos to alphas. L("tmp6") = N(6); group(6,6,"_noun"); pncopyvars(L("tmp6"),N(6)); L("tmp5") = N(5); if (N("adj",5)) { group(5,5,"_adj"); pncopyvars(L("tmp5"),N(5)); if (N(4)) L("firstj") = N(4); else L("firstj") = N(5); L("lastj") = N(5); fixadjs(L("firstj"),L("lastj")); fixnoun(N(6)); } else if (N("noun",5)) { group(5,5,"_noun"); pncopyvars(L("tmp5"),N(5)); fixnouns(N(5),N(6)); if (N(4)) fixadjs(N(4),N(4)); } else if (N("verb",5)) { group(5,5,"_adj"); pncopyvars(L("tmp5"),N(5)); if (N(4)) L("firstj") = N(4); else L("firstj") = N(5); fixadjs(L("firstj"),N(5)); } else # don't know; don't care anyway... pncopyvars(L("tmp5"),N(5)); X("id") = "qseq100 dqa alph-alph"; @RULES _xNIL <- _xSTART _xWILD [star match=(_det _pro)] _xWILD [star match=(_quan _num _xNUM)] _adj [star] _xALPHA _xALPHA _xEND @@ @CHECK if (!N(2) && !N(3) && !N(4) && !N(5)) fail(); # Handle the raw alpha elsewhere. if (!N("noun",6)) fail(); if (N("verb",6)) fail(); @POST if (N("verb",6)) X("last verb") = 1; # Last is possibly a verb. # if (numbersagree(N(2),N(6)) ) # X("noun agree") = 1; X("vconj") = vconj(N(6)); L("tmp6") = N(6); group(6,6,"_noun"); pncopyvars(L("tmp6"),N(6)); xrename("_np"); pncopyvars(N(6),X()); clearpos(X(),1,1); # Zero out token info. 
@RULES _xNIL <- _xSTART _xWILD [star match=(_det _pro)] _xWILD [star match=(_quan _num _xNUM)] _adj [star] _noun [star] _xALPHA _xEND @@ @CHECK if (X("seg type") != "np") fail(); @POST L("tmp3") = N(3); fixnpnonhead(2); group(3,3,"_noun"); pncopyvars(L("tmp3"),N(3)); fixnoun(N(3)); xrename("_np"); pncopyvars(L("tmp3"),X()); clearpos(X(),1,1); # Zero out token info. @RULES _xNIL <- _xSTART _xALPHA _xALPHA _xEND @@
@NODES _ROOT

# For each _term node carrying a "code" variable, record its word-code
# mappings in the KB.
# (Restored line structure; also corrected the element-number comment,
# which read "### (2)" for the only element of the rule.)
@POST
  if (N("code", 1)) {
    GetWordCodeMappings(N(1));
  }
@RULES
_xNIL <- _term  ### (1)
  @@
@PATH _ROOT

# Remove whitespace tokens directly under the root.
# (Restored line structure from the collapsed one-line form.)
@POST
  excise(1,1);
  noop();
@RULES
_xNIL <- _xWHITE [s]  ### (1)
  @@
@NODES _ROOT

# Capture a language block and record the language name both on the
# suggested node and in lang.txt.
# (Restored line structure from the collapsed one-line form.)
@POST
  S("language") = N("$text",2);
  "lang.txt" << N("$text",2) << "\n";
  single();
@RULES
_language <-
  _langStart                      ### (1)
  _xWILD [plus fails=(_langEnd)]  ### (2)
  _langEnd                        ### (3)
  @@

# Capture a conjugation block terminated by two closing braces.
# NOTE(review): the rule requires "}}" to close the block — confirm the
# input really terminates conjugation blocks with a double brace.
@RULES
_conjugation <-
  _startConju               ### (1)
  _xWILD [plus fails=(\})]  ### (2)
  \}                        ### (3)
  \}                        ### (4)
  @@
# Pass: contraction and mumble handling over _TEXTZONE.  Negated modal/
# have/be contractions ("won't", "isn't", ...) become _modal/_have/_be
# with mypos VBP or VBD; pronoun contractions ("I'm", "you're", "we're",
# "they're") are retokenized into _pro + _be; "60's"-style plurals become
# _num; a trailing apostrophe after an alpha is excised; runs of
# alpha/num/underscore become _mumble; dashes are normalized to _dbldash.
# NOTE(review): "aren \' t" and "isn \' t" appear in BOTH the VBP
# (present) and the later VBD (past) rule groups; the VBD copies look
# like accidental duplicates (they are shadowed by the earlier rules) —
# confirm and remove.
# NOTE(review): newlines are collapsed; inline "#" comments swallow the
# code that follows them on the same physical line.  Restore the
# original line breaks before running.  Code below is byte-identical.
@CODE if (G("pretagged")) exitpass(); @@CODE @NODES _TEXTZONE @POST S("neg") = 1; S("mypos") = "VBP"; single(); @RULES _modal <- won \' t @@ _modal <- can \' t @@ _modal <- cannot @@ _modal <- mayn \' t @@ _modal <- mightn \' t @@ _modal <- shan \' t @@ _modal <- oughtn \' t @@ _modal <- mustn \' t @@ _modal <- don \' t @@ _modal <- doesn \' t @@ _have [layer=(_verb)] <- haven \' t @@ _have [layer=(_verb)] <- hasn \' t @@ _be [layer=(_verb)] <- aren \' t @@ _be [layer=(_verb)] <- isn \' t @@ @POST S("neg") = 1; S("mypos") = "VBD"; single(); @RULES _modal <- couldn \' t @@ _modal <- didn \' t @@ _modal <- shouldn \' t @@ _modal <- wouldn \' t @@ _have [layer=(_verb)] <- hadn \' t @@ _be [layer=(_verb)] <- wasn \' t @@ _be [layer=(_verb)] <- weren \' t @@ _be [layer=(_verb)] <- aren \' t @@ _be [layer=(_verb)] <- isn \' t @@ @POST group(1,1, "_proSubj"); group(1,1, "_pro"); group(2,3, "am"); # Retokenize. @RULES _xNIL <- I [s] \' [s] m [s] @@ # you're @POST group(2,3,"_be"); group(2,2,"_verb"); N("sem",2) = N("stem",2) = "be"; N("number",2) = "any"; chpos(N(2),"VBP"); L("tmp1") = N(1); group(1,1,"_proSubj"); group(1,1,"_pro"); pncopyvars(L("tmp1"),N(1)); @RULES _xNIL <- you \' re @@ # you're @POST group(2,3,"_be"); group(2,2,"_verb"); N("sem",2) = N("stem",2) = "be"; N("number",2) = "plural"; chpos(N(2),"VBP"); L("tmp1") = N(1); group(1,1,"_proSubj"); group(1,1,"_pro"); pncopyvars(L("tmp1"),N(1)); @RULES _xNIL <- we \' re @@ # you're @POST group(2,3,"_be"); group(2,2,"_verb"); N("sem",2) = N("stem",2) = "be"; N("number",2) = "plural"; chpos(N(2),"VBP"); L("tmp1") = N(1); group(1,1,"_proSubj"); group(1,1,"_pro"); pncopyvars(L("tmp1"),N(1)); @RULES _xNIL <- they \' re @@ @RULES _num <- _xNUM \' [s] s [s] @@ #@RULES #_aposS <- # \' [s] # s [s] # @@ # alpha ' @POST N("apostrophe",1) = 1; excise(2,2); # Trash the apostrophe. @RULES _xNIL <- _xALPHA [s] \' [trigger] @@ # Some mumbles. 
@PRE <1,1> varz("NOSP"); <2,2> var("NOSP"); @POST ++X("nmumbles"); single(); @RULES _mumble <- _xWILD [one match=(_xALPHA _xNUM \_ )] _xWILD [plus match=(_xALPHA _xNUM \_ )] @@ # Some mumbles. @POST ++X("nmumbles"); single(); @RULES _mumble <- _xALPHA _xNUM @@ _noun <- _noun \/ _noun @@ @RULES _dbldash <- \- \- \- @@ # Triple! _dbldash <- \- \- @@ @POST group(2,2,"_dbldash"); @RULES _xNIL <- _xWHITE [s plus] \- _xWHITE [s plus] @@
# Fetch the first word-concept in the KB dictionary hierarchy.
# (Restored line structure: in the collapsed one-line form the comment
# above swallowed the statement below.)
# NOTE(review): dictfirst() is passed L("") — a local variable with an
# empty name.  Confirm the intended argument (likely a word or prefix
# string) against the dictionary API.
L("return_con") = dictfirst(L(""));
# Show how we can create a concept like "noun" and assign it as the
# value of a word's attribute.  We create the concepts named "words" and
# "noun" as children of the root of the KB ("concept"), and then make
# the concept "book" a child of "words".
# (Restored line structure: the comment swallowed the code in the
# collapsed one-line form.)
G("words") = makeconcept(findroot(), "words");
G("noun") = makeconcept(findroot(),"noun");
G("noun_book") = makeconcept(G("words"),"book");
@PATH _ROOT _pronunciations _headerZone _LINE

# Record the phonetic text on the enclosing pronunciation concept and
# echo it to debug.txt.
# (Restored line structure from the collapsed one-line form.)
@POST
  addstrval(X("pronunciation",2),"phonetic",N("$text",1));
  "debug.txt" << N("$text",1) << "\n";
@RULES
_xNIL <- _phonetic  ### (1)
  @@
# Pass: XMLOverallSchema — sorts the DTD element list held under the KB
# "Elements" concept into alphabetical order (repeated movecleft() of
# each out-of-place sibling, tracked via a temporary "sortedYet"
# attribute), logs the Entities and Elements hierarchies to hierDtd.log,
# strips the temporary attributes, then assembles the final
# _XMLDocument node from _Prolog _Element _Misc*.
# NOTE(review): newlines are collapsed; inline "#" comments swallow the
# code that follows them on the same physical line.  Restore the
# original line breaks before running.  Code below is byte-identical.
############################################### # FILE: XML OverallSchema.pat # # SUBJ: Put together the last pieces of an XML# # document # # AUTH: Paul Deane # # CREATED: 14/Jan/01 # DATE OF THIS VERSION: 31/Aug/01 # # Copyright ############################################### ############################### # CONTENTS # # In code section: sorting # # algorithm that organizes # # the data from DTD info # # # # In main rule: overall # # structure of an XML file # ############################### @CODE if (G("Organize DTD")==1) { #At this point the DTD knowledge base is complete. #So the next task is to sort the element list under #the gram tab so that the information is ordered in #the order we want to make generation of passes as #natural as possible. G("CurrentConcept") = down(G("Elements")) ; G("CurrentConceptName") = conceptname(G("CurrentConcept")) ; G("Continue") = 1 ; G("CurrentChildConcept") = down(G("CurrentConcept")) ; G("CurrentChildConceptName") = conceptname(G("CurrentChildConcept")) ; G("ReferenceIDforConcept") = findconcept(G("Elements"),G("CurrentChildConceptName")) ; G("NextConcept") = next(G("ReferenceIDforConcept")) ; #This is a sorting algorithm that puts the elements of #the DTD data we've been creating into alphabetic order G("attrName") = "sortedYet" ; while ( G("Continue") != 0 ) { if (G("CurrentConcept") == 0 ) { G("Continue") = 0 ; } else if ( G("CurrentChildConcept") == 0 && numval(G("CurrentConcept"),G("attrName")) == 0 ) { replaceval(G("CurrentConcept"),G("attrName"),1) ; G("CurrentConcept") = down(G("Elements")) ; G("CurrentConceptName") = conceptname(G("CurrentConcept")) ; G("CurrentChildConcept") = down(G("CurrentConcept")) ; if (G("CurrentChildConcept")!=0) { G("CurrentChildConceptName") = conceptname(G("CurrentChildConcept")) ; G("ReferenceIDforConcept") = findconcept(G("Elements"),G("CurrentChildConceptName")) ; G("NextConcept") = next(G("ReferenceIDforConcept")) ; } else { G("CurrentChildConceptName") = " " ; 
G("ReferenceIDforConcept") = 0 ; G("NextConcept") = 0 ; } } else if (G("CurrentChildConcept") == 0 ) { G("CurrentConcept") = next(G("CurrentConcept")) ; G("CurrentConceptName") = conceptname(G("CurrentConcept")) ; G("CurrentChildConcept") = down(G("CurrentConcept")) ; if (G("CurrentChildConcept")!=0) { G("CurrentChildConceptName") = conceptname(G("CurrentChildConcept")) ; G("ReferenceIDforConcept") = findconcept(G("Elements"),G("CurrentChildConceptName")) ; G("NextConcept") = next(G("ReferenceIDforConcept")) ; } else { G("CurrentChildConceptName") = " " ; G("ReferenceIDforConcept") = 0 ; G("NextConcept") = 0 ; } } else if ( G("CurrentChildConcept") != 0 && G("ReferenceIDforConcept") != 0 && G("CurrentChildConceptName") != " " && numval(G("ReferenceIDforConcept"),G("attrName")) == 0 && !strequal(G("CurrentConceptName"),G("CurrentChildConceptName")) ) { movecleft(G("ReferenceIDforConcept")) ; G("NextConcept") = next(G("ReferenceIDforConcept")) ; if ( G("NextConcept") == G("CurrentConcept") ) { G("CurrentChildConcept") = next(G("CurrentChildConcept")) ; if (G("CurrentChildConcept") != 0 ) { G("CurrentChildConceptName") = conceptname(G("CurrentChildConcept")) ; G("ReferenceIDforConcept") = findconcept(G("Elements"),G("CurrentChildConceptName")) ; G("NextConcept") = next(G("ReferenceIDforConcept")) ; } else { replaceval(G("CurrentConcept"),G("attrName"),1) ; G("CurrentConcept") = down(G("Elements")) ; G("CurrentConceptName") = conceptname(G("CurrentConcept")) ; G("CurrentChildConcept") = down(G("CurrentConcept")) ; if (G("CurrentChildConcept")!=0) { G("CurrentChildConceptName") = conceptname(G("CurrentChildConcept")) ; G("ReferenceIDforConcept") = findconcept(G("Elements"),G("CurrentChildConceptName")) ; G("NextConcept") = next(G("ReferenceIDforConcept")) ; } else { G("CurrentChildConceptName") = " " ; G("ReferenceIDforConcept") = 0 ; G("NextConcept") = 0 ; } } } } else if ( G("CurrentConcept") != 0 && G("CurrentChildConcept") != 0 ) { G("CurrentChildConcept") = 
next(G("CurrentChildConcept")) ; G("CurrentConceptName") = conceptname(G("CurrentConcept")) ; if (G("CurrentChildConcept")!=0) { G("CurrentChildConceptName") = conceptname(G("CurrentChildConcept")) ; G("ReferenceIDforConcept") = findconcept(G("Elements"),G("CurrentChildConceptName")) ; G("NextConcept") = next(G("ReferenceIDforConcept")) ; } else { G("CurrentChildConceptName") = " " ; G("ReferenceIDforConcept") = 0 ; G("NextConcept") = 0 ; } } else { G("Continue") = 0 ; } } G("Attr") = "sortedYet" ; G("CurrentConcept") = down(G("Entities")) ; while ( G("CurrentConcept") != 0 ) { "hierDtd.log" << conceptpath(G("CurrentConcept")) << "\n" ; G("nextC") = next(G("CurrentConcept")) ; G("CurrentConcept") = G("nextC") ; } G("CurrentConcept") = down(G("Elements")) ; while ( G("CurrentConcept") != 0 ) { rmattr(G("CurrentConcept"),"sortedYet") ; "hierDtd.log" << conceptpath(G("CurrentConcept")) << "\n" ; G("CurrentChildConcept") = down(G("CurrentConcept")) ; while (G("CurrentChildConcept") != 0 ) { rmattr(G("CurrentChildConcept"),"sortedYet") ; "hierDtd.log" << conceptpath(G("CurrentChildConcept")) << "\n" ; G("nextC") = next(G("CurrentChildConcept")) ; G("CurrentChildConcept") = G("nextC") ; } G("nextC") = next(G("CurrentConcept")) ; G("CurrentConcept") = G("nextC") ; } } @@CODE @NODES _ROOT @RULES _XMLDocument [unsealed] <- _Prolog [one] ### (1) _Element [one] ### (2) _Misc [star] ### (3) @@
# Pass: print the ICD code-mapping KB and persist it to disk.
# NOTE(review): the companion hierarchy pass uses
# SaveKB(filename, concept, level); confirm SaveToKB(concept, filename)
# is the intended helper here and not a misspelling with swapped args.
@CODE
DisplayKB(G("icd_codes"),1);
SaveToKB(G("icd_codes"), "icd_mapping.kbb");
@@CODE
@PATH _ROOT _headerZone _Experience _headerZone _LINE

# Match a date range such as "1998 to 2004" or "1998 to present".
# (Restored line structure from the collapsed one-line form.)
@RULES
_dateRange <-
  _xNUM                           ### (1)
  to                              ### (2)
  _xWILD [match=(_xNUM present)]  ### (3)
  @@
# Pass: sentence-level noun-phrase lists and POS disambiguation over
# _sent — comma/conjunction NP lists (with backward list extension),
# retagging of bare alphas next to verbs/preps/determiners, appositive
# grouping, possessive ('s -> _adj) handling, passive-vg detection, and
# "neither ... nor" coordination.
# NOTE(review): newlines are collapsed; inline "#" comments (including
# several large commented-out rule blocks) swallow the code that follows
# them on the same physical line.  Restore the original line breaks
# before running.  Code below is byte-identical to the source.
@CODE L("hello") = 0; @@CODE @NODES _sent # np lists... @POST if (pnname(N(7)) != "_np") { nountonp(7,1); } group(2,7,"_np"); N("list-np",2) = 1; # listadd(7,2,"true"); @RULES _xNIL <- \, _xWILD [one match=(_noun _np)] \, _xWILD [one match=(_noun _np)] \, [opt] _conj _xWILD [one match=(_noun _np)] _xWILD [one lookahead fail=(_noun _xALPHA)] @@ # Extend list backwards. @CHECK if (!N("list-np",4)) fail(); @POST listadd(4,2,"true"); @RULES _xNIL <- _xWILD [one fail=(_det _quan _num _xNUM _adj _noun)] _xWILD [one match=(_noun _np)] \, _np @@ _xNIL <- _xSTART _xWILD [one match=(_noun _np)] \, _np @@ @POST nountonp(2,1); @RULES _xNIL <- _xSTART _noun _xWILD [one lookahead fail=(_noun _xALPHA)] @@ # cap's adj # cap's noun # Note: "John's green" could mean "John IS green...". #@CHECK # if (!N("apos-s",1)) # fail(); #@POST # L("tmp") = N(1); # group(1,1,"_adj"); # pncopyvars(L("tmp"),N(1)); #@RULES #_xNIL <- # _caps # _xWILD [one lookahead match=(_noun _adj)] # @@ # noun alpha adj @CHECK if (!N("verb",2)) fail(); if (!vconjq(N(2),"-en")) fail(); @POST L("tmp2") = N(2); group(2,2,"_verb"); pncopyvars(L("tmp2"),N(2)); L("v") = N(2); group(2,2,"_vg"); mhbv(N(2),L("neg"),0,0,0,0,L("v")); pncopyvars(L("tmp2"),N(2)); # N("voice",2) = "passive"; N("verb node",2) = L("v"); clearpos(N(2),1,0); # Zero out token info. @RULES _xNIL <- _noun _xALPHA _xWILD [one lookahead fail=(_noun _np)] @@ # noun alpha vg # Too broad. Too old. # #@CHECK # if (!N("noun",2)) # fail(); # if (N("adv",2)) # fail(); # Too ambiguous... 
#@POST # L("tmp2") = N(2); # group(2,2,"_noun"); # pncopyvars(L("tmp2"),N(2)); #@RULES #_xNIL <- # _noun # _xALPHA # _xWILD [one lookahead match=(_verb _vg)] # @@ # vg adj alpha alpha @CHECK if (!N("noun",3) && !N("adj",3)) fail(); if (!N("noun",4) && !N("adj",4)) fail(); @POST L("tmp3") = N(3); L("tmp4") = N(4); if (N("adj",3)) { group(3,3,"_adj"); pncopyvars(L("tmp3"),N(3)); fixadj(N(3)); } else { group(3,3,"_noun"); pncopyvars(L("tmp3"),N(3)); } if (N("noun",4)) group(4,4,"_noun"); else group(4,4,"_adj"); pncopyvars(L("tmp4"),N(4)); if (pnname(N(4)) == "_adj") fixadj(N(4)); group(2,4,"_np"); pncopyvars(L("tmp4"),N(2)); clearpos(N(2),1,1); # Zero out token info. @RULES _xNIL <- _vg _xWILD [plus match=(_det _quan _num _xNUM _adj)] _xALPHA _xALPHA _xWILD [plus lookahead match=(_prep _conj _qEOS _fnword _xEND)] @@ # apos-s alpha #@CHECK # if (!N("apos-s",1)) # fail(); #@POST # L("tmp2") = N(2); # if (N("noun",2)) # group(2,2,"_noun"); # else if (N("adj",2)) # group(2,2,"_adj"); # else if (N("adv",2)) # group(2,2,"_adv"); # pncopyvars(L("tmp2"),N(2)); #@RULES #_xNIL <- # _adj # _xALPHA # _xWILD [one lookahead match=(_prep _conj _qEOS _fnword _xEND)] # @@ # there vg np @CHECK if (N("there-vg",2)) fail(); @POST N("there-vg",2) = 1; fixvg(N(2),"active","VBP"); @RULES _xNIL <- there [s] _vg [lookahead] _np @@ # det alpha noun @POST L("tmp2") = N(2); if (N("adj",2)) { group(2,2,"_adj"); fixadj(N(2)); } else if (N("noun",2)) group(2,2,"_noun"); else if (N("adv",2)) group(2,2,"_adv"); else group(2,2,"_noun"); # Loop guard. # 06/19/05 pncopyvars(L("tmp2"),N(2)); @RULES _xNIL <- _xWILD [plus match=(_det _quan _num _xNUM _adj)] _xALPHA _noun @@ # vg _adj _noun _fnword @POST pncopyvars(3); sclearpos(1,0); # Zero out token info. 
singler(2,3); @RULES _np <- _vg _adj _noun _xWILD [one lookahead fail=(_noun _xALPHA)] @@ # det adj noun noun @CHECK L("last") = lasteltnode(3); if (N(1)) L("first") = N(1); else if (N(2)) L("first") = N(2); else L("first") = N(3); if (!numbersagree(L("first"),L("last"))) S("disagree") = 1; @POST # if (S("disagree")) # { # # Check multiple nouns... # group(1,2,"_np"); # } # else # { L("tmp") = lasteltnode(3); group(1,3,"_np"); pncopyvars(L("tmp"),N(1)); clearpos(N(1),1,1); # Zero out token info. # } @RULES _xNIL <- _xWILD [plus match=(_det _quan _num _xNUM)] _adj [star] _noun [plus] _xWILD [one lookahead fail=(_xALPHA _aposS)] @@ # det quan adj alpha # Todo: agreement checks here. @CHECK if (N("pos200 a-v",3)) fail(); # Loop guard. @POST N("pos200 a-v",3) = 1; # Loop guard. L("last") = lasteltnode(2); L("tmp3") = N(3); if (!N("noun",3)) { # etc. } else { if (numbersagree(N(2),N(3))) # Agreement of range of nodes. { if (G("error")) "err.txt" << "Agreement " << L("need") << "\n"; } else if (N("verb",3)) { group(3,3,"_verb"); pncopyvars(L("tmp3"),N(3)); clearpos(N(3),1,0); # Zero out token info. 
} } @RULES _xNIL <- _xSTART _xWILD [plus match=(_det _quan _num _xNUM _adj)] _xALPHA _xWILD [one lookahead match=(_adv _advl _prep)] @@ @CHECK if (!N("noun",2)) fail(); @POST L("tmp2") = N(2); group(2,2,"_noun"); pncopyvars(L("tmp2"),N(2)); @RULES _xNIL <- _xWILD [plus match=(_det _quan _num _xNUM _adj)] _xALPHA _xWILD [one lookahead match=(_vg _verb)] @@ # prep alpha @CHECK if (!N("noun",2) && !N("adj",2) && !N("unknown",2)) fail(); @POST L("tmp2") = N(2); if (N("noun",2) || N("unknown",2)) { group(2,2,"_noun"); pncopyvars(L("tmp2"),N(2)); if (number(N(2)) == "plural") chpos(N(2),"NNS"); else if (N("unknown",2) && strendswith(N("$text",2),"s")) chpos(N(2),"NNS"); } else { group(2,2,"_adj"); pncopyvars(L("tmp2"),N(2)); fixadj(N(2)); } nountonp(2,1); @RULES _xNIL <- _xWILD [one match=(_prep) except=(to)] _xALPHA _xWILD [lookahead one match=(_prep _conj \, _fnword _xEND)] @@ # vg , noun vg @POST nountonp(3,1); @RULES _xNIL <- _xWILD [one match=(_vg)] \, [opt] _noun \, [opt lookahead] _xWILD [one match=(_vg)] @@ # noun alpha noun @CHECK if (!N("verb",2)) fail(); if (N("noun",2)) fail(); @POST L("tmp2") = N(2); group(2,2,"_verb"); L("v") = N(2); pncopyvars(L("tmp2"),N(2)); group(2,2,"_vg"); mhbv(N(2),L("neg"),0,0,0,0,L("v")); pncopyvars(L("tmp2"),N(2)); N("voice",2) = "active"; clearpos(N(2),1,0); # Zero out token info. @RULES _xNIL <- _noun _xALPHA _noun @@ # vg noun aposS # prep noun aposS @POST chpos(N(2),"POS"); # Assumed. L("tmp2") = lasteltnode(2); group(2,3,"_adj"); pncopyvars(L("tmp2"),N(2)); clearpos(N(2),1,0); N("possessive",2) = 1; if (pnname(N(1)) == "_vg") if (!N("voice",1)) N("voice",1) = "active"; @RULES _xNIL <- _xWILD [one match=(_vg _verb _prep _conj \,)] _noun [plus] _aposS @@ # adj alpha fnword @CHECK if (!N("noun",2)) fail(); @POST L("tmp2") = N(2); group(2,2,"_noun"); pncopyvars(L("tmp2"),N(2)); @RULES _xNIL <- _adj _xALPHA _xWILD [lookahead one match=(_fnword)] @@ # fnword noun vg @POST dqaninfo(0,0,0,2); # Cleanup. # 05/25/07 AM. 
groupnp(); @RULES _xNIL <- _xWILD [one match=(_fnword)] _noun [plus] _xWILD [one lookahead match=(_vg)] @@ # apposition, appositive. # vg np , noun , prep @POST # Todo: compose... L("tmp2") = N(2); L("tmp4") = N(4); if (pnname(N(4)) == "_noun") { nountonp(4,1); } group(2,5,"_np"); pncopyvars(L("tmp2"),N(2)); pncopyvars(L("tmp4"),N(2)); clearpos(N(2),1,0); # Zero out token info. @RULES _xNIL <- _xWILD [one match=(_verb _vg _fnword _prep _conj _clause)] _np _xWILD [one match=( \, _dbldash)] _xWILD [one match=(_noun _np)] _xWILD [one match=( \, _dbldash)] _xWILD [one lookahead match=(_prep _vg)] @@ # vg np , noun , prep @POST # Todo: compose... L("tmp2") = N(2); L("tmp4") = N(4); if (pnname(N(4)) == "_noun") { nountonp(4,1); } group(2,4,"_np"); pncopyvars(L("tmp2"),N(2)); pncopyvars(L("tmp4"),N(2)); clearpos(N(2),1,0); # Zero out token info. N("ne",2) = 0; @RULES _xNIL <- _xWILD [one match=(_verb _vg _fnword _prep)] _np \, _xWILD [one match=(_noun _np)] _xWILD [one lookahead match=(_qEOS _xEND)] @@ # noun alpha vg # assume spell correct hasn't found verb. @CHECK if (!N("unknown",2)) fail(); @POST L("tmp2") = N(2); group(2,2,"_noun"); pncopyvars(L("tmp2"),N(2)); @RULES _xNIL <- _xWILD [one match=(_noun _np)] _xALPHA _xWILD [one lookahead match=(_verb _vg)] @@ # neither noun nor @POST if (nonliteral(N(1))) pnrename(N(1),"_det"); else group(1,1,"_det"); chpos(N(1),"CC"); chpos(N(3),"CC"); @RULES _xNIL <- _xWILD [s one match=(neither) except=(_det)] # Loop guard. _xWILD [one lookahead match=(_noun _np)] nor [s] @@ # Some clausal pattern... # vg np prep np vg # Looking for a 2nd vg to be passive. @CHECK if (N("fixed",5)) fail(); @POST L("vb") = N("verb node",5); fixverb(L("vb"),"passive","VBP"); N("fixed",5) = 1; # To avoid infinite recursion. 
@RULES _xNIL <- _vg _np _prep _np _vg @@ # noun noun # dqan @POST L("tmp2") = lasteltnode(2); nountonp(2,1); N("ne",2) = 0; if (pnname(N(3)) == "_vg") if (!N("voice",3)) N("voice",3) = "active"; @RULES _xNIL <- _xWILD [one match=(_xSTART _qEOS _np)] _noun [plus] _xWILD [one lookahead match=(_verb _vg _adv _advl _np \,)] @@ # vg to vg # used to make #@POST # # Assign infinitive. # L("n") = pndown(N(3)); # if (pnname(L("n")) == "_verb") # pnreplaceval(L("n"),"inf",1); # L("v") = N("verb node",3); # if (L("v")) # chpos(L("v"),"VB"); # infinitive. # L("v") = N("verb node",1); # if (L("v")) # fixverb(L("v"),"active",0); # # L("tmp3") = N(3); # group(1,3,"_vg"); # pncopyvars(L("tmp3"),N(1)); # # N("voice") ? #@RULES #_xNIL <- # _vg # to [s] # _vg # @@ # prep np and np # check agreement? @PRE <2,2> varne("glom","left"); <4,4> varne("glom","right"); @POST if (pnname(N(4)) == "_noun") { group(4,4,"_np"); N("bracket",4) = 1; } if (pnname(N(2)) == "_noun") { nountonp(2,1); } group(2,4,"_np"); N("compound-np",2) = 1; clearpos(N(2),1,0); @RULES _xNIL <- _xWILD [one match=(_prep)] _xWILD [one match=(_np _noun)] _conj _xWILD [one match=(_np _noun)] _xWILD [one lookahead match=(_verb _vg _prep)] @@ # np , alpha conj alpha np @CHECK if (!N("noun",3)) fail(); if (!N("adv",6)) fail(); @POST L("tmp6") = N(6); L("tmp3") = N(3); group(6,6,"_adv"); pncopyvars(L("tmp6"),N(6)); group(3,3,"_noun"); pncopyvars(L("tmp3"),N(3)); fixnoun(N(3)); @RULES _xNIL <- _xWILD [one match=(_np _noun)] \, _xALPHA \, [opt] _conj _xALPHA _xWILD [one lookahead match=(_np)] @@ # verb alpha prep @CHECK if (N("pos num",2) != 2) fail(); if (!N("noun",2)) fail(); @POST L("tmp2") = N(2); group(2,2,"_noun"); pncopyvars(L("tmp2"),N(2)); fixnoun(N(2)); nountonp(2,1); @RULES _xNIL <- _xWILD [one match=(_verb _vg)] _xALPHA _xWILD [one lookahead match=(_prep)] @@ # verb dqan alpha @CHECK if (!N("noun",6)) fail(); # Check noun agreement... 
@POST L("tmp6") = N(6); group(6,6,"_noun"); pncopyvars(L("tmp6"),N(6)); fixnoun(N(6)); dqaninfo(2,3,4,5); S("olast") = 6; S("last") = S("lastn") = S("lastan") = N(6); groupnp(); @RULES _xNIL <- _xWILD [one match=(_verb _vg)] _det [opt] _xWILD [star match=(_xNUM _quan _num)] _adj [star] _noun [plus] _xALPHA _xWILD [one lookahead match=(_qEOS _fnword _xEND)] @@ # np vg prep dqan alpha alpha @CHECK # Check tenses. if (!N("noun",8) || !N("noun",9)) fail(); @POST L("tmp8") = N(8); L("tmp9") = N(9); group(9,9,"_noun"); pncopyvars(L("tmp9"),N(9)); group(8,8,"_noun"); pncopyvars(L("tmp8"),N(8)); @RULES _xNIL <- _np _vg _prep _det [star] _xWILD [star match=(_quan _num _xNUM)] _adj [star] _noun [star] _xALPHA _xALPHA _xWILD [one lookahead match=(_prep _fnword _clausesep)] @@ # of dqan and noun @POST group(2,4,"_np"); N("bracket",2) = 1; @RULES _xNIL <- of [s] _noun [plus] _conj _noun _xWILD [one lookahead match=(_vg _verb _prep _qEOS _xEND)] @@
@NODES _LINE

@RULES
# Taking out the big list of cities.
# _CityName <- _xWILD [s one match=( _cityPhrase _cityWord)] @@
@PATH _ROOT _paragraph _sentence

# Event: "<anaphora> <prep> <det> ... <company> for <money> as
# <position> <commentor>".  Records position and commentor on a
# "comment" concept under the event's object.
# (Restored line structure from the collapsed one-line form.)
@POST
  S("comment") = makeconcept(N("object",1),"comment");
  addstrval(S("comment"),"position",N("$text",9));
  addstrval(S("comment"),"commentor",N("$text",10));
  single();
@RULES
_event <-
  _eventAnaphora [s]                       ### (1)
  _prep [s]                                ### (2)
  _det [s]                                 ### (3)
  _xWILD [s plus except=(_companyMarker)]  ### (4)
  _companyMarker [s]                       ### (5)
  for [s]                                  ### (6)
  _money [s]                               ### (7)
  _be [s]                                  ### (8)
  _position [s]                            ### (9)
  _commentor [s]                           ### (10)
  @@

# Event: a company has strengthened its hold on a field.
# NOTE(review): this @POST reads N(5) as "degree", N(6) as "position"
# (the literal "strengthened") and N(11) as "field" (the wildcard run,
# not the _field node at 12) — confirm these element indices are the
# intended ones.
@POST
  S("comment") = makeconcept(N("object",1),"comment");
  addstrval(S("comment"),"position",N("$text",6));
  addstrval(S("comment"),"degree",N("$text",5));
  addstrval(S("comment"),"field",N("$text",11));
  single();
@RULES
_event <-
  _eventAnaphora [s]               ### (1)
  _conj [s optional]               ### (2)
  _company [s]                     ### (3)
  _have [s]                        ### (4)
  _adv [s]                         ### (5)
  strengthened [s]                 ### (6)
  _company [s]                     ### (7)
  hold [s]                         ### (8)
  on [s]                           ### (9)
  _det [s]                         ### (10)
  _xWILD [s plus except=(_field)]  ### (11)
  _field [s]                       ### (12)
  @@
@PATH _ROOT _LINE

# Classify a header line that was found by capitalization heuristics rather
# than by the KB header list.
# Should be a header from caps, not one from kb. Need to flag.
@POST
  # Default: unknown class, zero confidence.
  N("hi hdr conf") = 0;
  N("hi hdr class") = "NULL";
  # Explicit end-of-section flags take priority over general header-word flags.
  if (N("end education hdr")) N("hi hdr class") = "edu";
  else if (N("end skills hdr")) N("hi hdr class") = "skills";
  else if (N("end experience hdr")) N("hi hdr class") = "exp";
  else if (N("education hdrs")) N("hi hdr class") = "edu";
  else if (N("skills hdrs")) N("hi hdr class") = "skills";
  else if (N("experience hdrs")) N("hi hdr class") = "exp";
@RULES
_xNIL <- _header @@
@NODES _LINE

# Match the literal word "Objective" (capitalized, exactly 9 characters)
# as a resume section header word.
@PRE
<1,1> cap();
<1,1> length(9);
@RULES
# Ex: Objective
_ObjectiveHeaderWord [layer=(_headerWord )] <- _xWILD [min=1 max=1 s match=("Objective")] @@
# Compute a stem for nouns and verbs
# Example: stem("lounging") stores the computed stem in G("stem").
# (Exact stem form depends on the NLP++ stemmer.)
@CODE
G("stem") = stem("lounging");
@@CODE
@PATH _ROOT _paragraph _sentence

###############################################
# Ronald DiPietro (a Certified Public Accountant)
###############################################
# Attach a parenthesized title to the person it follows.
@POST
  S("con") = AddPerson(N(1),0);
  AddUniqueStr(S("con"),"title",N("title",2));
  single();
@RULES
_person <-
  _person ### (1)
  _titleParen ### (2)
  @@

###############################################
# U.S. District Magistrate Judges Amanda M. Knapp of and Moose Manhold
###############################################
# A plural title followed by a list of people: apply the title to each.
@PRE
<1,1> var("plural");
@POST
  AddPeople(N(2),"title",N("$text",1));
  single();
@RULES
_titleConj <-
  _title ### (1)
  _xWILD [plus match=(_titleCaps _person \, and all the _residentOf)] ### (2)
  @@

###############################################
# U.S. District Magistrate Judge Amanda M. Knapp of the Northern District of Ohio
###############################################
# Singular title + person + optional region; record title (and agency, if
# the title node carries one) on the person concept.
@POST
  if (N(3)) {
    S("con") = AddPerson(N(2),N(3));
  } else {
    S("con") = AddPerson(N(2),0);
  }
  if (N("title",1))
    AddUniqueStr(S("con"),"title",N("title",1));
  else
    AddUniqueStr(S("con"),"title",N("$text",1));
  if (N("agency",1))
    AddUniqueStr(S("con"),"agency",N("agency",1));
  single();
@RULES
_person <-
  _title ### (1)
  _person ### (2)
  _ofRegion [opt] ### (3)
  @@
@NODES _ROOT

# DTD content-model particles: a choice element is "| cp" and a sequence
# element is ", cp" (optional surrounding whitespace); each may carry a
# trailing occurrence indicator (* + ?).
@RULES
_choiceElement [unsealed] <-
  _whiteSpace [opt] ### (1)
  \| ### (2)
  _whiteSpace [opt] ### (3)
  _cp ### (4)
  _xWILD [opt matches=("*" "+" "?")] ### (5)
  @@
_seqElement [unsealed] <-
  _whiteSpace [opt] ### (1)
  \, [one] ### (2)
  _whiteSpace [opt] ### (3)
  _cp [one] ### (4)
  _xWILD [opt matches=("*" "+" "?")] ### (5)
  @@
# Convert a relative URL to an absolute URL
# resolveurl(base, relative) resolves "relative" against "base".
@CODE
"output.txt" << resolveurl("http://www.abcd.edu/x/y/z.html", "../gif/img1.gif")
    << "\n";
@@CODE
# prints out: http://www.abcd.edu/x/gif/img1.gif
# Remove all whitespace tokens inside text zones.
@CODE
L("hello") = 0;  # Dummy statement; this pass exists only for its rule.
@@CODE

@NODES _TEXTZONE

@POST
  excise(1,1);
@RULES
_xNIL <- _xWHITE [plus] @@
@NODES _LINE

# Detect a bullet at line start: a single control or punctuation character
# (excluding an open paren). Record it under the format/bullet concept,
# count occurrences, and flag the containing line as bulleted.
@PRE
<2,2> length(1);
@POST
  L("text") = N("$text");
  if (L("text") != "(") {
    L("bullet") = getconcept(G("format"),"bullet");
    L("con") = AddUniqueCon(L("bullet"),L("text"));
    X("bullet") = 1;                  # mark enclosing _LINE as bulleted
    IncrementCount(L("con"),"count");
    single();
  }
@RULES
_bullet <-
  _xSTART ### (1)
  _xWILD [one match=(_xCTRL _xPUNCT) fail=(\()] ### (2)
  @@
@PATH _ROOT _doctypedecl

# Track the current element name while inside an <!ELEMENT ...> declaration.
@POST
  G("CurrentElementName") = str(N("ElementName",1)) ;
  noop() ;
@@POST
@RULES
_xNIL <- _ElementDeclStart [one] ### (1)
  @@
@@RULES

# Clear the tracked element name when the declaration's end tag is reached.
@POST
  G("CurrentElementName") = 0 ;
  noop() ;
@@POST
@RULES
_xNIL <- _EndTag [one] ### (1)
  @@
@@RULES

# Mixed content models: "(#PCDATA | a | b)*" or bare "(#PCDATA)".
# NOTE(review): fail=("\) ") carries a trailing space inside the quotes —
# confirm this literal is intended.
@RULES
_Mixed <-
  _PCDataStart [one] ### (1)
  _xWILD [plus fail=("\) ")] ### (2)
  _whiteSpace [opt] ### (3)
  \) [one] ### (4)
  \* [one] ### (5)
  @@
_Mixed <-
  _PCDataStart [one] ### (1)
  _whiteSpace [opt] ### (2)
  \) [one] ### (3)
  @@
@@RULES

# Content particles (child element names); EMPTY/ANY are keywords, not names.
@CHECK
  if (strequal(str(N("$text",1)),"EMPTY")
      || strequal(str(N("$text",1)),"ANY"))
  {
    fail() ;
  }
@@CHECK
@POST
  # Assemble the child name from the one- or two-part match, then record it
  # under the current element and as a top-level element reference.
  S("buffer1") = str(N("$text",1)) ;
  S("buffer2") = str(N("$text",2)) ;
  if (N("$text",1) && N("$text",2))
  {
    S("ChildElementName") = S("buffer1") + S("buffer2") ;
  }
  else if ( N("$text",1))
  {
    S("ChildElementName") = S("buffer1") ;
  }
  else if ( N("$text",2))
  {
    S("ChildElementName") = S("buffer2") ;
  }
  G("CurrentConcept") = findconcept(G("Elements"),G("CurrentElementName")) ;
  G("CurrentChildConcept") = findconcept(G("CurrentConcept"),S("ChildElementName")) ;
  if (G("CurrentChildConcept") == 0 )
  {
    G("CurrentChildConcept") = makeconcept(G("CurrentConcept"),S("ChildElementName")) ;
    G("ReferenceIDforConcept") = findconcept(G("Elements"),S("ChildElementName")) ;
    if (G("ReferenceIDforConcept")==0)
    {
      makeconcept(G("Elements"),S("ChildElementName")) ;
    }
  }
  single() ;
@@POST
@RULES
_cp <-
  _xWILD [s one matches=("_xALPHA" "_" ":")] ### (1)
  _xWILD [s star matches=("_xALPHA" "_xNUM" "." "-" "_" ":")] ### (2)
  _xWILD [s opt matches=("?" "*" "+")] ### (3)
  @@
@@RULES

# Parenthesized parameter-entity reference as a content particle.
@RULES
_cp <-
  \( [one] ### (1)
  _PEReference [one] ### (2)
  \) [one] ### (3)
  _xWILD [opt match=("?" "+" "*")] ### (4)
  @@
@@RULES
# Display the knowledge base.
# NOTE(review): DispKB() — other passes in this analyzer use
# DisplayKB(con, depth); confirm DispKB is a defined helper, not a typo.
@CODE
DispKB();
@@CODE
@NODES _split

# Map each line's ICD-9 code to its split under G("mimic_splits"). Source
# codes may lack leading zeros, so also try "0"- and "00"-prefixed forms
# against the icd9_codes KB before reporting a miss to test.log.
@POST
  "test.log" << X("split") << ": " << N("$text", 1) << "\n";
  L("code") = N("$text", 1);
  L("alt_code_format1") = "0"+N("$text", 1);
  L("alt_code_format2") = "00"+N("$text", 1);
  L("code_con") = findconcept(G("icd9_codes"), N("$text", 1));
  L("alt_code_con1") = findconcept(G("icd9_codes"), L("alt_code_format1"));
  L("alt_code_con2") = findconcept(G("icd9_codes"), L("alt_code_format2"));
  if (L("code_con")) {
    # addstrval(L("code_con"), "split", X("split"));
    L("split_con") = findconcept(G("mimic_splits"), X("split"));
    makeconcept(L("split_con"), L("code"));
  }
  else if (L("alt_code_con1")) {
    # addstrval(L("alt_code_con1"), "split", X("split"));
    L("split_con") = findconcept(G("mimic_splits"), X("split"));
    makeconcept(L("split_con"), L("alt_code_format1"));
  }
  else if (L("alt_code_con2")) {
    # addstrval(L("alt_code_con2"), "split", X("split"));
    L("split_con") = findconcept(G("mimic_splits"), X("split"));
    makeconcept(L("split_con"), L("alt_code_format2"));
  }
  else {
    "test.log" << X("split") << ": " << N("$text", 1) << " has no match in icd9 kbb.\n";
  }
@RULES
_xNIL <- _LINE ### (1)
  @@
# Find first named node in phrase.
# NOTE(review): findnode(phrase, name) presumably returns 0 when no node
# matches — confirm against the NLP++ KB API.
L("return_con") = findnode(L("phrase"), L("name"));
@NODES _LINE

# Date vocabulary for resume date parsing: years, seasons, and words
# meaning "present".
@RULES
# Four-digit years 1950-2010.
_year <- _xWILD [one s match=( 1950 1951 1952 1953 1954 1955 1956 1957 1958 1959 1960 1961 1962 1963 1964 1965 1966 1967 1968 1969 1970 1971 1972 1973 1974 1975 1976 1977 1978 1979 1980 1981 1982 1983 1984 1985 1986 1987 1988 1989 1990 1991 1992 1993 1994 1995 1996 1997 1998 1999 2000 2001 2002 2003 2004 2005 2006 2007 2008 2009 2010 )] @@
# Two-digit years, optionally preceded by an apostrophe (e.g. '97).
_year <- _xWILD [opt s match=(\')] _xWILD [one s match=( 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 00 )] @@
_season <- _xWILD [one s match=( winter spring summer fall ) ] @@
_present <- _xWILD [one s match=( current present now ) ] @@
# Convert the strings in multi-string valued global variable to lower case
# gtolower() takes the NAME of the global variable, not the value.
@CODE
G("strs") = "abc";
G("strs")[1] = "def";
gtolower("strs");
"output.txt" << G("strs") << "\n";
@@CODE
@NODES _split

# Tokenize the split zone into lines: a run of only spaces/tabs/CRs ending
# in \n is a _BLANKLINE; anything else up to \n or end-of-input is a _LINE.
@RULES
_BLANKLINE <-
  _xWILD [min=0 max=0 matches=(\ \t \r)] ### (1)
  \n ### (2)
  @@
_LINE <-
  _xWILD [min=0 max=0 fails=(\r \n)] ### (1)
  _xWILD [one match=(\n _xEND)] ### (2)
  @@
# Close the file handle that an earlier pass opened into G("file").
@CODE
closefile(G("file"));
@@CODE
# Initialize (and clear) the ICD keyword and code trees in the KB.
@CODE
### ICD Keywords ##############################################
# "words" holds procedure/diagnosis keyword buckets; rebuilt each run.
G("words") = findconcept(findroot(),"words");
if (!G("words"))
  G("words") = makeconcept(findroot(),"words");
rmchildren(G("words"));
G("procedure_words") = AddUniqueCon(G("words"), "procedure");
rmchildren(G("procedure_words"));
G("diagnosis_words") = AddUniqueCon(G("words"), "diagnosis");
rmchildren(G("diagnosis_words"));

### ICD Terms/Attributes #######################################
# "codes" holds procedure/diagnosis code buckets; rebuilt each run.
G("codes") = findconcept(findroot(),"codes");
if (!G("codes"))
  G("codes") = makeconcept(findroot(),"codes");
rmchildren(G("codes"));
G("procedure_codes") = AddUniqueCon(G("codes"), "procedure");
rmchildren(G("procedure_codes"));
G("diagnosis_codes") = AddUniqueCon(G("codes"), "diagnosis");
rmchildren(G("diagnosis_codes"));
@@CODE
@PATH _ROOT _Mixed

# Inside a mixed-content model, parse each "| name" or "| %pe-ref;" child
# item. Whitespace alone is ignored; any other leftover text is an error.
@RULES
_childItem <-
  _whiteSpace [opt] ### (1)
  \| [one] ### (2)
  _whiteSpace [opt] ### (3)
  _xWILD [s one matches=("_xALPHA" "_" ":")] ### (4)
  _xWILD [s star matches=("_xALPHA" "_xNUM" "." "-" "_" ":")] ### (5)
  @@
_childItem <-
  _whiteSpace [opt] ### (1)
  \| [one] ### (2)
  _whiteSpace [opt] ### (3)
  _PEReference [one] ### (4)
  @@
@@RULES

# Stand-alone whitespace: no action.
@POST
  noop() ;
@@POST
@RULES
_xNIL <- _whiteSpace [one] ### (1)
  @@
@@RULES

# Anything else is stray text inside the PCDATA section — report it.
@POST
  "output.txt" << "Error: stray characters in PCDATA section\n";
  noop() ;
@@POST
@RULES
_xNIL <- _xWILD @@
@@RULES
# TODO: PUT YOUR CONTEXT IN HERE.
@NODES _TEXTZONE

# TODO: SET UP USER ACTIONS HERE.
# Tag South/Southeast-Asian country names as "location" named entities
# (semantic subtype "country") and relayer them as nouns.
@POST
  pncopyvars(1); # 06/05/06 AM.
  S("ne text") = phrasetext();
  S("ne type") = "location";
  S("ne type conf") = 95;
  S("ne") = 1;
  S("sem") = "location";
  S("sem location") = "country";
  S("mypos") = "NP";
  S("country") = S("stem") = phrasetext();
  if (G("verbose"))
    "country.txt" << phrasetext() << "\n";
  single();
@RULES
_country [layer=(_noun)] <- Bangladesh @@
_country [layer=(_noun)] <- Burma @@
_country [layer=(_noun)] <- Cambodia @@
_country [layer=(_noun)] <- China @@
_country [layer=(_noun)] <- India @@
_country [layer=(_noun)] <- Indonesia @@
_country [layer=(_noun)] <- Laos @@
_country [layer=(_noun)] <- Malaysia @@
_country [layer=(_noun)] <- Myanmar @@
_country [layer=(_noun)] <- Singapore @@
_country [layer=(_noun)] <- Sri _xWHITE Lanka @@
_country [layer=(_noun)] <- Thailand @@
_country [layer=(_noun)] <- Vietnam @@
@PATH _ROOT _header

# Strip whitespace tokens inside header zones.
@POST
  excise(1,1);
  noop();
@RULES
_xNIL <- _xWHITE [s] ### (1)
  @@
@CODE L("hello") = 0; @@CODE #@PATH _ROOT _TEXTZONE _sent @NODES _sent # det alpha # NIBBLE. @CHECK if (N("unknown",3)) succeed(); if (N("number",1) == "singular" && plural(N(3))) fail(); if (!N("verb",3)) fail(); if (N("pos num",3) < 2) fail(); @POST N("pos25 det-a-a") = 1; if (N("unknown",3)) alphatonoun(3); else { if (vconjq(N(3),"-en") && !N("noun",3) && !N("adj",3)) L("pos") = "VBN"; N("verb",3) = 0; # verb = 0 --N("pos num",3); if (N("noun",3) && N("pos num",3) == 1) alphatonoun(3); # 04/21/07 AM. if (L("pos")) N("mypos",3) = L("pos"); } @RULES _xNIL <- _det _adv [star] _xALPHA @@ # det alpha , alpha @PRE <2,2> var("adj"); <4,4> var("adj"); @POST fixnpnonhead(2); @RULES _xNIL <- _det _xALPHA \, [lookahead] _xALPHA @@ # ^ num alpha @CHECK if (N("noun",3)) { if (numbersagree(N(2),N(3))) { S("pos") = "noun"; succeed(); } else if (N("verb",3)) { S("pos") = "verb"; succeed(); } } else if (N("verb",3)) { S("pos") = "verb"; succeed(); } fail(); @POST if (S("pos") == "noun") fixnphead(3); else if (S("pos") == "verb") alphatovg(3,0,0); @RULES _xNIL <- _xSTART _num _xALPHA _xWILD [one fail=(_xALPHA _adj _noun)] @@ # prep alpha conj alpha # Note: Do an intersection of pos! @PRE <2,2> varz("pos10 p-a-x-a"); <2,2> varne("stem","to"); @CHECK # Looking for non-verb agreement. if (N("noun",3) && N("noun",5)) S("noun") = 1; if (N("adj",3) && N("adj",5)) S("adj") = 1; if (S("adj") && S("noun")) fail(); # Ambig. 
@POST N("pos10 p-a-x-a",2) = 1; if (S("noun")) { alphatonoun(5); alphatonoun(3); } else if (S("adj")) { alphatoadj(5); alphatoadj(3); } @RULES _xNIL <- _xWILD [one match=(_noun _np)] _prep _xALPHA _conj _xALPHA @@ #######################3 # NOUN LISTING # @PRE <1,1> var("prenoun"); @POST if (literal(N(2))) alphatonoun(2); if (pnname(N(2)) != "_np") group(2,2,"_np"); ++N("noun-list",2); @RULES _xNIL <- _prep _xWILD [one match=(_xALPHA _noun)] _xWILD [one lookahead match=( \, and _and)] @@ # NOUN LISTING # @PRE <1,1> var("prenoun"); <4,4> var("noun"); @POST if (literal(N(4))) alphatonoun(4); if (pnname(N(4)) != "_np") group(4,4,"_np"); N("noun-list",2) = 1; listadd(2,4,"true"); @RULES _xNIL <- _prep ### (1) _xWILD [one match=(_np)] ### (2) _xWILD [plus match=( \, and _and _conj)] ### (3) _xWILD [one match=(_xALPHA _noun)] ### (4) _xWILD [one lookahead match=( \, and _and _conj _qEOS _xEND)] ### (5) @@
@NODES _ROOT

# Group a subgroup header plus everything up to the next subgroup or group
# header into a _subgroup zone.
@RULES
_subgroup <-
  _subgroupHeader ### (1)
  _xWILD [fail=(_subgroupHeader _groupHeader)] ### (2)
  @@
@NODES _LINE

# Recognize a six-column tab-delimited line (five tabs). The rule only
# recognizes (noop); each non-tab run is grouped as _column via group=.
@POST
  noop();
@RULES
_xNIL <-
  _xWILD [fail=(\t) group="_column"] ### (1)
  \t ### (2)
  _xWILD [fail=(\t) group="_column"] ### (3)
  \t ### (4)
  _xWILD [fail=(\t) group="_column"] ### (5)
  \t ### (6)
  _xWILD [fail=(\t) group="_column"] ### (7)
  \t ### (8)
  _xWILD [fail=(\t) group="_column"] ### (9)
  \t ### (10)
  _xWILD [fail=(\t) group="_column"] ### (11)
  @@
# Remove node from concept's phrase.
# rmnode() deletes the phrase-node concept passed in L("con").
rmnode(L("con"));
# Final pass: collect titles for the top/rare groups, then display and
# persist the mimic_splits KB.
# NOTE(review): SaveToKB here vs SaveKB elsewhere in this analyzer —
# confirm both helpers exist.
@CODE
GetTitles(G("top"));
GetTitles(G("rare"));
DisplayKB(G("mimic_splits"), 1);
SaveToKB(G("mimic_splits"),"mimic_splits");
@@CODE
@NODES _enum

# Match a LaTeX-style "\item" marker inside an enumeration zone.
@RULES
_itemMarker <-
  \\ [s] ### (1)
  item [s] ### (2)
  @@
# See if analyzer is operating in an interactive environment
@CODE
# In VisualText, send output to a file, but outside of VisualText, direct
# outputs to a user-supplied buffer.
if (interactive())
   G("out") = "buf.txt";
else
   G("out") = cbuf();
G("out") << "Hello output!" << "\n";
@@CODE
@NODES _ROOT

# A line is a level-1 header candidate when every word on it is capitalized
# (word count equals capitalized-word count).
@POST
  if (num(N("words")) && num(N("words")) == num(N("caps"))) {
    N("level") = 1;
    N("possible") = 1;
    single();
  }
  # Disabled: single all-uppercase word as a level-2 header.
  # else if (num(N("words")) && num(N("words")) == num(N("upper")) && N("words") == 1) {
  # N("level") = 2;
  # single();
  # }
@RULES
_header <- _LINE ### (1)
  @@
# Fetch entire path of given concept as a string.
# (Path runs from the KB root down to L("con").)
L("return_str") = conceptpath(L("con"));
# Remove dictionary concept from KB.
# L("str") names the dictionary word to remove.
rmword(L("str"));
@PATH _ROOT _bodyZone _trZone _tdZone

# Capture the text of a table cell (optionally wrapped in <strong> tags)
# into the enclosing _tdZone's "value" variable.
@POST
  X("value") = N("$text",3);
@RULES
_xNIL <-
  _td ### (1)
  _strong [opt] ### (2)
  _xWILD [fail=(_tdClose _strongClose)] ### (3)
  _strongClose [opt] ### (4)
  _tdClose ### (5)
  @@
@NODES _LINE

# Recognize GPA labels: "Grade Point Average", "GPA", "G.P.A.".
@RULES
_GPA <- Grade [s] _xWHITE [s star] Point [s] _xWHITE [s star] Average [s] @@
_GPA <- GPA [s] @@
_GPA <- G [s] \. [s opt] _xWHITE [s star] P [s] \. [s opt] _xWHITE [s star] A [s] @@

# Changing this to exploit NLP++.
# @POST
# if ((num(N("integer")) >= 2 || num(N("integer")) <= 6) && (num(N("decimal")) >= 0 && num(N("decimal")) <= 99))
# N("GPAValue") = "true"; # Good candidate for GPA.
# noop()
@RULES
_xNIL <- _numDecimal @@

#@PRE
#<1,1> numrange(3, 4)
#<3,3> numrange(0, 99)
#@RULES
#_GPAValue [] <-
# _xNUM [s]
# \. [s]
# _xNUM [s] @@
@PATH _ROOT _LINE _brackets

# Capture the first lowercase word after "[" at the start of a bracketed
# region; store it on the enclosing _LINE (once) and log it to words.txt.
@PRE
<3,3> lowercase();
@POST
  if (!X("word",2)) {
    X("word",2) = N("$text",3);
    "words.txt" << N("$text",3) << "\n";
  }
@RULES
_xNIL <-
  _xSTART ### (1)
  \[ ### (2)
  _xWILD [plus match=(_xALPHA \_ \-)] ### (3)
  @@
@NODES _ROOT

# No-op placeholder pass (keeps the pass sequence numbering stable).
@RULES
_xNIL <- _xNIL ### (1)
  @@
@NODES _ROOT

# Treat each physical line as a label entry; record its text as a concept
# under G("labels").
@POST
  S("label") = makeconcept(G("labels"), N("$text", 1));
  single();
@RULES
_labelEntry <-
  _xWILD [fails=(\n \r)] ### (1)
  _xWILD [one match=(\n \r _xEND)] ### (2)
  @@
@NODES _LINE

# Clear the "base" setting on each capitalized phrase node.
@POST
  setbase(1,"false");
@RULES
_xNIL <- _Caps @@
@NODES _ROOT

# Remove whitespace tokens at the root level.
@POST
  excise(1,1);
  noop();
@RULES
_xNIL <- _xWHITE ### (1)
  @@
# Fetch the concept that node is a proxy for.
L("return_con") = nodeconcept(L("node"));
# Dump the KB tree under G("root").
# Second argument is the display mode flag passed through to DisplayKB.
@CODE
DisplayKB(G("root"), 0);
@@CODE
@PATH _ROOT _headerZone _iOpen

# Parse one attr="value" pair inside an open tag; store the attribute name
# and quoted value on the enclosing _iOpen zone.
@POST
  X("attr") = N("$text",1);
  X("value") = N("$text",4);
@RULES
_xNIL <-
  _xALPHA ### (1)
  \= ### (2)
  \" ### (3)
  _xWILD [fail=(\")] ### (4)
  \" ### (5)
  @@
@DECL ######## # FUNC: REGISTERX # SUBJ: Add string to a node's variable. # EX: register(X(4),"temporal"," # Adds a date string to "temporal" var of node # in context. # NOTE: For adding multiple values to a variable. # Should rename this to something else. # Lowercase register() conflicts with C++ reserved word, # when analyzer is compiled. ######## registerx( L("node"), # Parse tree node we are adding info to. L("field"), L("str") ) { if (!L("node") || !L("field") || !L("str")) return; L("vals") = pnvar(L("node"),L("field")); if (!L("vals")) L("len") = 0; else L("len") = arraylength(L("vals")); # Can't directly append a new value onto node. # Need something like pnaddval(L("node"),L("field"),L("str")). L("vals")[L("len")] = L("str"); pnreplaceval(L("node"),L("field"),L("vals")); } domadvl(L("n"),L("sent")) { } domcopyattrs(L("ref"),L("con")) { } domcopynodetocon(L("n"),L("con")) { } dommergeevent(L("ref"),L("cand")) { } domnewevent(L("ref"),L("con")) { } domnp(L("n"),L("sent")) { } ######## # FUNC: DOMNPOFNP # SUBJ: Try to categorize named entity. ######## domnpofnp( L("n"), # Reduced np. L("np1"), # First np. L("np2")) # of-np. { if (!pnvar(L("np2"),"ne") || pnvar(L("np2"),"ne type")) return; if (pnvar(L("np1"),"ne") || !pnvar(L("np1"),"sem")) return; L("sem1") = pnvar(L("np1"),"sem"); if (L("sem1") == "city") pnreplaceval(L("np2"),"ne type","location"); } ######## # FUNC: DOMOBJECTREGISTER # SUBJ: Domain- and task-specific register of object. # NOTE: Domain functions calling generic functions. # The reverse of previous implementations in Parse_EN-US. # This lets us do domain and task stuff as we like. # (One could go domain-to-generic or generic-to-domain, etc.) ######## domobjectregister( L("n"), # Object's pnode. eg, an _np node. L("cnode") # Object's clause pnode. ) { # Generic register of object reference in kb. L("obj") = objectregister(L("n"),L("cnode")); if (!L("obj")) return 0; # Generic resolve of object reference against list of objects # in the kb. 
L("object") = resolveobject(L("obj"),G("objects")); # Entities. # If the object has an array of entities, use that. # nenoderange( L("arr") = pnvar(L("n"),"ne arr"); if (L("arr")) { L("ii") = 0; while (L("arr")[L("ii")]) { L("x") = L("arr")[L("ii")]; L("ent") = entregister(L("x"),L("n")); L("entity") = resolveentity(L("ent"),G("entities")); ++L("ii"); } return L("obj"); } # Else if the object itself is a named entity, use that. L("ent") = entregister(L("n"),L("n")); L("entity") = resolveentity(L("ent"),G("entities")); return L("obj"); } ######## # FUNC: DOMENTITYREGISTER # SUBJ: Domain- and task-specific register of entity. ######## domentityregister( L("n"), # Object's pnode. eg, an _np node. L("cnode") # Object's clause pnode. ) { if (!L("n")) return 0; # Entities. # If the object has an array of entities, use that. # nenoderange( L("arr") = pnvar(L("n"),"ne arr"); if (L("arr")) { L("ii") = 0; while (L("arr")[L("ii")]) { L("x") = L("arr")[L("ii")]; L("ent") = entregister(L("x"),L("n")); L("entity") = resolveentity(L("ent"),G("entities")); ++L("ii"); } return L("entity"); # What if an array? ... } # Else if the object itself is a named entity, use that. L("ent") = entregister(L("n"),L("n")); L("entity") = resolveentity(L("ent"),G("entities")); return L("entity"); } @CODE L("hello") = 0; @@CODE
@MULTI _ROOT _LINE _split

# Remove newline/carriage-return tokens and blank-line nodes at every
# listed level of the tree.
@POST
  excise(1,1);
@RULES
_xNIL <- _xWILD [one matches=(\n \r _BLANKLINE)] ### (1)
  @@
@NODES _LINE @RULES # Ex: interdisciplinary _fieldNames <- _xWILD [min=1 max=1 s match=(interdisciplinary accounting acoustics adapted administration adult advertising aeronautical aeronautics aerospace aesthetics affairs african africology afro agribusiness agricultural agronomy air american analysis anatomy animal anthropology apparel applied aquatic archaeology architectural architecture army art arts asian assistance astronautics astronomy astrophysics athletic atmospheric audiology automotive aviation banking bilingual biochemistry bioengineering biological biology biomedical biophysics botany brain building business care cell ceramic chemical chemistry childhood chinese city civil classics climatology clinch clinical coastal cognitive communication communications communicative community comparative computer computing conservation construction consumer continuing control cooperative counseling counselor creative criminal criticism cs curriculum dairy decision demography dental dentistry dermatology design development developmental dietetics disorders drama early earth east eastern ecology econometrics economics education educational ee eecs electric electrical electronics elementary energy enforcement engineering english entomology environmental epidemiology estate ethics ethnic european evaluation exercise family fashion film finance fine fisheries fitness food force foreign forest forestry foundations french gas general genetics geochemistry geography geology geophysics geosciences german germanic gerontological gerontology government grain greek health hebrew history horticultural horticulture hospitality hotel human humanistic humanities husbandry immunology individual industrial information institutional instruction instructional insurance accountancy interior international italian japanese jewish journalism justice kinesiology labor laboratory landscape language languages latin law leadership learning legal leisure liberal library linguistics literary 
literature literatures lithuanian logistics management manufacturing marketing mass materials math mathematics mechanical mechanics media medical medicinal medicine metals meteorology microbiology middle military mineral mineralogy mining modern molecular motor music natural navy nematology neuroscience nuclear nutrition occupational ocean oceanographic oceanography office operations optics optometry oral organization park pathobiology pathology pedagogy pediatrics performance personnel pest petroleum pharmaceutical pharmaceutics pharmacology pharmacy philosophy physical physics physiology planetary planning plant police policy political pollution polymer portuguese poultry power processing program programs promotion psychology public quality quantitative radiologic radiological reading real recreation regional rehabilitation relations religion religious research resource resources respiratory restaurant romance rotc rural russian school science sciences service services slavic social sociology software soil solid spanish special speech sport sports state statistics structural student studies systems taxation teacher teaching technology telecommunications telemedicine tesl textile textiles theater theatre theology theory therapeutics therapist therapy thought tourism training transportation urban valley veterinary video visual voice water wildlife women work workforce writing zoology)] @@
# Perform a reduction on the range of rule elements from node1 to node2 and
# name the group node labelString. node1 and node2 should be a well-formed
# range in the current rule match. For example from N(1) to N(3)
@POST
  L("n") = group(N(1),N(2),"_np");
  "output.txt" << pnname(L("n")) << "\n";
@RULES
_xNIL <- _det _noun _xWILD [s lookahead fail=(_noun)] @@
# output.txt then gets an output like:
# _np
@NODES _LINE

# Attempt pronoun resolution: pass each pronoun node's concept together
# with the enclosing line's concept to ResolvePronoun.
@POST
  ResolvePronoun(X("con"),N("con"));
@RULES
_xNIL <- _pro ### (1)
  @@
@NODES _ROOT

# Reset the people counter before this analyzer's later passes run.
@CODE
G("people") = 0;
@@CODE
@NODES _ROOT

# Parse a "code<TAB>term" line into an _entry. The code/tab prefix and the
# trailing newline are excised, so only the term text remains in the node;
# code and term are kept as variables on the _entry.
@POST
  S("code") = N("$text", 1);
  S("term") = N("$text", 3);
  excise(4,4);
  excise(1,2);
  single();
@RULES
_entry <-
  _xWILD [fails=(\t _icdRoot)] ### (1)
  \t ### (2)
  _xWILD [fails=(\n \r)] ### (3)
  _xWILD [one matches=(\n \r)] ### (4)
  @@
@NODES _ROOT

# Recognize basic HTML table (tr/td/th) and anchor tags.
@RULES
_trStart <- \< tr \> @@
_trEnd <- \< \/ tr \> @@
_tdStart <- \< td \> @@
_tdEnd <- \< \/ td \> @@
_thStart <- \< th \> @@
_thEnd <- \< \/ th \> @@
_aStart <- \< a _xWILD [fail=(\>)] \> @@
_aEnd <- \< \/ a \> @@
# Open the per-KB debug log (append mode) and make sure the words/stats/
# conjugations KB concepts exist. Children are intentionally NOT cleared
# (rmchildren calls are commented out) so data accumulates across runs.
@CODE
L("debugpath") = G("$kbpath") + "debug.txt";
G("debug") = openfile(L("debugpath"),"app");
G("words") = findconcept(findroot(),"words");
if (!G("words"))
  G("words") = makeconcept(findroot(),"words");
#rmchildren(G("words"));
G("stats") = findconcept(findroot(),"stats");
if (!G("stats"))
  G("stats") = makeconcept(findroot(),"stats");
#rmchildren(G("stats"));
G("conjugations") = findconcept(findroot(),"conjugations");
if (!G("conjugations"))
  G("conjugations") = makeconcept(findroot(),"conjugations");
#rmchildren(G("conjugations"));
@@CODE
@PATH _ROOT _LINE

# If there's a "The" before a cap phrase, add it in.
# Reassess some things accordingly.
# Need a function to find a good runner-up.
# If it's humanname or job title, should trash those assignments.
@PRE
<1,1> cap();
@POST
  # "The X" is unlikely to be a person name or job title; reset those.
  if (N("hi class",3) == "humanname" || N("hi class",3) == "job title") {
    # Todo: find runner up confidence here.
    N("hi class",3) = "NULL";
    N("hi conf",3) = 0;
  }
  listadd(3,1,"true");
@RULES
_xNIL <-
  The [s]
  _xWHITE [s star]
  _Caps [s]
  @@
# Highlight noun phrases. Skipped entirely when input is pretagged or
# hiliting is disabled.
@CODE
if (G("pretagged"))
  exitpass();
if (!G("hilite")) # 10/25/10 AM.
  exitpass(); # 10/25/10 AM.
G("hello") = 0;
@@CODE

# Traverse the whole tree.
@MULTI _ROOT

@POST
  if (G("verbose"))
    "np.txt" << N("$text") << "\n";
  noop(); # Merely matching the rule will set text to green.
@RULES
_xNIL <- _xWILD [one match=( _np _nps )] @@
@NODES _term

# Capture the leading numeric code of a term line ("123, ...") onto the
# enclosing _term zone.
@POST
  X("code") = N("$text", 2);
@RULES
_xNIL <-
  _xSTART ### (1)
  _xNUM ### (2)
  \, ### (3)
  _xWILD @@
@NODES _ROOT

# Grow prose regions: absorb a following prose-like line (prose score > 2)
# into an existing _prose node, or seed a new _prose from two consecutive
# prose-like lines.
@PRE
<2,2> vargt("prose",2);
@POST
  listadd(1,2,"false");
@RULES
_xNIL <- _prose _LINE @@

@PRE
<1,1> vargt("prose",2);
<2,2> vargt("prose",2);
@RULES
_prose <- _LINE _LINE @@
@DECL ############################################### # General functions ############################################### AddUniqueCon(L("concept"),L("name")) { L("con") = findconcept(L("concept"),L("name")); if (!L("con")) L("con") = makeconcept(L("concept"),L("name")); return L("con"); } AddUniqueStr(L("concept"),L("attr"),L("value")) { if (L("value") && strval(L("concept"),L("attr")) != L("value")) addstrval(L("concept"),L("attr"),L("value")); } AddUniqueNum(L("concept"),L("attr"),L("value")) { "unique.txt" << L("attr") << " " << str(L("value")) << " " << conceptpath(L("concept")) << "\n"; L("val") = AttrValues(L("concept"),L("attr")); while (L("val")) { L("num") = getnumval(L("val")); "unique.txt" << " value: " << str(L("num")) << "\n"; if (L("num") == L("value")) return 0; L("val") = nextval(L("val")); } addnumval(L("concept"),L("attr"),L("value")); return 1; } AddUniqueConVal(L("concept"),L("attr"),L("value")) { "unique.txt" << L("attr") << " " << conceptpath(L("concept")) << " ==> " << L("attr") << " -- " << conceptpath(L("value")) << "\n"; L("val") = AttrValues(L("concept"),L("attr")); while (L("val")) { L("con") = getconval(L("val")); "unique.txt" << conceptname(L("con")) << "\n"; if (conceptpath(L("con")) == conceptpath(L("value"))) return 0; L("val") = nextval(L("val")); } addconval(L("concept"),L("attr"),L("value")); return 1; } CopyAttr(L("from"),L("to"),L("attr")) { L("from value") = strval(L("from"),L("attr")); if (L("from value")) { L("to value") = strval(L("to"),L("attr")); if (L("from value") && !L("to value")) addstrval(L("to"),L("attr"),L("from value")); } } CopyAttrNew(L("from"),L("to"),L("attr from"),L("attr to")) { L("from value") = strval(L("from"),L("attr from")); if (L("from value")) { L("to value") = strval(L("to"),L("attr to")); if (L("from value") && !L("to value")) addstrval(L("to"),L("attr to"),L("from value")); } } CopyConAttr(L("from"),L("to"),L("attr")) { L("from value") = conval(L("from"),L("attr")); if (L("from value")) { L("to 
value") = conval(L("to"),L("attr")); if (L("from value") && !L("to value")) addconval(L("to"),L("attr"),L("from value")); } } AttrValues(L("con"),L("attr")) { L("at") = findattr(L("con"),L("attr")); if (L("at")) return attrvals(L("at")); return 0; } LastChild(L("parent")) { L("child") = down(L("parent")); while (L("child")) { L("last") = L("child"); L("child") = next(L("child")); } return L("last"); } MakeCountCon(L("con"),L("count name")) { L("count name") = CountName(L("con"),L("count name")); return makeconcept(L("con"),L("count name")); } IncrementCount(L("con"),L("countname")) { L("count") = numval(L("con"),L("countname")); if (L("count")) { L("count") = L("count") + 1; replaceval(L("con"),L("countname"),L("count")); } else { addnumval(L("con"),L("countname"),1); L("count") = 1; } return L("count"); } CountName(L("con"),L("root")) { L("count") = IncrementCount(L("con"),L("root")); return L("root") + str(L("count")); } StripEndDigits(L("name")) { if (strisdigit(L("name"))) return 0; L("len") = strlength(L("name")) - 1; L("i") = L("len") - 1; L("str") = strpiece(L("name"),L("i"),L("len")); while (strisdigit(L("str")) && L("i")) { L("i")--; L("str") = strpiece(L("name"),L("i"),L("len")); } return strpiece(L("name"),0,L("i")); } ############################################### # KB Dump Functins ############################################### DumpKB(L("con"),L("file")) { L("dir") = G("$apppath") + "/kb/"; L("filename") = L("dir") + L("file") + ".kb"; if (!kbdumptree(L("con"),L("filename"))) { "kb.txt" << "FAILED dump: " << L("filename") << "\n"; } else { "kb.txt" << "DUMPED: " << L("filename") << "\n"; } } TakeKB(L("filename")) { L("path") = G("$apppath") + "kb\\" + L("filename") + ".kb"; "kb.txt" << "Taking: " << L("path") << "\n"; if (take(L("path"))) { "kb.txt" << " Taken successfully: " << L("path") << "\n"; } else { "kb.txt" << " Taken FAILED: " << L("path") << "\n"; } } ChildCount(L("con")) { L("count") = 0; L("child") = down(L("con")); while (L("child")) { 
L("count")++; L("child") = next(L("child")); } return L("count"); } ############################################### # KBB DISPLAY FUNCTIONS ############################################### DisplayKB(L("top con"),L("full")) { L("file") = DisplayFileName(); DisplayKBRecurse(L("file"),L("top con"),0,L("full")); L("file") << "\n"; return L("top con"); } KBHeader(L("text")) { L("file") = DisplayFileName(); L("file") << "#######################\n"; L("file") << "# " << L("text") << "\n"; L("file") << "#######################\n\n"; } DisplayFileName() { if (num(G("$passnum")) < 10) { L("file") = "ana00" + str(G("$passnum")); }else if (num(G("$passnum")) < 100) { L("file") = "ana0" + str(G("$passnum")); } else { L("file") = "ana" + str(G("$passnum")); } L("file") = L("file") + ".kbb"; return L("file"); } DisplayKBRecurse(L("file"),L("con"),L("level"),L("full")) { while (L("con")) { L("file") << SpacesStr(L("level")+1) << conceptname(L("con")); DisplayAttributes(L("file"),L("con"),L("full"),L("level")); L("file") << "\n"; if (down(L("con"))) { L("lev") = 1; DisplayKBRecurse(L("file"),down(L("con")),L("level")+L("lev"),L("full")); } if (L("level") == 0) return 0; L("con") = next(L("con")); } } DisplayAttributes(L("file"),L("con"),L("full"),L("level")) { L("attrs") = findattrs(L("con")); if (L("attrs")) L("file") << ": "; if (L("full") && L("attrs")) L("file") << "\n"; L("first attr") = 1; while (L("attrs")) { L("vals") = attrvals(L("attrs")); if (!L("full") && !L("first attr")) { L("file") << ", "; } if (L("full")) { if (!L("first attr")) L("file") << "\n"; L("file") << SpacesStr(L("level")+2); } L("file") << attrname(L("attrs")) << "=["; L("first") = 1; while (L("vals")) { if (!L("first")) L("file") << ","; L("val") = getstrval(L("vals")); L("num") = getnumval(L("vals")); L("con") = getconval(L("vals")); if (L("con")) { L("file") << conceptpath(L("con")); } else if (!L("full") && strlength(L("val")) > 20) { L("shorty") = strpiece(L("val"),0,20); L("file") << L("shorty"); 
L("file") << "..."; if (strendswith(L("val"),"\"")) L("file") << "\""; } else if (L("num") > -1) { L("file") << str(L("num")); } else { L("file") << L("val"); } L("first") = 0; L("vals") = nextval(L("vals")); } L("file") << "]"; L("first attr") = 0; L("attrs") = nextattr(L("attrs")); } } # Because NLP++ doesn't allow for empty strings, # this function can only be called with "num" >= 1 SpacesStr(L("num")) { L("n") = 1; L("spaces") = " "; while (L("n") < L("num")) { L("spaces") = L("spaces") + " "; L("n")++; } return L("spaces"); } ############################################### # DICTIONARY FUNCTIONS ############################################### DictionaryClear() { G("dictionary path") = G("$apppath") + "\\kb\\user\\dictionary.kb"; G("dictionary") = openfile(G("dictionary path")); } DictionaryWord(L("word"),L("attrName"),L("value"),L("attrType")) { L("file") = G("dictionary"); if (!dictfindword(L("word"))) L("file") << "add word \"" + L("word") + "\"\n"; L("file") << "ind attr\n" << findwordpath(L("word")) << "\n0\n"; L("file") << findwordpath(L("attrName")) << "\n"; if (L("attrType") == "str") L("file") << "pst\n" << L("value"); else if (L("attrType") == "num") L("file") << "pnum\n" << str(L("value")); else if (L("attrType") == "con") L("file") << "pcon\n" << conceptpath(L("value")); L("file") << "\nend ind\n\n"; } DictionaryEnd() { G("dictionary") << "\nquit\n\n"; closefile(G("dictionary")); } @@DECL
@NODES _areaCodes

# Record each number found in an area-codes zone as a concept under the
# current state's "areacodes" concept.
@POST
  L("areacodes") = getconcept(G("state"),"areacodes");
  makeconcept(L("areacodes"),N("$text"));
@RULES
_xNIL <- _xNUM ### (1)
  @@
@NODES _LINE

# Flag KB-dump delimiter lines: "ind attr" opens a block, "end ind" closes
# it. The flag is stored on the enclosing _LINE.
@POST
  X("start") = 1;
  single();
@RULES
_start <-
  ind ### (1)
  attr ### (2)
  @@

@POST
  X("end") = 1;
  single();
@RULES
_end <-
  end ### (1)
  ind ### (2)
  @@
@NODES _ROOT

##################################################
# Lookup of character codes in &ALPHABETIC; format
# (but not whitespace chars -- these are handled
# in the rule block for whitespace
##################################################

# Resolve a named entity (e.g. &amp;) against the "character_codes"
# concept under the KB root; on success, store the replacement text on
# the reduced node as S("textValue").
@POST
  G("root") = findroot() ;
  G("character_codes") = findconcept(G("root"),"character_codes") ;
  if (G("character_codes"))
  {
    G("current code") = findconcept(G("character_codes"),N("$text",2)) ;
  }
  else
    G("current code") = 0;
  if (G("current code"))
  {
    G("new text") = strval(G("current code"),"textValue") ;
    if (G("new text"))
    {
      S("textValue") = G("new text") ;
    }
  }
  single();
@@POST

@RULES
_specialCharacter <-
	\& [trig one]	### (1)
	_xALPHA [one]	### (2)
	\; [one]	### (3)
	@@
@@RULES

##################################################
# Lookup of character codes in &#NUMERIC; format
# (but not whitespace chars -- these are handled
# in the rule block for whitespace
##################################################

# Same lookup for decimal numeric entities (&#NN;).
# FIX(consistency): guard the lookup against a missing "character_codes"
# concept, exactly as the alphabetic block above does; previously this
# block called findconcept on a possibly-null parent.
@POST
  G("root") = findroot() ;
  G("character_codes") = findconcept(G("root"),"character_codes") ;
  if (G("character_codes"))
  {
    G("current code") = findconcept(G("character_codes"),N("$text",3)) ;
  }
  else
    G("current code") = 0;
  if (G("current code"))
  {
    G("new text") = strval(G("current code"),"textValue") ;
    if (G("new text"))
    {
      S("textValue") = G("new text") ;
    }
  }
  single();
@@POST

@RULES
_specialCharacter <-
	\& [one]	### (1)
	\# [trig one]	### (2)
	_xNUM [one]	### (3)
	\; [one]	### (4)
	@@
@@RULES

##################################################
# Lookup of character codes in &#xHEX; format
# (but not whitespace chars -- these are handled
# in the rule block for whitespace
##################################################

# Same lookup for hexadecimal numeric entities (&#xHH;), matching one to
# four hex digits. The hex digit string itself (element 4) is the KB key.
# FIX(consistency): same null-parent guard as above.
@POST
  G("root") = findroot() ;
  G("character_codes") = findconcept(G("root"),"character_codes") ;
  if (G("character_codes"))
  {
    G("current code") = findconcept(G("character_codes"),N("$text",4)) ;
  }
  else
    G("current code") = 0;
  if (G("current code"))
  {
    G("new text") = strval(G("current code"),"textValue") ;
    if (G("new text"))
    {
      S("textValue") = G("new text") ;
    }
  }
  single();
@@POST

@RULES
_specialCharacter <-
	\& [one]	### (1)
	\# [one]	### (2)
	x [trig one]	### (3)
	_xWILD [min=1 max=4 matches=("0" "1" "2" "3" "4" "5" "6" "7" "8" "9" "A" "B" "C" "D" "E" "F" "a" "b" "c" "d" "e" "f")]	### (4)
	\; [one]	### (5)
	@@
@@RULES
@PATH _ROOT _paragraph _sentence

# Reduce the phrase "a total of" to an _operator node tagged with
# type "total" (an exact-count quantifier).
@POST
  S("type") = "total";
  single();
@RULES
_operator <- a total of @@

# Reduce the phrase "more than" to an _operator node tagged with
# type ">" (a greater-than quantifier).
@POST
  S("type") = ">";
  single();
@RULES
_operator <- more than @@
@NODES _LINE @RULES # Ex: afro\_\\-\_american\_and\_african\_studies _field [layer=(_Caps )] <- afro [s] _xWHITE [star s] \\ [s] \- [s] _xWHITE [star s] american [s] _xWHITE [star s] and [s] _xWHITE [star s] african [s] _xWHITE [star s] studies [s] @@ # Ex: finance\_\\,\_insurance\_and\_real _field [layer=(_Caps )] <- finance [s] _xWHITE [star s] \\ [s] \, [s] _xWHITE [star s] insurance [s] _xWHITE [star s] and [s] _xWHITE [star s] real [s] @@ # Ex: exercise\_physiology\_\\/\_adult\_fitness _field [layer=(_Caps )] <- exercise [s] _xWHITE [star s] physiology [s] _xWHITE [star s] \\ [s] \/ [s] _xWHITE [star s] adult [s] _xWHITE [star s] fitness [s] @@ # Ex: atmospheric\_\\,\_and\_planetary\_sciences _field [layer=(_Caps )] <- atmospheric [s] _xWHITE [star s] \\ [s] \, [s] _xWHITE [star s] and [s] _xWHITE [star s] planetary [s] _xWHITE [star s] sciences [s] @@ # Ex: writing\_and\_humanistic\_studies _field [layer=(_Caps )] <- writing [s] _xWHITE [star s] and [s] _xWHITE [star s] humanistic [s] _xWHITE [star s] studies [s] @@ # Ex: women\_\\'\_s\_studies _field [layer=(_Caps )] <- women [s] _xWHITE [star s] \\ [s] \' [s] _xWHITE [star s] s [s] _xWHITE [star s] studies [s] @@ # Ex: wildlife\_and\_fisheries\_science _field [layer=(_Caps )] <- wildlife [s] _xWHITE [star s] and [s] _xWHITE [star s] fisheries [s] _xWHITE [star s] science [s] @@ # Ex: voice\_performance\_and\_pedagogy _field [layer=(_Caps )] <- voice [s] _xWHITE [star s] performance [s] _xWHITE [star s] and [s] _xWHITE [star s] pedagogy [s] @@ # Ex: urban\_studies\_and\_planning _field [layer=(_Caps )] <- urban [s] _xWHITE [star s] studies [s] _xWHITE [star s] and [s] _xWHITE [star s] planning [s] @@ # Ex: urban\_and\_regional\_planning _field [layer=(_Caps )] <- urban [s] _xWHITE [star s] and [s] _xWHITE [star s] regional [s] _xWHITE [star s] planning [s] @@ # Ex: urban\_and\_environmental\_planning _field [layer=(_Caps )] <- urban [s] _xWHITE [star s] and [s] _xWHITE [star s] environmental [s] _xWHITE 
[star s] planning [s] @@ # Ex: transportation\_systems\_and\_management _field [layer=(_Caps )] <- transportation [s] _xWHITE [star s] systems [s] _xWHITE [star s] and [s] _xWHITE [star s] management [s] @@ # Ex: student\_counseling\_and\_personnel _field [layer=(_Caps )] <- student [s] _xWHITE [star s] counseling [s] _xWHITE [star s] and [s] _xWHITE [star s] personnel [s] @@ # Ex: soil\_and\_water\_science _field [layer=(_Caps )] <- soil [s] _xWHITE [star s] and [s] _xWHITE [star s] water [s] _xWHITE [star s] science [s] @@ # Ex: slavic\_languages\_and\_literatures _field [layer=(_Caps )] <- slavic [s] _xWHITE [star s] languages [s] _xWHITE [star s] and [s] _xWHITE [star s] literatures [s] @@ # Ex: romance\_languages\_and\_literatures _field [layer=(_Caps )] <- romance [s] _xWHITE [star s] languages [s] _xWHITE [star s] and [s] _xWHITE [star s] literatures [s] @@ # Ex: recreation\_and\_park\_management _field [layer=(_Caps )] <- recreation [s] _xWHITE [star s] and [s] _xWHITE [star s] park [s] _xWHITE [star s] management [s] @@ # Ex: quality\_and\_manufacturing\_management _field [layer=(_Caps )] <- quality [s] _xWHITE [star s] and [s] _xWHITE [star s] manufacturing [s] _xWHITE [star s] management [s] @@ # Ex: political\_and\_social\_thought _field [layer=(_Caps )] <- political [s] _xWHITE [star s] and [s] _xWHITE [star s] social [s] _xWHITE [star s] thought [s] @@ # Ex: petroleum\_and\_natural\_gas _field [layer=(_Caps )] <- petroleum [s] _xWHITE [star s] and [s] _xWHITE [star s] natural [s] _xWHITE [star s] gas [s] @@ # Ex: nuclear\_and\_radiological\_engineering _field [layer=(_Caps )] <- nuclear [s] _xWHITE [star s] and [s] _xWHITE [star s] radiological [s] _xWHITE [star s] engineering [s] @@ # Ex: microbiology\_and\_cell\_science _field [layer=(_Caps )] <- microbiology [s] _xWHITE [star s] and [s] _xWHITE [star s] cell [s] _xWHITE [star s] science [s] @@ # Ex: library\_and\_information\_science _field [layer=(_Caps )] <- library [s] _xWHITE [star s] and [s] 
_xWHITE [star s] information [s] _xWHITE [star s] science [s] @@ # Ex: insurance\_and\_real\_estate _field [layer=(_Caps )] <- insurance [s] _xWHITE [star s] and [s] _xWHITE [star s] real [s] _xWHITE [star s] estate [s] @@ # Ex: instructional\_technology\_and\_telecommunications _field [layer=(_Caps )] <- instructional [s] _xWHITE [star s] technology [s] _xWHITE [star s] and [s] _xWHITE [star s] telecommunications [s] @@ # Ex: industrial\_and\_systems\_engineering _field [layer=(_Caps )] <- industrial [s] _xWHITE [star s] and [s] _xWHITE [star s] systems [s] _xWHITE [star s] engineering [s] @@ # Ex: industrial\_and\_manufacturing\_systems _field [layer=(_Caps )] <- industrial [s] _xWHITE [star s] and [s] _xWHITE [star s] manufacturing [s] _xWHITE [star s] systems [s] @@ # Ex: industrial\_and\_manufacturing\_engineering _field [layer=(_Caps )] <- industrial [s] _xWHITE [star s] and [s] _xWHITE [star s] manufacturing [s] _xWHITE [star s] engineering [s] @@ # Ex: health\_education\_and\_promotion _field [layer=(_Caps )] <- health [s] _xWHITE [star s] education [s] _xWHITE [star s] and [s] _xWHITE [star s] promotion [s] @@ # Ex: foreign\_languages\_and\_literatures _field [layer=(_Caps )] <- foreign [s] _xWHITE [star s] languages [s] _xWHITE [star s] and [s] _xWHITE [star s] literatures [s] @@ # Ex: fisheries\_and\_aquatic\_sciences _field [layer=(_Caps )] <- fisheries [s] _xWHITE [star s] and [s] _xWHITE [star s] aquatic [s] _xWHITE [star s] sciences [s] @@ # Ex: film\_and\_media\_studies _field [layer=(_Caps )] <- film [s] _xWHITE [star s] and [s] _xWHITE [star s] media [s] _xWHITE [star s] studies [s] @@ # Ex: family\_and\_consumer\_sciences _field [layer=(_Caps )] <- family [s] _xWHITE [star s] and [s] _xWHITE [star s] consumer [s] _xWHITE [star s] sciences [s] @@ # Ex: exercise\_and\_sport\_sciences _field [layer=(_Caps )] <- exercise [s] _xWHITE [star s] and [s] _xWHITE [star s] sport [s] _xWHITE [star s] sciences [s] @@ # Ex: 
environmental\_planning\_and\_management _field [layer=(_Caps )] <- environmental [s] _xWHITE [star s] planning [s] _xWHITE [star s] and [s] _xWHITE [star s] management [s] @@ # Ex: engineering\_and\_applied\_science _field [layer=(_Caps )] <- engineering [s] _xWHITE [star s] and [s] _xWHITE [star s] applied [s] _xWHITE [star s] science [s] @@ # Ex: electrical\_and\_computer\_engineering _field [layer=(_Caps )] <- electrical [s] _xWHITE [star s] and [s] _xWHITE [star s] computer [s] _xWHITE [star s] engineering [s] @@ # Ex: decision\_and\_information\_sciences _field [layer=(_Caps )] <- decision [s] _xWHITE [star s] and [s] _xWHITE [star s] information [s] _xWHITE [star s] sciences [s] @@ # Ex: dairy\_and\_poultry\_sciences _field [layer=(_Caps )] <- dairy [s] _xWHITE [star s] and [s] _xWHITE [star s] poultry [s] _xWHITE [star s] sciences [s] @@ # Ex: computing\_and\_information\_sciences _field [layer=(_Caps )] <- computing [s] _xWHITE [star s] and [s] _xWHITE [star s] information [s] _xWHITE [star s] sciences [s] @@ # Ex: computer\_science\_and\_engineering _field [layer=(_Caps )] <- computer [s] _xWHITE [star s] science [s] _xWHITE [star s] and [s] _xWHITE [star s] engineering [s] @@ # Ex: computer\_and\_information\_science _field [layer=(_Caps )] <- computer [s] _xWHITE [star s] and [s] _xWHITE [star s] information [s] _xWHITE [star s] science [s] @@ # Ex: coastal\_and\_oceanographic\_engineering _field [layer=(_Caps )] <- coastal [s] _xWHITE [star s] and [s] _xWHITE [star s] oceanographic [s] _xWHITE [star s] engineering [s] @@ # Ex: clinical\_and\_school\_psychology _field [layer=(_Caps )] <- clinical [s] _xWHITE [star s] and [s] _xWHITE [star s] school [s] _xWHITE [star s] psychology [s] @@ # Ex: clinical\_and\_health\_psychology _field [layer=(_Caps )] <- clinical [s] _xWHITE [star s] and [s] _xWHITE [star s] health [s] _xWHITE [star s] psychology [s] @@ # Ex: civil\_and\_environmental\_engineering _field [layer=(_Caps )] <- civil [s] _xWHITE [star s] and 
[s] _xWHITE [star s] environmental [s] _xWHITE [star s] engineering [s] @@ # Ex: brain\_and\_cognitive\_sciences _field [layer=(_Caps )] <- brain [s] _xWHITE [star s] and [s] _xWHITE [star s] cognitive [s] _xWHITE [star s] sciences [s] @@ # Ex: biological\_and\_agricultural\_engineering _field [layer=(_Caps )] <- biological [s] _xWHITE [star s] and [s] _xWHITE [star s] agricultural [s] _xWHITE [star s] engineering [s] @@ # Ex: agricultural\_and\_biological\_engineering _field [layer=(_Caps )] <- agricultural [s] _xWHITE [star s] and [s] _xWHITE [star s] biological [s] _xWHITE [star s] engineering [s] @@ # Ex: afro\_\\-\_american\_studies _field [layer=(_Caps )] <- afro [s] _xWHITE [star s] \\ [s] \- [s] _xWHITE [star s] american [s] _xWHITE [star s] studies [s] @@ # Ex: veterinary\_medical\_sciences _field [layer=(_Caps )] <- veterinary [s] _xWHITE [star s] medical [s] _xWHITE [star s] sciences [s] @@ # Ex: training\_and\_development _field [layer=(_Caps )] <- training [s] _xWHITE [star s] and [s] _xWHITE [star s] development [s] @@ # Ex: solid\_state\_science _field [layer=(_Caps )] <- solid [s] _xWHITE [star s] state [s] _xWHITE [star s] science [s] @@ # Ex: social\_studies\_education _field [layer=(_Caps )] <- social [s] _xWHITE [star s] studies [s] _xWHITE [star s] education [s] @@ # Ex: physical\_therapist\_assistance _field [layer=(_Caps )] <- physical [s] _xWHITE [star s] therapist [s] _xWHITE [star s] assistance [s] @@ # Ex: pharmacology\_and\_therapeutics _field [layer=(_Caps )] <- pharmacology [s] _xWHITE [star s] and [s] _xWHITE [star s] therapeutics [s] @@ # Ex: natural\_resources\_conservation _field [layer=(_Caps )] <- natural [s] _xWHITE [star s] resources [s] _xWHITE [star s] conservation [s] @@ # Ex: natural\_resource\_conservation _field [layer=(_Caps )] <- natural [s] _xWHITE [star s] resource [s] _xWHITE [star s] conservation [s] @@ # Ex: mineral\_engineering\_management _field [layer=(_Caps )] <- mineral [s] _xWHITE [star s] engineering [s] 
_xWHITE [star s] management [s] @@ # Ex: middle\_eastern\_languages _field [layer=(_Caps )] <- middle [s] _xWHITE [star s] eastern [s] _xWHITE [star s] languages [s] @@ # Ex: middle\_east\_studies _field [layer=(_Caps )] <- middle [s] _xWHITE [star s] east [s] _xWHITE [star s] studies [s] @@ # Ex: manufacturing\_engineering\_technology _field [layer=(_Caps )] <- manufacturing [s] _xWHITE [star s] engineering [s] _xWHITE [star s] technology [s] @@ # Ex: management\_information\_systems _field [layer=(_Caps )] <- management [s] _xWHITE [star s] information [s] _xWHITE [star s] systems [s] @@ # Ex: management\_and\_organization _field [layer=(_Caps )] <- management [s] _xWHITE [star s] and [s] _xWHITE [star s] organization [s] @@ # Ex: law\_and\_justice _field [layer=(_Caps )] <- law [s] _xWHITE [star s] and [s] _xWHITE [star s] justice [s] @@ # Ex: latin\_american\_studies _field [layer=(_Caps )] <- latin [s] _xWHITE [star s] american [s] _xWHITE [star s] studies [s] @@ # Ex: laboratory\_animal\_medicine _field [layer=(_Caps )] <- laboratory [s] _xWHITE [star s] animal [s] _xWHITE [star s] medicine [s] @@ # Ex: instruction\_and\_curriculum _field [layer=(_Caps )] <- instruction [s] _xWHITE [star s] and [s] _xWHITE [star s] curriculum [s] @@ # Ex: human\_resources\_management _field [layer=(_Caps )] <- human [s] _xWHITE [star s] resources [s] _xWHITE [star s] management [s] @@ # Ex: human\_resource\_management _field [layer=(_Caps )] <- human [s] _xWHITE [star s] resource [s] _xWHITE [star s] management [s] @@ # Ex: human\_resource\_development _field [layer=(_Caps )] <- human [s] _xWHITE [star s] resource [s] _xWHITE [star s] development [s] @@ # Ex: health\_services\_administration _field [layer=(_Caps )] <- health [s] _xWHITE [star s] services [s] _xWHITE [star s] administration [s] @@ # Ex: health\_science\_education _field [layer=(_Caps )] <- health [s] _xWHITE [star s] science [s] _xWHITE [star s] education [s] @@ # Ex: health\_information\_technology _field 
[layer=(_Caps )] <- health [s] _xWHITE [star s] information [s] _xWHITE [star s] technology [s] @@ # Ex: health\_information\_administration _field [layer=(_Caps )] <- health [s] _xWHITE [star s] information [s] _xWHITE [star s] administration [s] @@ # Ex: health\_evaluation\_sciences _field [layer=(_Caps )] <- health [s] _xWHITE [star s] evaluation [s] _xWHITE [star s] sciences [s] @@ # Ex: health\_care\_management _field [layer=(_Caps )] <- health [s] _xWHITE [star s] care [s] _xWHITE [star s] management [s] @@ # Ex: foundations\_of\_education _field [layer=(_Caps )] <- foundations [s] _xWHITE [star s] of [s] _xWHITE [star s] education [s] @@ # Ex: foreign\_language\_education _field [layer=(_Caps )] <- foreign [s] _xWHITE [star s] language [s] _xWHITE [star s] education [s] @@ # Ex: food\_service\_management _field [layer=(_Caps )] <- food [s] _xWHITE [star s] service [s] _xWHITE [star s] management [s] @@ # Ex: film\_and\_video _field [layer=(_Caps )] <- film [s] _xWHITE [star s] and [s] _xWHITE [star s] video [s] @@ # Ex: environmental\_pollution\_control _field [layer=(_Caps )] <- environmental [s] _xWHITE [star s] pollution [s] _xWHITE [star s] control [s] @@ # Ex: environmental\_engineering\_sciences _field [layer=(_Caps )] <- environmental [s] _xWHITE [star s] engineering [s] _xWHITE [star s] sciences [s] @@ # Ex: electric\_power\_engineering _field [layer=(_Caps )] <- electric [s] _xWHITE [star s] power [s] _xWHITE [star s] engineering [s] @@ # Ex: early\_childhood\_education _field [layer=(_Caps )] <- early [s] _xWHITE [star s] childhood [s] _xWHITE [star s] education [s] @@ # Ex: curriculum\_and\_instruction _field [layer=(_Caps )] <- curriculum [s] _xWHITE [star s] and [s] _xWHITE [star s] instruction [s] @@ # Ex: cooperative\_education\_program _field [layer=(_Caps )] <- cooperative [s] _xWHITE [star s] education [s] _xWHITE [star s] program [s] @@ # Ex: clinical\_laboratory\_sciences _field [layer=(_Caps )] <- clinical [s] _xWHITE [star s] laboratory 
[s] _xWHITE [star s] sciences [s] @@ # Ex: clinical\_laboratory\_science _field [layer=(_Caps )] <- clinical [s] _xWHITE [star s] laboratory [s] _xWHITE [star s] science [s] @@ # Ex: banking\_and\_finance _field [layer=(_Caps )] <- banking [s] _xWHITE [star s] and [s] _xWHITE [star s] finance [s] @@ # Ex: astronomy\_and\_astrophysics _field [layer=(_Caps )] <- astronomy [s] _xWHITE [star s] and [s] _xWHITE [star s] astrophysics [s] @@ # Ex: applied\_information\_technology _field [layer=(_Caps )] <- applied [s] _xWHITE [star s] information [s] _xWHITE [star s] technology [s] @@ # Ex: american\_ethnic\_studies _field [layer=(_Caps )] <- american [s] _xWHITE [star s] ethnic [s] _xWHITE [star s] studies [s] @@ # Ex: air\_force\_rotc _field [layer=(_Caps )] <- air [s] _xWHITE [star s] force [s] _xWHITE [star s] rotc [s] @@ # Ex: agricultural\_technology\_management _field [layer=(_Caps )] <- agricultural [s] _xWHITE [star s] technology [s] _xWHITE [star s] management [s] @@ # Ex: agricultural\_operations\_management _field [layer=(_Caps )] <- agricultural [s] _xWHITE [star s] operations [s] _xWHITE [star s] management [s] @@ # Ex: aeronautics\_and\_astronautics _field [layer=(_Caps )] <- aeronautics [s] _xWHITE [star s] and [s] _xWHITE [star s] astronautics [s] @@ # Ex: adapted\_physical\_education _field [layer=(_Caps )] <- adapted [s] _xWHITE [star s] physical [s] _xWHITE [star s] education [s] @@ # Ex: workforce\_education _field [layer=(_Caps )] <- workforce [s] _xWHITE [star s] education [s] @@ # Ex: wildlife\_science _field [layer=(_Caps )] <- wildlife [s] _xWHITE [star s] science [s] @@ # Ex: wildlife\_ecology _field [layer=(_Caps )] <- wildlife [s] _xWHITE [star s] ecology [s] @@ # Ex: voice\_performance _field [layer=(_Caps )] <- voice [s] _xWHITE [star s] performance [s] @@ # Ex: visual\_arts _field [layer=(_Caps )] <- visual [s] _xWHITE [star s] arts [s] @@ # Ex: veterinary\_science _field [layer=(_Caps )] <- veterinary [s] _xWHITE [star s] science [s] @@ # 
Ex: veterinary\_medicine _field [layer=(_Caps )] <- veterinary [s] _xWHITE [star s] medicine [s] @@ # Ex: urban\_studies _field [layer=(_Caps )] <- urban [s] _xWHITE [star s] studies [s] @@ # Ex: urban\_planning _field [layer=(_Caps )] <- urban [s] _xWHITE [star s] planning [s] @@ # Ex: urban\_development _field [layer=(_Caps )] <- urban [s] _xWHITE [star s] development [s] @@ # Ex: transportation\_systems _field [layer=(_Caps )] <- transportation [s] _xWHITE [star s] systems [s] @@ # Ex: tourism\_administration _field [layer=(_Caps )] <- tourism [s] _xWHITE [star s] administration [s] @@ # Ex: theatre\_arts _field [layer=(_Caps )] <- theatre [s] _xWHITE [star s] arts [s] @@ # Ex: textile\_design _field [layer=(_Caps )] <- textile [s] _xWHITE [star s] design [s] @@ # Ex: telecommunications\_studies _field [layer=(_Caps )] <- telecommunications [s] _xWHITE [star s] studies [s] @@ # Ex: teacher\_education _field [layer=(_Caps )] <- teacher [s] _xWHITE [star s] education [s] @@ # Ex: systems\_engineering _field [layer=(_Caps )] <- systems [s] _xWHITE [star s] engineering [s] @@ # Ex: student\_counseling _field [layer=(_Caps )] <- student [s] _xWHITE [star s] counseling [s] @@ # Ex: structural\_mechanics _field [layer=(_Caps )] <- structural [s] _xWHITE [star s] mechanics [s] @@ # Ex: sports\_medicine _field [layer=(_Caps )] <- sports [s] _xWHITE [star s] medicine [s] @@ # Ex: sport\_psychology _field [layer=(_Caps )] <- sport [s] _xWHITE [star s] psychology [s] @@ # Ex: speech\_pathology _field [layer=(_Caps )] <- speech [s] _xWHITE [star s] pathology [s] @@ # Ex: speech\_communication _field [layer=(_Caps )] <- speech [s] _xWHITE [star s] communication [s] @@ # Ex: special\_education _field [layer=(_Caps )] <- special [s] _xWHITE [star s] education [s] @@ # Ex: solid\_state _field [layer=(_Caps )] <- solid [s] _xWHITE [star s] state [s] @@ # Ex: soil\_science _field [layer=(_Caps )] <- soil [s] _xWHITE [star s] science [s] @@ # Ex: software\_engineering _field 
[layer=(_Caps )] <- software [s] _xWHITE [star s] engineering [s] @@ # Ex: social\_work _field [layer=(_Caps )] <- social [s] _xWHITE [star s] work [s] @@ # Ex: social\_studies _field [layer=(_Caps )] <- social [s] _xWHITE [star s] studies [s] @@ # Ex: slavic\_languages _field [layer=(_Caps )] <- slavic [s] _xWHITE [star s] languages [s] @@ # Ex: science\_education _field [layer=(_Caps )] <- science [s] _xWHITE [star s] education [s] @@ # Ex: school\_psychology _field [layer=(_Caps )] <- school [s] _xWHITE [star s] psychology [s] @@ # Ex: russian\_studies _field [layer=(_Caps )] <- russian [s] _xWHITE [star s] studies [s] @@ # Ex: rural\_sociology _field [layer=(_Caps )] <- rural [s] _xWHITE [star s] sociology [s] @@ # Ex: rotc\_programs _field [layer=(_Caps )] <- rotc [s] _xWHITE [star s] programs [s] @@ # Ex: romance\_languages _field [layer=(_Caps )] <- romance [s] _xWHITE [star s] languages [s] @@ # Ex: restaurant\_management _field [layer=(_Caps )] <- restaurant [s] _xWHITE [star s] management [s] @@ # Ex: respiratory\_technology _field [layer=(_Caps )] <- respiratory [s] _xWHITE [star s] technology [s] @@ # Ex: religious\_studies _field [layer=(_Caps )] <- religious [s] _xWHITE [star s] studies [s] @@ # Ex: rehabilitation\_science _field [layer=(_Caps )] <- rehabilitation [s] _xWHITE [star s] science [s] @@ # Ex: rehabilitation\_counseling _field [layer=(_Caps )] <- rehabilitation [s] _xWHITE [star s] counseling [s] @@ # Ex: recreation\_resources _field [layer=(_Caps )] <- recreation [s] _xWHITE [star s] resources [s] @@ # Ex: recreation\_management _field [layer=(_Caps )] <- recreation [s] _xWHITE [star s] management [s] @@ # Ex: real\_estate _field [layer=(_Caps )] <- real [s] _xWHITE [star s] estate [s] @@ # Ex: reading\_education _field [layer=(_Caps )] <- reading [s] _xWHITE [star s] education [s] @@ # Ex: radiological\_engineering _field [layer=(_Caps )] <- radiological [s] _xWHITE [star s] engineering [s] @@ # Ex: radiologic\_technology _field 
[layer=(_Caps )] <- radiologic [s] _xWHITE [star s] technology [s] @@ # Ex: quantitative\_analysis _field [layer=(_Caps )] <- quantitative [s] _xWHITE [star s] analysis [s] @@ # Ex: public\_relations _field [layer=(_Caps )] <- public [s] _xWHITE [star s] relations [s] @@ # Ex: public\_policy _field [layer=(_Caps )] <- public [s] _xWHITE [star s] policy [s] @@ # Ex: public\_health _field [layer=(_Caps )] <- public [s] _xWHITE [star s] health [s] @@ # Ex: public\_administration _field [layer=(_Caps )] <- public [s] _xWHITE [star s] administration [s] @@ # Ex: polymer\_science _field [layer=(_Caps )] <- polymer [s] _xWHITE [star s] science [s] @@ # Ex: political\_science _field [layer=(_Caps )] <- political [s] _xWHITE [star s] science [s] @@ # Ex: police\_science _field [layer=(_Caps )] <- police [s] _xWHITE [star s] science [s] @@ # Ex: plant\_sciences _field [layer=(_Caps )] <- plant [s] _xWHITE [star s] sciences [s] @@ # Ex: plant\_physiology _field [layer=(_Caps )] <- plant [s] _xWHITE [star s] physiology [s] @@ # Ex: plant\_pathology _field [layer=(_Caps )] <- plant [s] _xWHITE [star s] pathology [s] @@ # Ex: planetary\_sciences _field [layer=(_Caps )] <- planetary [s] _xWHITE [star s] sciences [s] @@ # Ex: physical\_therapy _field [layer=(_Caps )] <- physical [s] _xWHITE [star s] therapy [s] @@ # Ex: physical\_therapist _field [layer=(_Caps )] <- physical [s] _xWHITE [star s] therapist [s] @@ # Ex: physical\_education _field [layer=(_Caps )] <- physical [s] _xWHITE [star s] education [s] @@ # Ex: pharmacy\_health _field [layer=(_Caps )] <- pharmacy [s] _xWHITE [star s] health [s] @@ # Ex: pharmaceutical\_sciences _field [layer=(_Caps )] <- pharmaceutical [s] _xWHITE [star s] sciences [s] @@ # Ex: pest\_management _field [layer=(_Caps )] <- pest [s] _xWHITE [star s] management [s] @@ # Ex: park\_management _field [layer=(_Caps )] <- park [s] _xWHITE [star s] management [s] @@ # Ex: oral\_biology _field [layer=(_Caps )] <- oral [s] _xWHITE [star s] biology [s] @@ 
# Ex: operations\_research _field [layer=(_Caps )] <- operations [s] _xWHITE [star s] research [s] @@ # Ex: operations\_management _field [layer=(_Caps )] <- operations [s] _xWHITE [star s] management [s] @@ # Ex: office\_technology _field [layer=(_Caps )] <- office [s] _xWHITE [star s] technology [s] @@ # Ex: ocean\_engineering _field [layer=(_Caps )] <- ocean [s] _xWHITE [star s] engineering [s] @@ # Ex: occupational\_therapy _field [layer=(_Caps )] <- occupational [s] _xWHITE [star s] therapy [s] @@ # Ex: occupational\_development _field [layer=(_Caps )] <- occupational [s] _xWHITE [star s] development [s] @@ # Ex: nuclear\_engineering _field [layer=(_Caps )] <- nuclear [s] _xWHITE [star s] engineering [s] @@ # Ex: navy\_rotc _field [layer=(_Caps )] <- navy [s] _xWHITE [star s] rotc [s] @@ # Ex: motor\_learning _field [layer=(_Caps )] <- motor [s] _xWHITE [star s] learning [s] @@ # Ex: molecular\_genetics _field [layer=(_Caps )] <- molecular [s] _xWHITE [star s] genetics [s] @@ # Ex: molecular\_biology _field [layer=(_Caps )] <- molecular [s] _xWHITE [star s] biology [s] @@ # Ex: modern\_languages _field [layer=(_Caps )] <- modern [s] _xWHITE [star s] languages [s] @@ # Ex: mining\_engineering _field [layer=(_Caps )] <- mining [s] _xWHITE [star s] engineering [s] @@ # Ex: mineral\_processing _field [layer=(_Caps )] <- mineral [s] _xWHITE [star s] processing [s] @@ # Ex: mineral\_engineering _field [layer=(_Caps )] <- mineral [s] _xWHITE [star s] engineering [s] @@ # Ex: mineral\_economics _field [layer=(_Caps )] <- mineral [s] _xWHITE [star s] economics [s] @@ # Ex: military\_science _field [layer=(_Caps )] <- military [s] _xWHITE [star s] science [s] @@ # Ex: metals\_science _field [layer=(_Caps )] <- metals [s] _xWHITE [star s] science [s] @@ # Ex: medicinal\_chemistry _field [layer=(_Caps )] <- medicinal [s] _xWHITE [star s] chemistry [s] @@ # Ex: medical\_sciences _field [layer=(_Caps )] <- medical [s] _xWHITE [star s] sciences [s] @@ # Ex: medical\_school 
_field [layer=(_Caps )] <- medical [s] _xWHITE [star s] school [s] @@ # Ex: medical\_assistance _field [layer=(_Caps )] <- medical [s] _xWHITE [star s] assistance [s] @@ # Ex: media\_studies _field [layer=(_Caps )] <- media [s] _xWHITE [star s] studies [s] @@ # Ex: media\_arts _field [layer=(_Caps )] <- media [s] _xWHITE [star s] arts [s] @@ # Ex: mechanical\_engineering _field [layer=(_Caps )] <- mechanical [s] _xWHITE [star s] engineering [s] @@ # Ex: mathematics\_education _field [layer=(_Caps )] <- mathematics [s] _xWHITE [star s] education [s] @@ # Ex: materials\_science _field [layer=(_Caps )] <- materials [s] _xWHITE [star s] science [s] @@ # Ex: materials\_engineering _field [layer=(_Caps )] <- materials [s] _xWHITE [star s] engineering [s] @@ # Ex: mass\_communications _field [layer=(_Caps )] <- mass [s] _xWHITE [star s] communications [s] @@ # Ex: mass\_communication _field [layer=(_Caps )] <- mass [s] _xWHITE [star s] communication [s] @@ # Ex: manufacturing\_engineering _field [layer=(_Caps )] <- manufacturing [s] _xWHITE [star s] engineering [s] @@ # Ex: management\_technology _field [layer=(_Caps )] <- management [s] _xWHITE [star s] technology [s] @@ # Ex: management\_science _field [layer=(_Caps )] <- management [s] _xWHITE [star s] science [s] @@ # Ex: management\_information _field [layer=(_Caps )] <- management [s] _xWHITE [star s] information [s] @@ # Ex: literary\_theory _field [layer=(_Caps )] <- literary [s] _xWHITE [star s] theory [s] @@ # Ex: literary\_criticism _field [layer=(_Caps )] <- literary [s] _xWHITE [star s] criticism [s] @@ # Ex: liberal\_arts _field [layer=(_Caps )] <- liberal [s] _xWHITE [star s] arts [s] @@ # Ex: leisure\_studies _field [layer=(_Caps )] <- leisure [s] _xWHITE [star s] studies [s] @@ # Ex: legal\_assistance _field [layer=(_Caps )] <- legal [s] _xWHITE [star s] assistance [s] @@ # Ex: law\_enforcement _field [layer=(_Caps )] <- law [s] _xWHITE [star s] enforcement [s] @@ # Ex: landscape\_architecture _field 
[layer=(_Caps )] <- landscape [s] _xWHITE [star s] architecture [s] @@ # Ex: labor\_studies _field [layer=(_Caps )] <- labor [s] _xWHITE [star s] studies [s] @@ # Ex: labor\_relations _field [layer=(_Caps )] <- labor [s] _xWHITE [star s] relations [s] @@ # Ex: jewish\_studies _field [layer=(_Caps )] <- jewish [s] _xWHITE [star s] studies [s] @@ # Ex: international\_studies _field [layer=(_Caps )] <- international [s] _xWHITE [star s] studies [s] @@ # Ex: international\_business _field [layer=(_Caps )] <- international [s] _xWHITE [star s] business [s] @@ # Ex: interior\_design _field [layer=(_Caps )] <- interior [s] _xWHITE [star s] design [s] @@ # Ex: interior\_architecture _field [layer=(_Caps )] <- interior [s] _xWHITE [star s] architecture [s] @@ # Ex: interdisciplinary\_studies _field [layer=(_Caps )] <- interdisciplinary [s] _xWHITE [star s] studies [s] @@ # Ex: instructional\_technology _field [layer=(_Caps )] <- instructional [s] _xWHITE [star s] technology [s] @@ # Ex: institutional\_management _field [layer=(_Caps )] <- institutional [s] _xWHITE [star s] management [s] @@ # Ex: information\_systems _field [layer=(_Caps )] <- information [s] _xWHITE [star s] systems [s] @@ # Ex: information\_science _field [layer=(_Caps )] <- information [s] _xWHITE [star s] science [s] @@ # Ex: information\_management _field [layer=(_Caps )] <- information [s] _xWHITE [star s] management [s] @@ # Ex: industrial\_relations _field [layer=(_Caps )] <- industrial [s] _xWHITE [star s] relations [s] @@ # Ex: industrial\_engineering _field [layer=(_Caps )] <- industrial [s] _xWHITE [star s] engineering [s] @@ # Ex: individual\_studies _field [layer=(_Caps )] <- individual [s] _xWHITE [star s] studies [s] @@ # Ex: human\_resources _field [layer=(_Caps )] <- human [s] _xWHITE [star s] resources [s] @@ # Ex: human\_nutrition _field [layer=(_Caps )] <- human [s] _xWHITE [star s] nutrition [s] @@ # Ex: human\_ecology _field [layer=(_Caps )] <- human [s] _xWHITE [star s] ecology [s] 
@@ # Ex: human\_development _field [layer=(_Caps )] <- human [s] _xWHITE [star s] development [s] @@ # Ex: hotel\_management _field [layer=(_Caps )] <- hotel [s] _xWHITE [star s] management [s] @@ # Ex: hospitality\_management _field [layer=(_Caps )] <- hospitality [s] _xWHITE [star s] management [s] @@ # Ex: horticultural\_sciences _field [layer=(_Caps )] <- horticultural [s] _xWHITE [star s] sciences [s] @@ # Ex: horticultural\_science _field [layer=(_Caps )] <- horticultural [s] _xWHITE [star s] science [s] @@ # Ex: hebrew\_studies _field [layer=(_Caps )] <- hebrew [s] _xWHITE [star s] studies [s] @@ # Ex: health\_services _field [layer=(_Caps )] <- health [s] _xWHITE [star s] services [s] @@ # Ex: health\_sciences _field [layer=(_Caps )] <- health [s] _xWHITE [star s] sciences [s] @@ # Ex: health\_science _field [layer=(_Caps )] <- health [s] _xWHITE [star s] science [s] @@ # Ex: health\_policy _field [layer=(_Caps )] <- health [s] _xWHITE [star s] policy [s] @@ # Ex: health\_information _field [layer=(_Caps )] <- health [s] _xWHITE [star s] information [s] @@ # Ex: health\_education _field [layer=(_Caps )] <- health [s] _xWHITE [star s] education [s] @@ # Ex: health\_administration _field [layer=(_Caps )] <- health [s] _xWHITE [star s] administration [s] @@ # Ex: grain\_science _field [layer=(_Caps )] <- grain [s] _xWHITE [star s] science [s] @@ # Ex: gerontological\_studies _field [layer=(_Caps )] <- gerontological [s] _xWHITE [star s] studies [s] @@ # Ex: germanic\_studies _field [layer=(_Caps )] <- germanic [s] _xWHITE [star s] studies [s] @@ # Ex: general\_studies _field [layer=(_Caps )] <- general [s] _xWHITE [star s] studies [s] @@ # Ex: forest\_resources _field [layer=(_Caps )] <- forest [s] _xWHITE [star s] resources [s] @@ # Ex: foreign\_languages _field [layer=(_Caps )] <- foreign [s] _xWHITE [star s] languages [s] @@ # Ex: foreign\_affairs _field [layer=(_Caps )] <- foreign [s] _xWHITE [star s] affairs [s] @@ # Ex: food\_service _field [layer=(_Caps 
)] <- food [s] _xWHITE [star s] service [s] @@ # Ex: food\_science _field [layer=(_Caps )] <- food [s] _xWHITE [star s] science [s] @@ # Ex: fisheries\_science _field [layer=(_Caps )] <- fisheries [s] _xWHITE [star s] science [s] @@ # Ex: fine\_arts _field [layer=(_Caps )] <- fine [s] _xWHITE [star s] arts [s] @@ # Ex: film\_studies _field [layer=(_Caps )] <- film [s] _xWHITE [star s] studies [s] @@ # Ex: fashion\_design _field [layer=(_Caps )] <- fashion [s] _xWHITE [star s] design [s] @@ # Ex: family\_studies _field [layer=(_Caps )] <- family [s] _xWHITE [star s] studies [s] @@ # Ex: exercise\_physiology _field [layer=(_Caps )] <- exercise [s] _xWHITE [star s] physiology [s] @@ # Ex: european\_history _field [layer=(_Caps )] <- european [s] _xWHITE [star s] history [s] @@ # Ex: ethnic\_studies _field [layer=(_Caps )] <- ethnic [s] _xWHITE [star s] studies [s] @@ # Ex: environmental\_studies _field [layer=(_Caps )] <- environmental [s] _xWHITE [star s] studies [s] @@ # Ex: environmental\_sciences _field [layer=(_Caps )] <- environmental [s] _xWHITE [star s] sciences [s] @@ # Ex: environmental\_science _field [layer=(_Caps )] <- environmental [s] _xWHITE [star s] science [s] @@ # Ex: environmental\_planning _field [layer=(_Caps )] <- environmental [s] _xWHITE [star s] planning [s] @@ # Ex: environmental\_health _field [layer=(_Caps )] <- environmental [s] _xWHITE [star s] health [s] @@ # Ex: environmental\_engineering _field [layer=(_Caps )] <- environmental [s] _xWHITE [star s] engineering [s] @@ # Ex: english\_education _field [layer=(_Caps )] <- english [s] _xWHITE [star s] education [s] @@ # Ex: engineering\_technology _field [layer=(_Caps )] <- engineering [s] _xWHITE [star s] technology [s] @@ # Ex: engineering\_science _field [layer=(_Caps )] <- engineering [s] _xWHITE [star s] science [s] @@ # Ex: engineering\_physics _field [layer=(_Caps )] <- engineering [s] _xWHITE [star s] physics [s] @@ # Ex: engineering\_mechanics _field [layer=(_Caps )] <- 
engineering [s] _xWHITE [star s] mechanics [s] @@ # Ex: engineering\_management _field [layer=(_Caps )] <- engineering [s] _xWHITE [star s] management [s] @@ # Ex: energy\_engineering _field [layer=(_Caps )] <- energy [s] _xWHITE [star s] engineering [s] @@ # Ex: elementary\_education _field [layer=(_Caps )] <- elementary [s] _xWHITE [star s] education [s] @@ # Ex: electronics\_technology _field [layer=(_Caps )] <- electronics [s] _xWHITE [star s] technology [s] @@ # Ex: electrical\_engineering _field [layer=(_Caps )] <- electrical [s] _xWHITE [star s] engineering [s] @@ # Ex: educational\_research _field [layer=(_Caps )] <- educational [s] _xWHITE [star s] research [s] @@ # Ex: educational\_psychology _field [layer=(_Caps )] <- educational [s] _xWHITE [star s] psychology [s] @@ # Ex: educational\_policy _field [layer=(_Caps )] <- educational [s] _xWHITE [star s] policy [s] @@ # Ex: educational\_leadership _field [layer=(_Caps )] <- educational [s] _xWHITE [star s] leadership [s] @@ # Ex: educational\_evaluation _field [layer=(_Caps )] <- educational [s] _xWHITE [star s] evaluation [s] @@ # Ex: educational\_administration _field [layer=(_Caps )] <- educational [s] _xWHITE [star s] administration [s] @@ # Ex: earth\_sciences _field [layer=(_Caps )] <- earth [s] _xWHITE [star s] sciences [s] @@ # Ex: developmental\_biology _field [layer=(_Caps )] <- developmental [s] _xWHITE [star s] biology [s] @@ # Ex: dental\_sciences _field [layer=(_Caps )] <- dental [s] _xWHITE [star s] sciences [s] @@ # Ex: criminal\_justice _field [layer=(_Caps )] <- criminal [s] _xWHITE [star s] justice [s] @@ # Ex: creative\_writing _field [layer=(_Caps )] <- creative [s] _xWHITE [star s] writing [s] @@ # Ex: counselor\_education _field [layer=(_Caps )] <- counselor [s] _xWHITE [star s] education [s] @@ # Ex: cooperative\_education _field [layer=(_Caps )] <- cooperative [s] _xWHITE [star s] education [s] @@ # Ex: continuing\_education _field [layer=(_Caps )] <- continuing [s] _xWHITE [star 
s] education [s] @@ # Ex: computer\_programming _field [layer=(_Caps )] <- computer [s] _xWHITE [star s] programming [s] @@ # Ex: computer\_technology _field [layer=(_Caps )] <- computer [s] _xWHITE [star s] technology [s] @@ # Ex: computer\_science _field [layer=(_Caps )] <- computer [s] _xWHITE [star s] science [s] @@ # Ex: computer\_engineering _field [layer=(_Caps )] <- computer [s] _xWHITE [star s] engineering [s] @@ # Ex: comparative\_literature _field [layer=(_Caps )] <- comparative [s] _xWHITE [star s] literature [s] @@ # Ex: comparative\_law _field [layer=(_Caps )] <- comparative [s] _xWHITE [star s] law [s] @@ # Ex: community\_studies _field [layer=(_Caps )] <- community [s] _xWHITE [star s] studies [s] @@ # Ex: community\_education _field [layer=(_Caps )] <- community [s] _xWHITE [star s] education [s] @@ # Ex: communicative\_disorders _field [layer=(_Caps )] <- communicative [s] _xWHITE [star s] disorders [s] @@ # Ex: communication\_sciences _field [layer=(_Caps )] <- communication [s] _xWHITE [star s] sciences [s] @@ # Ex: communication\_disorders _field [layer=(_Caps )] <- communication [s] _xWHITE [star s] disorders [s] @@ # Ex: cognitive\_sciences _field [layer=(_Caps )] <- cognitive [s] _xWHITE [star s] sciences [s] @@ # Ex: cognitive\_science _field [layer=(_Caps )] <- cognitive [s] _xWHITE [star s] science [s] @@ # Ex: clinical\_sciences _field [layer=(_Caps )] <- clinical [s] _xWHITE [star s] sciences [s] @@ # Ex: clinical\_psychology _field [layer=(_Caps )] <- clinical [s] _xWHITE [star s] psychology [s] @@ # Ex: clinical\_ethics _field [layer=(_Caps )] <- clinical [s] _xWHITE [star s] ethics [s] @@ # Ex: clinch\_valley _field [layer=(_Caps )] <- clinch [s] _xWHITE [star s] valley [s] @@ # Ex: civil\_engineering _field [layer=(_Caps )] <- civil [s] _xWHITE [star s] engineering [s] @@ # Ex: city\_planning _field [layer=(_Caps )] <- city [s] _xWHITE [star s] planning [s] @@ # Ex: chemical\_engineering _field [layer=(_Caps )] <- chemical [s] 
_xWHITE [star s] engineering [s] @@ # Ex: ceramic\_science _field [layer=(_Caps )] <- ceramic [s] _xWHITE [star s] science [s] @@ # Ex: business\_logistics _field [layer=(_Caps )] <- business [s] _xWHITE [star s] logistics [s] @@ # Ex: business\_law _field [layer=(_Caps )] <- business [s] _xWHITE [star s] law [s] @@ # Ex: business\_administration _field [layer=(_Caps )] <- business [s] _xWHITE [star s] administration [s] @@ # Ex: building\_construction _field [layer=(_Caps )] <- building [s] _xWHITE [star s] construction [s] @@ # Ex: biomedical\_engineering _field [layer=(_Caps )] <- biomedical [s] _xWHITE [star s] engineering [s] @@ # Ex: biological\_sciences _field [layer=(_Caps )] <- biological [s] _xWHITE [star s] sciences [s] @@ # Ex: biological\_chemistry _field [layer=(_Caps )] <- biological [s] _xWHITE [star s] chemistry [s] @@ # Ex: bilingual\_education _field [layer=(_Caps )] <- bilingual [s] _xWHITE [star s] education [s] @@ # Ex: aviation\_technology _field [layer=(_Caps )] <- aviation [s] _xWHITE [star s] technology [s] @@ # Ex: automotive\_technology _field [layer=(_Caps )] <- automotive [s] _xWHITE [star s] technology [s] @@ # Ex: atmospheric\_sciences _field [layer=(_Caps )] <- atmospheric [s] _xWHITE [star s] sciences [s] @@ # Ex: athletic\_training _field [layer=(_Caps )] <- athletic [s] _xWHITE [star s] training [s] @@ # Ex: asian\_studies _field [layer=(_Caps )] <- asian [s] _xWHITE [star s] studies [s] @@ # Ex: asian\_languages _field [layer=(_Caps )] <- asian [s] _xWHITE [star s] languages [s] @@ # Ex: asian\_history _field [layer=(_Caps )] <- asian [s] _xWHITE [star s] history [s] @@ # Ex: art\_history _field [layer=(_Caps )] <- art [s] _xWHITE [star s] history [s] @@ # Ex: art\_education _field [layer=(_Caps )] <- art [s] _xWHITE [star s] education [s] @@ # Ex: army\_rotc _field [layer=(_Caps )] <- army [s] _xWHITE [star s] rotc [s] @@ # Ex: architecture\_school _field [layer=(_Caps )] <- architecture [s] _xWHITE [star s] school [s] @@ # Ex: 
architectural\_history _field [layer=(_Caps )] <- architectural [s] _xWHITE [star s] history [s] @@ # Ex: architectural\_engineering _field [layer=(_Caps )] <- architectural [s] _xWHITE [star s] engineering [s] @@ # Ex: applied\_mechanics _field [layer=(_Caps )] <- applied [s] _xWHITE [star s] mechanics [s] @@ # Ex: applied\_mathematics _field [layer=(_Caps )] <- applied [s] _xWHITE [star s] mathematics [s] @@ # Ex: apparel\_textiles _field [layer=(_Caps )] <- apparel [s] _xWHITE [star s] textiles [s] @@ # Ex: apparel\_design _field [layer=(_Caps )] <- apparel [s] _xWHITE [star s] design [s] @@ # Ex: animal\_sciences _field [layer=(_Caps )] <- animal [s] _xWHITE [star s] sciences [s] @@ # Ex: animal\_science _field [layer=(_Caps )] <- animal [s] _xWHITE [star s] science [s] @@ # Ex: animal\_nutrition _field [layer=(_Caps )] <- animal [s] _xWHITE [star s] nutrition [s] @@ # Ex: animal\_medicine _field [layer=(_Caps )] <- animal [s] _xWHITE [star s] medicine [s] @@ # Ex: animal\_husbandry _field [layer=(_Caps )] <- animal [s] _xWHITE [star s] husbandry [s] @@ # Ex: american\_studies _field [layer=(_Caps )] <- american [s] _xWHITE [star s] studies [s] @@ # Ex: american\_history _field [layer=(_Caps )] <- american [s] _xWHITE [star s] history [s] @@ # Ex: american\_ethnic _field [layer=(_Caps )] <- american [s] _xWHITE [star s] ethnic [s] @@ # Ex: agricultural\_technology _field [layer=(_Caps )] <- agricultural [s] _xWHITE [star s] technology [s] @@ # Ex: agricultural\_operations _field [layer=(_Caps )] <- agricultural [s] _xWHITE [star s] operations [s] @@ # Ex: agricultural\_engineering _field [layer=(_Caps )] <- agricultural [s] _xWHITE [star s] engineering [s] @@ # Ex: agricultural\_education _field [layer=(_Caps )] <- agricultural [s] _xWHITE [star s] education [s] @@ # Ex: agricultural\_economics _field [layer=(_Caps )] <- agricultural [s] _xWHITE [star s] economics [s] @@ # Ex: agricultural\_communications _field [layer=(_Caps )] <- agricultural [s] _xWHITE 
[star s] communications [s] @@ # Ex: african\_studies _field [layer=(_Caps )] <- african [s] _xWHITE [star s] studies [s] @@ # Ex: aerospace\_engineering _field [layer=(_Caps )] <- aerospace [s] _xWHITE [star s] engineering [s] @@ # Ex: aeronautical\_engineering _field [layer=(_Caps )] <- aeronautical [s] _xWHITE [star s] engineering [s] @@ # Ex: adult\_education _field [layer=(_Caps )] <- adult [s] _xWHITE [star s] education [s] @@ # Ex: zoology _field [layer=(_Caps )] <- zoology [s] @@ # Ex: writing _field [layer=(_Caps )] <- writing [s] @@ # Ex: tourism _field [layer=(_Caps )] <- tourism [s] @@ # Ex: theology _field [layer=(_Caps )] <- theology [s] @@ # Ex: theatre _field [layer=(_Caps )] <- theatre [s] @@ # Ex: theater _field [layer=(_Caps )] <- theater [s] @@ # Ex: tesl _field [layer=(_Caps )] <- tesl [s] @@ # Ex: telemedicine _field [layer=(_Caps )] <- telemedicine [s] @@ # Ex: telecommunications _field [layer=(_Caps )] <- telecommunications [s] @@ # Ex: teaching _field [layer=(_Caps )] <- teaching [s] @@ # Ex: taxation _field [layer=(_Caps )] <- taxation [s] @@ # Ex: statistics _field [layer=(_Caps )] <- statistics [s] @@ # Ex: spanish _field [layer=(_Caps )] <- spanish [s] @@ # Ex: sociology _field [layer=(_Caps )] <- sociology [s] @@ # Ex: slavic _field [layer=(_Caps )] <- slavic [s] @@ # Ex: science _field [layer=(_Caps )] <- science [s] @@ # Ex: school _field [layer=(_Caps )] <- school [s] @@ # Ex: russian _field [layer=(_Caps )] <- russian [s] @@ # Ex: religion _field [layer=(_Caps )] <- religion [s] @@ # Ex: rehabilitation _field [layer=(_Caps )] <- rehabilitation [s] @@ # Ex: recreation _field [layer=(_Caps )] <- recreation [s] @@ # Ex: psychology _field [layer=(_Caps )] <- psychology [s] @@ # Ex: portuguese _field [layer=(_Caps )] <- portuguese [s] @@ # Ex: physiology _field [layer=(_Caps )] <- physiology [s] @@ # Ex: physics _field [layer=(_Caps )] <- physics [s] @@ # Ex: philosophy _field [layer=(_Caps )] <- philosophy [s] @@ # Ex: pharmacy _field 
[layer=(_Caps )] <- pharmacy [s] @@ # Ex: pharmacology _field [layer=(_Caps )] <- pharmacology [s] @@ # Ex: pharmaceutics _field [layer=(_Caps )] <- pharmaceutics [s] @@ # Ex: pediatrics _field [layer=(_Caps )] <- pediatrics [s] @@ # Ex: pedagogy _field [layer=(_Caps )] <- pedagogy [s] @@ # Ex: pathology _field [layer=(_Caps )] <- pathology [s] @@ # Ex: pathobiology _field [layer=(_Caps )] <- pathobiology [s] @@ # Ex: optometry _field [layer=(_Caps )] <- optometry [s] @@ # Ex: optics _field [layer=(_Caps )] <- optics [s] @@ # Ex: operations _field [layer=(_Caps )] <- operations [s] @@ # Ex: oceanography _field [layer=(_Caps )] <- oceanography [s] @@ # Ex: nutrition _field [layer=(_Caps )] <- nutrition [s] @@ # Ex: neuroscience _field [layer=(_Caps )] <- neuroscience [s] @@ # Ex: nematology _field [layer=(_Caps )] <- nematology [s] @@ # Ex: music _field [layer=(_Caps )] <- music [s] @@ # Ex: mining _field [layer=(_Caps )] <- mining [s] @@ # Ex: mineralogy _field [layer=(_Caps )] <- mineralogy [s] @@ # Ex: microbiology _field [layer=(_Caps )] <- microbiology [s] @@ # Ex: meteorology _field [layer=(_Caps )] <- meteorology [s] @@ # Ex: metals _field [layer=(_Caps )] <- metals [s] @@ # Ex: medicine _field [layer=(_Caps )] <- medicine [s] @@ # Ex: media _field [layer=(_Caps )] <- media [s] @@ # Ex: mechanics _field [layer=(_Caps )] <- mechanics [s] @@ # Ex: mathematics _field [layer=(_Caps )] <- mathematics [s] @@ # Ex: math _field [layer=(_Caps )] <- math [s] @@ # Ex: marketing _field [layer=(_Caps )] <- marketing [s] @@ # Ex: manufacturing _field [layer=(_Caps )] <- manufacturing [s] @@ # Ex: management _field [layer=(_Caps )] <- management [s] @@ # Ex: logistics _field [layer=(_Caps )] <- logistics [s] @@ # Ex: lithuanian _field [layer=(_Caps )] <- lithuanian [s] @@ # Ex: literature _field [layer=(_Caps )] <- literature [s] @@ # Ex: linguistics _field [layer=(_Caps )] <- linguistics [s] @@ # Ex: law _field [layer=(_Caps )] <- law [s] @@ # Ex: latin _field 
[layer=(_Caps )] <- latin [s] @@ # Ex: languages _field [layer=(_Caps )] <- languages [s] @@ # Ex: kinesiology _field [layer=(_Caps )] <- kinesiology [s] @@ # Ex: journalism _field [layer=(_Caps )] <- journalism [s] @@ # Ex: japanese _field [layer=(_Caps )] <- japanese [s] @@ # Ex: italian _field [layer=(_Caps )] <- italian [s] @@ # Ex: insurance _field [layer=(_Caps )] <- insurance [s] @@ # Ex: immunology _field [layer=(_Caps )] <- immunology [s] @@ # Ex: humanities _field [layer=(_Caps )] <- humanities [s] @@ # Ex: hospitality _field [layer=(_Caps )] <- hospitality [s] @@ # Ex: horticulture _field [layer=(_Caps )] <- horticulture [s] @@ # Ex: history _field [layer=(_Caps )] <- history [s] @@ # Ex: hebrew _field [layer=(_Caps )] <- hebrew [s] @@ # Ex: greek _field [layer=(_Caps )] <- greek [s] @@ # Ex: government _field [layer=(_Caps )] <- government [s] @@ # Ex: gerontology _field [layer=(_Caps )] <- gerontology [s] @@ # Ex: german _field [layer=(_Caps )] <- german [s] @@ # Ex: geosciences _field [layer=(_Caps )] <- geosciences [s] @@ # Ex: geophysics _field [layer=(_Caps )] <- geophysics [s] @@ # Ex: geology _field [layer=(_Caps )] <- geology [s] @@ # Ex: geography _field [layer=(_Caps )] <- geography [s] @@ # Ex: geochemistry _field [layer=(_Caps )] <- geochemistry [s] @@ # Ex: genetics _field [layer=(_Caps )] <- genetics [s] @@ # Ex: french _field [layer=(_Caps )] <- french [s] @@ # Ex: forestry _field [layer=(_Caps )] <- forestry [s] @@ # Ex: finance _field [layer=(_Caps )] <- finance [s] @@ # Ex: film _field [layer=(_Caps )] <- film [s] @@ # Ex: fashion _field [layer=(_Caps )] <- fashion [s] @@ # Ex: ethics _field [layer=(_Caps )] <- ethics [s] @@ # Ex: epidemiology _field [layer=(_Caps )] <- epidemiology [s] @@ # Ex: entomology _field [layer=(_Caps )] <- entomology [s] @@ # Ex: english _field [layer=(_Caps )] <- english [s] @@ # Ex: engineering _field [layer=(_Caps )] <- engineering [s] @@ # Ex: electronics _field [layer=(_Caps )] <- electronics [s] @@ # 
Ex: eecs _field [layer=(_Caps )] <- eecs [s] @@ # Ex: ee _field [layer=(_Caps )] <- ee [s] @@ # Ex: economics _field [layer=(_Caps )] <- economics [s] @@ # Ex: econometrics _field [layer=(_Caps )] <- econometrics [s] @@ # Ex: ecology _field [layer=(_Caps )] <- ecology [s] @@ # Ex: drama _field [layer=(_Caps )] <- drama [s] @@ # Ex: dietetics _field [layer=(_Caps )] <- dietetics [s] @@ # Ex: design _field [layer=(_Caps )] <- design [s] @@ # Ex: dermatology _field [layer=(_Caps )] <- dermatology [s] @@ # Ex: dentistry _field [layer=(_Caps )] <- dentistry [s] @@ # Ex: demography _field [layer=(_Caps )] <- demography [s] @@ # Ex: cs _field [layer=(_Caps )] <- cs [s] @@ # Ex: counseling _field [layer=(_Caps )] <- counseling [s] @@ # Ex: communications _field [layer=(_Caps )] <- communications [s] @@ # Ex: climatology _field [layer=(_Caps )] <- climatology [s] @@ # Ex: classics _field [layer=(_Caps )] <- classics [s] @@ # Ex: chinese _field [layer=(_Caps )] <- chinese [s] @@ # Ex: chemistry _field [layer=(_Caps )] <- chemistry [s] @@ # Ex: business _field [layer=(_Caps )] <- business [s] @@ # Ex: botany _field [layer=(_Caps )] <- botany [s] @@ # Ex: biophysics _field [layer=(_Caps )] <- biophysics [s] @@ # Ex: biology _field [layer=(_Caps )] <- biology [s] @@ # Ex: bioengineering _field [layer=(_Caps )] <- bioengineering [s] @@ # Ex: biochemistry _field [layer=(_Caps )] <- biochemistry [s] @@ # Ex: banking _field [layer=(_Caps )] <- banking [s] @@ # Ex: aviation _field [layer=(_Caps )] <- aviation [s] @@ # Ex: audiology _field [layer=(_Caps )] <- audiology [s] @@ # Ex: astrophysics _field [layer=(_Caps )] <- astrophysics [s] @@ # Ex: astronomy _field [layer=(_Caps )] <- astronomy [s] @@ # Ex: astronautics _field [layer=(_Caps )] <- astronautics [s] @@ # Ex: art _field [layer=(_Caps )] <- art [s] @@ # Ex: architecture _field [layer=(_Caps )] <- architecture [s] @@ # Ex: archaeology _field [layer=(_Caps )] <- archaeology [s] @@ # Ex: anthropology _field [layer=(_Caps )] <- 
anthropology [s] @@ # Ex: anatomy _field [layer=(_Caps )] <- anatomy [s] @@ # Ex: agronomy _field [layer=(_Caps )] <- agronomy [s] @@ # Ex: agribusiness _field [layer=(_Caps )] <- agribusiness [s] @@ # Ex: africology _field [layer=(_Caps )] <- africology [s] @@ # Ex: aesthetics _field [layer=(_Caps )] <- aesthetics [s] @@ # Ex: aeronautics _field [layer=(_Caps )] <- aeronautics [s] @@ # Ex: advertising _field [layer=(_Caps )] <- advertising [s] @@ # Ex: acoustics _field [layer=(_Caps )] <- acoustics [s] @@ # Ex: accounting _field [layer=(_Caps )] <- accounting [s] @@ # Ex: accountancy _field [layer=(_Caps )] <- accountancy [s] @@
# Fetch concept's list of attributes.
# findattrs() returns the attribute list of the KB concept held in L("con");
# the result is stored in L("return_attr") for the caller of this snippet.
L("return_attr") = findattrs(L("con"));
@NODES _ROOT

# Pass: flatten header lines.
# For each _LINE under _ROOT whose "header" variable is set, replace the
# _LINE node with its children (splice), exposing the header's tokens.
@PRE
<1,1> var("header");    # Only lines flagged as headers.
@POST
splice(1,1);            # Remove the _LINE wrapper, keep its children.
@RULES
_xNIL <- _LINE ### (1)
@@
# Pass: reduce a RECURSE mini-pass region to a single _RECURSE node.
# rfarecurse() builds the internal recurse object from elements 2, 3 and 5
# (presumably name, contained regions, trailing name — confirm against the
# rule-file-analyzer's sibling rfa* passes).
@POST
rfarecurse(2, 3, 5)
single()                 # Reduce the whole match to _RECURSE.
@RULES
_RECURSE [base] <-
	_soRECURSE [s]       # recurse-region opener
	_LIT                 # region name
	_REGIONS [opt]       # optional rule regions inside
	_eoRECURSE [s]       # recurse-region closer
	_LIT [opt]           # optional trailing name after the closer
	@@
@NODES _ROOT

# Tokenize HTML h4 delimiters and a layout marker.
@RULES
_headerClose <- \< \/ h 4 \> @@      # matches "</h4>"
_headerOpen <- \< h 4 \> @@          # matches "<h4>"
_bottom <- bottom \- horizontal @@   # matches the token run "bottom-horizontal"
@NODES _split

# Pass: add qualifying code entries to the hierarchy under the context
# node's concept X("con").  Presumably ICD codes — the project builds an
# icd_hierarchy KB elsewhere; confirm.
@POST
L("code") = N("code", 1);    # code string stored on the _entry node
L("term") = N("term", 1);    # term text stored on the _entry node
# Only codes containing a dot are added, and only when the part after the
# dot (split()[1]) is longer than one character.
if (strcontains(".", L("code"))) {
	L("codes") = split(L("code"), ".");
	if (strlength(L("codes")[1]) > 1) {
		addEntryToHier(X("con"), L("code"), L("term"));
	}
}
noop();                      # Leave the parse tree unchanged.
@RULES
_xNIL <- _entry ### (1)
@@
@NODES _LINE

# Pass: mark a list item — one or more leading '#' or '*' bullet markers
# at line start, followed by the rest of the line.
@RULES
_item <-
	_xSTART ### (1)
	_xWILD [plus match=(\# \*)] ### (2)    # bullet marker(s)
	_xWILD [fail=(_xEND)] ### (3)          # item text up to end of line
	@@
@CODE
G("hello") = 0;
@@CODE

#@PATH _ROOT _TEXTZONE _noun _caps
@NODES _caps

# Pass: named-entity decisions inside capitalized phrases (_caps).
# Writes ne/ne type/sem/stem either onto child nodes or, via pnparent(X()),
# onto the parent of the current _caps context node.

# Rule 1: five-element cap run whose middle noun is a person title.
# Splits the phrase: elements 4-5 become a person _np; 1-3 become an _np
# carrying the title noun's variables; the parent's "ne" flag is cleared.
# Want an _xFEAT match!
@CHECK
if (N("sem",3) != "person title") fail();
if (N("ne",4) || N("ne",5)) fail();
@POST
L("tmp3") = N(3);                    # keep the title noun before grouping
group(4,5,"_np");                    # name part -> _np
N("ne",4) = 1;
N("ne type",4) = N("sem",4) = "person";
N("ne type conf",4) = 80;
N("stem",4) = N("$text",4);
registerpersnames(N("$text",4));     # record the person name
group(1,3,"_np");                    # title part -> _np
pncopyvars(L("tmp3"),N(1));          # carry title noun's vars onto the new _np
L("x3") = pnparent(X()); # 07/10/12 AM.
pnreplaceval(L("x3"),"ne",0); # 07/10/12 AM.
X("ne") = 0;
# Need POS.
@RULES
_xNIL <-
	_xWILD [one match=(_xCAP _noun)]
	_xWILD [one match=(_xCAP _noun)]
	_noun
	_xWILD [one match=(_xCAP _noun)]
	_xWILD [one match=(_xCAP _noun)]
	@@

# Rule 2: cap run ending in an organization keyword -> parent is an
# organization NE; stem is the whole phrase text.
@POST
L("x3") = pnparent(X()); # 07/10/12 AM.
pnreplaceval(L("x3"),"ne",1); # 07/10/12 AM.
pnreplaceval(L("x3"),"ne type","organization"); # 07/10/12 AM.
pnreplaceval(L("x3"),"ne type conf",90); # 07/10/12 AM.
pnreplaceval(L("x3"),"sem","organization"); # 07/10/12 AM.
pnreplaceval(L("x3"),"stem",phrasetext()); # 07/10/12 AM.
@RULES
_xNIL <-
	_xWILD [plus match=(_xCAP _noun)]
	_xWILD [s one trigger match=(division department administration international ministry affairs hospital bank news tv school college university centre center)]
	_xEND
	@@

# Rule 3: whole phrase is caps + a noun that already has an NE type;
# propagate that noun's NE variables to the parent node.
@CHECK
if (!N("ne type",3)) fail();
@POST
L("x3") = pnparent(X()); # 07/10/12 AM.
pnreplaceval(L("x3"),"ne",1); # 07/10/12 AM.
pnreplaceval(L("x3"),"ne type",N("ne type",3)); # 07/10/12 AM.
pnreplaceval(L("x3"),"ne type conf",N("ne type conf",3)); # 07/10/12 AM.
pnreplaceval(L("x3"),"sem",N("sem",3)); # 07/10/12 AM.
pnreplaceval(L("x3"),"stem",N("stem",3)); # 07/10/12 AM.
@RULES
_xNIL <-
	_xSTART
	_xWILD [plus match=(_xCAP)]
	_noun
	_xEND
	@@

# Rule 4: <caps> <title keyword> <Cap> <Cap> at end -> person NE from the
# two trailing caps (first + last name), when at least one is unknown.
# Note: title name patterns.
@CHECK
if (!N("unknown",2) && !N("unknown",3)) fail();
@POST
L("tt") = N("$text",3) + " " + N("$text",4); # 07/10/12 AM.
L("x3") = pnparent(X()); # 07/10/12 AM.
pnreplaceval(L("x3"),"ne",1); # 07/10/12 AM.
pnreplaceval(L("x3"),"ne type","person"); # 07/10/12 AM.
pnreplaceval(L("x3"),"ne type conf",90); # 07/10/12 AM.
pnreplaceval(L("x3"),"sem","person"); # 07/10/12 AM.
pnreplaceval(L("x3"),"stem",L("tt")); # 07/10/12 AM.
# X("ne",3) = 1;
# X("ne type",3) = X("sem",3) = "person";
# X("ne type conf",3) = 90;
# X("stem",3) = N("$text",3) + " " + N("$text",4);
N("mypos",3) = "NP";
N("mypos",4) = "NP";
# Todo: Register first and last name.
@RULES
_xNIL <-
	_xWILD [plus match=(_xCAP _noun)]
	_xWILD [s trigger plus match=(_nounCountry representative ceo coo cfo officer president senator congressman)]
	_xCAP
	_xCAP
	_xEND
	@@

# Rule 5: whole phrase is <title> <alpha> -> person NE stemmed on the
# alpha (last name).
# title alpha
@POST
L("tt") = N("$text",3); # 07/10/12 AM.
L("x3") = pnparent(X()); # 07/10/12 AM.
pnreplaceval(L("x3"),"ne",1); # 07/10/12 AM.
pnreplaceval(L("x3"),"ne type","person"); # 07/10/12 AM.
pnreplaceval(L("x3"),"ne type conf",90); # 07/10/12 AM.
pnreplaceval(L("x3"),"sem","person"); # 07/10/12 AM.
pnreplaceval(L("x3"),"stem",L("tt")); # 07/10/12 AM.
# X("ne",3) = 1;
# X("ne type",3) = X("sem",3) = "person";
# X("ne type conf",3) = 90;
# X("stem",3) = N("$text",3);
N("mypos",3) = "NP";
# Todo: Register last name.
@RULES
_xNIL <-
	_xSTART
	_title [s]
	_xALPHA
	_xEND
	@@

# Rule 6: whole phrase is two unknown caps -> person NE (first + last name).
@CHECK
if (!N("unknown",2)) fail();
if (!N("unknown",3)) fail();
@POST
L("tt") = N("$text",2) + " " + N("$text",3); # 07/10/12 AM.
L("x3") = pnparent(X()); # 07/10/12 AM.
pnreplaceval(L("x3"),"ne",1); # 07/10/12 AM.
pnreplaceval(L("x3"),"ne type","person"); # 07/10/12 AM.
pnreplaceval(L("x3"),"ne type conf",90); # 07/10/12 AM.
pnreplaceval(L("x3"),"sem","person"); # 07/10/12 AM.
pnreplaceval(L("x3"),"stem",L("tt")); # 07/10/12 AM.
# X("ne",3) = 1;
# X("ne type",3) = X("sem",3) = "person";
# X("ne type conf",3) = 90;
# X("stem",3) = N("$text",2) + " " + N("$text",3);
N("mypos",2) = "NP";
N("mypos",3) = "NP";
# Todo: Register first and last name.
@RULES
_xNIL <-
	_xSTART
	_xCAP
	_xCAP
	_xEND
	@@
@PATH _ROOT _LINE _column

# Pass: delete whitespace tokens inside _column nodes.
@POST
excise(1,1);    # Remove the matched whitespace node from the tree.
noop();
@RULES
_xNIL <- _xWHITE [s] ### (1)
@@
@NODES _doctypedecl

# XML DTD: a declaration separator inside the doctype declaration is
# either a parameter-entity reference or whitespace (XML spec DeclSep).
@RULES
_DeclSep <- _PEReference [one] @@
_DeclSep <- _whiteSpace [one] @@
@@RULES
# Remove attributes and values of concept con's attribute named str.
# rmvals() deletes the attribute L("str") (and its values) from the KB
# concept held in L("con").
rmvals(L("con"), L("str"));
@NODES _ROOT

# Company suffix abbreviation: "inc." / "llc."
@RULES
_companySuffix <-
	_xWILD [one match=(inc llc)] ### (1)
	\. ### (2)
	@@

# Numbering abbreviation: "no."
@RULES
_number <-
	no ### (1)
	\. ### (2)
	@@

# Two or more consecutive newlines separate paragraphs (max=0 = unbounded).
@RULES
_paragraphSeparator <- \n [s min=2 max=0] ### (1)
@@

# Money with a fractional part, e.g. "$12.50" or "$12,50".
@POST
L("tmp") = N("$text",2) + "." + N("$text",4);    # normalize separator to "."
S("value") = flt(L("tmp"));                      # numeric value on _money
single();
@RULES
_money <-
	_xWILD [s one matches=(\$)] ### (1)
	_xNUM [s] ### (2)
	_xWILD [s one matches=( \. \, )] ### (3)
	_xNUM [s] ### (4)
	@@

# Whole-dollar money, e.g. "$12".
@POST
# NOTE(review): N("$text") carries no element number here — presumably
# should be N("$text",2) to read the numeral; confirm against engine
# semantics for elementless N() in multi-element rules.
S("value") = num(N("$text"));
single();
@RULES
_money <-
	_xWILD [s one matches=(\$)] ### (1)
	_xNUM [s] ### (2)
	@@
@PATH _ROOT _educationZone _educationInstance _LINE

# Pass: fall-back fills for an education instance.  X("...",3) writes to
# the _educationInstance node (3rd element of the @PATH).

# This one is for catching city turd to the right of school.
# Accepting short turds, even if known words, that haven't been reduced
# to anything.
@POST
if (!X("school",3))
	X("school",3) = N("$text",1);
# Accept the trailing caps as a city when unknown, or unreduced and short.
if (N("unknowns",5)
	|| (N("unreduced",5) && N("caplen",5) <= 2)
	)
	X("city",3) = N("$text",5);
@RULES
_xNIL <-
	_school [s]
	_xWHITE [s star]
	_xWILD [s one match=( \, \- )]
	_xWHITE [s star]
	_Caps
	@@

# This is for "late" schools. Ones formed after higher confidence
# stuff failed to find the school.
@CHECK
if (X("school",3)) fail();    # School not filled in yet.
@POST
X("school",3) = N("$text");   # Fill instance with school name.
# noop()
@RULES
_xNIL <- _school [s] @@

# If no degree in major constructs found, etc., use standalones.
@CHECK
if (X("degree",3)) fail();    # No degree yet.
@POST
X("degree",3) = N("$text");
@RULES
_xNIL <- _degree [s] @@

# Similarly for major.
@CHECK
if (X("major",3)) fail();     # No major yet.
@POST
X("major",3) = N("$text");
@RULES
_xNIL <- _major [s] @@
@NODES _ROOT

# Stub pass: _xNIL <- _xNIL never matches a real node, so this keeps a
# placeholder slot in the pass sequence without changing the parse tree.
@RULES
_xNIL <- _xNIL ### (1)
@@
@NODES _ROOT

# Pass: group an enumeration region between its begin and end markers.
# NOTE(review): the bare _xWILD in element 2 is unrestricted; sibling
# passes write _xWILD [fail=(...)] so the wildcard cannot swallow the
# closing marker — confirm the unbounded form is intended here.
@RULES
_enum <-
	_beginEnum ### (1)
	_xWILD ### (2)
	_endEnum ### (3)
	@@
@NODES _ROOT

# Pass: two adjacent enclosing delimiters with nothing between them form
# an empty item.  Element 3 is lookahead context only — singler(1,2)
# reduces just elements 1-2, leaving the separator/terminator in place.
@POST
singler(1,2);
@RULES
_emptyItem [base] <-
	_enclosedBy ### (1)
	_enclosedBy ### (2)
	_xWILD [match=(_separator _lineTerminator _xEND)] ### (3)
	@@
@NODES _ROOT

# Pass: recognize a "//" comment; capture the text following the two
# slashes, stopping before another "/" or a _pos node.
@POST
S("text") = N("$text",3);    # comment body, without the leading "//"
single();
@RULES
_comment <-
	\/ ### (1)
	\/ ### (2)
	_xWILD [fail=(\/ _pos)] ### (3)
	@@
@CODE
# Open input\portverbs.txt under the application directory.  The file is
# created/overwritten when processing the input named "file001.txt" and
# appended to ("app" mode) for every other input, so a multi-file run
# accumulates results in one output file.
# NOTE(review): the backslashes in the path are Windows-style literals —
# confirm behavior if this app is run on another platform.
G("filepath") = G("$apppath") + "\input\portverbs.txt";
"debug.txt" << G("filepath") << "\n";    # log the resolved path
if (G("$inputname") == "file001.txt")
	{
	G("file") = openfile(G("filepath"));          # first input: truncate
	}
else
	{
	G("file") = openfile(G("filepath"),"app");    # later inputs: append
	}
@@CODE
@CODE
fileout("lines1.txt"); # 06/10/00 AM.
@@CODE

# No way right now to initialize var in the context node.
# Not in the current pass, anyway.
# "nlines" will keep line count in every experience zone.
@PATH _ROOT _experienceZone

# Give each line a line number.
@POST
++X("nlines");               # running count on the _experienceZone node
N("lineno") = X("nlines");   # stamp this line with its ordinal
# noop() # Implicit.
"lines1.txt" << "------------" << "\n";
ndump("lines1.txt",1);       # debug dump of the matched _LINE node
@RULES
_xNIL <- _LINE [s] @@
# Pass: reduce a @NODES region header to a single _NODES node.
# NOTE(review): second argument "nodes" is a bare word where sibling rfa*
# passes pass numeric element references — confirm rfanodes() expects a
# literal tag here.
@POST
rfanodes(2, nodes)
single()             # Reduce the whole match to _NODES.
@RULES
_NODES [base] <-
	_soNODES         # @NODES opener
	_NONLIT [star]   # node names listed in the region
	_eoNODES [opt]   # optional region closer
	@@
@DECL

# NepaliNum: map a single Devanagari (Nepali) digit string to its integer
# value.  Generalized from the original, which handled only १-३ (1-3), to
# cover all ten Devanagari digits ०-९ (U+0966..U+096F).  For any other
# input the function falls through with no return value, matching the
# original behavior for unrecognized strings.
NepaliNum(L("numString")){
	if (L("numString") == "०")
		return 0;
	else if (L("numString") == "१")
		return 1;
	else if (L("numString") == "२")
		return 2;
	else if (L("numString") == "३")
		return 3;
	else if (L("numString") == "४")
		return 4;
	else if (L("numString") == "५")
		return 5;
	else if (L("numString") == "६")
		return 6;
	else if (L("numString") == "७")
		return 7;
	else if (L("numString") == "८")
		return 8;
	else if (L("numString") == "९")
		return 9;
}

@@DECL
@NODES _LINE

# Pass: whitespace at the very start of a line marks an indent.
@RULES
_indent <- _xSTART _xWILD [match=(_xWHITE)] @@